[SEBA-412] Automated reformat of Python code

Passes of modernize, autopep8, black, then check with flake8

flake8 + manual fixes:
  lib/xos-config
  lib/xos-kafka
  lib/xos-util
  xos/coreapi
  xos/api
  xos/xos_client

Change-Id: Ib23cf84cb13beb3c6381fa0d79594dc9131dc815
diff --git a/VERSION b/VERSION
index 98dd696..f00fa32 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.1.31
+2.1.32
diff --git a/containers/chameleon/Dockerfile.chameleon b/containers/chameleon/Dockerfile.chameleon
index b983cb6..c996e52 100644
--- a/containers/chameleon/Dockerfile.chameleon
+++ b/containers/chameleon/Dockerfile.chameleon
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/chameleon
-FROM xosproject/xos-base:2.1.31
+FROM xosproject/xos-base:2.1.32
 
 # xos-base already has protoc and dependencies installed
 
diff --git a/containers/xos/Dockerfile.client b/containers/xos/Dockerfile.client
index 132662a..3514dc1 100644
--- a/containers/xos/Dockerfile.client
+++ b/containers/xos/Dockerfile.client
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-client
-FROM xosproject/xos-libraries:2.1.31
+FROM xosproject/xos-libraries:2.1.32
 
 # Install XOS client
 COPY xos/xos_client /tmp/xos_client
diff --git a/containers/xos/Dockerfile.libraries b/containers/xos/Dockerfile.libraries
index bf4f9b1..29fc7a8 100644
--- a/containers/xos/Dockerfile.libraries
+++ b/containers/xos/Dockerfile.libraries
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-libraries
-FROM xosproject/xos-base:2.1.31
+FROM xosproject/xos-base:2.1.32
 
 # Add libraries
 COPY lib /opt/xos/lib
diff --git a/containers/xos/Dockerfile.synchronizer-base b/containers/xos/Dockerfile.synchronizer-base
index dcc729c..00da3ef 100644
--- a/containers/xos/Dockerfile.synchronizer-base
+++ b/containers/xos/Dockerfile.synchronizer-base
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-synchronizer-base
-FROM xosproject/xos-client:2.1.31
+FROM xosproject/xos-client:2.1.32
 
 COPY xos/synchronizers/new_base /opt/xos/synchronizers/new_base
 COPY xos/xos/logger.py /opt/xos/xos/logger.py
diff --git a/containers/xos/Dockerfile.xos-core b/containers/xos/Dockerfile.xos-core
index 82a4026..f3e5d7c 100644
--- a/containers/xos/Dockerfile.xos-core
+++ b/containers/xos/Dockerfile.xos-core
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-core
-FROM xosproject/xos-libraries:2.1.31
+FROM xosproject/xos-libraries:2.1.32
 
 # Install XOS
 ADD xos /opt/xos
diff --git a/lib/__init__.py b/lib/__init__.py
index 42722a8..b0fb0b2 100644
--- a/lib/__init__.py
+++ b/lib/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/lib/xos-config/setup.py b/lib/xos-config/setup.py
index 777a7a1..1e683f1 100644
--- a/lib/xos-config/setup.py
+++ b/lib/xos-config/setup.py
@@ -14,8 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from setuptools import setup
-
 try:
     from xosutil.autoversion_setup import setup_with_auto_version as setup
 except ImportError:
@@ -25,15 +23,14 @@
 
 from xosconfig.version import __version__
 
-setup(name='XosConfig',
-      version=__version__,
-      description='XOS Config Library',
-      author='Matteo Scandolo',
-      author_email='teo@onlab.us',
-      packages=['xosconfig'],
-      include_package_data=True,
-      # TODO add all deps to the install_requires section
-      install_requires=[
-          'pykwalify>=1.6.0'
-          ]
-     )
+setup(
+    name="XosConfig",
+    version=__version__,
+    description="XOS Config Library",
+    author="Matteo Scandolo",
+    author_email="teo@onlab.us",
+    packages=["xosconfig"],
+    include_package_data=True,
+    # TODO add all deps to the install_requires section
+    install_requires=["pykwalify>=1.6.0"],
+)
diff --git a/lib/xos-config/tests/__init__.py b/lib/xos-config/tests/__init__.py
index d4e8062..b0fb0b2 100644
--- a/lib/xos-config/tests/__init__.py
+++ b/lib/xos-config/tests/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/lib/xos-config/tests/test_config.py b/lib/xos-config/tests/test_config.py
index ee80b52..5eb86af 100644
--- a/lib/xos-config/tests/test_config.py
+++ b/lib/xos-config/tests/test_config.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,39 +14,50 @@
 
 
 import unittest
-from mock import patch
 import os
 from xosconfig import Config
 from xosconfig import Config as Config2
 
-basic_conf = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/confs/basic_conf.yaml")
-yaml_not_valid = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/confs/yaml_not_valid.yaml")
-invalid_format = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/confs/invalid_format.yaml")
-sample_conf = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/confs/sample_conf.yaml")
-override_conf = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/confs/override_conf.yaml")
-extend_conf = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/confs/extend_conf.yaml")
+basic_conf = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/confs/basic_conf.yaml"
+)
+yaml_not_valid = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/confs/yaml_not_valid.yaml"
+)
+invalid_format = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/confs/invalid_format.yaml"
+)
+sample_conf = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/confs/sample_conf.yaml"
+)
+override_conf = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/confs/override_conf.yaml"
+)
+extend_conf = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/confs/extend_conf.yaml"
+)
 
-small_schema = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/schemas/small_schema.yaml")
+small_schema = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/schemas/small_schema.yaml"
+)
 
-services_list = {
-  "xos-ws": [],
-  "xos-db": [],
-}
+services_list = {"xos-ws": [], "xos-db": []}
 
 db_service = [
-          {
-            "ModifyIndex": 6,
-            "CreateIndex": 6,
-            "Node": "0152982c3159",
-            "Address": "172.19.0.2",
-            "ServiceID": "0d53ce210785:frontend_xos_db_1:5432",
-            "ServiceName": "xos-db",
-            "ServiceTags": [],
-            "ServiceAddress": "172.18.0.4",
-            "ServicePort": 5432,
-            "ServiceEnableTagOverride": "false"
-          }
-        ]
+    {
+        "ModifyIndex": 6,
+        "CreateIndex": 6,
+        "Node": "0152982c3159",
+        "Address": "172.19.0.2",
+        "ServiceID": "0d53ce210785:frontend_xos_db_1:5432",
+        "ServiceName": "xos-db",
+        "ServiceTags": [],
+        "ServiceAddress": "172.18.0.4",
+        "ServicePort": 5432,
+        "ServiceEnableTagOverride": "false",
+    }
+]
+
 
 class XOSConfigTest(unittest.TestCase):
     """
@@ -77,15 +87,19 @@
         """
         with self.assertRaises(Exception) as e:
             Config.get("database")
-        self.assertEqual(e.exception.message, "[XOS-Config] Module has not been initialized")
+        self.assertEqual(
+            e.exception.message, "[XOS-Config] Module has not been initialized"
+        )
 
     def test_missing_file_exception(self):
         """
-        [XOS-Config] Raise if file not found 
+        [XOS-Config] Raise if file not found
         """
         with self.assertRaises(Exception) as e:
             Config.init("missing_conf")
-        self.assertEqual(e.exception.message, "[XOS-Config] Config file not found at: missing_conf")
+        self.assertEqual(
+            e.exception.message, "[XOS-Config] Config file not found at: missing_conf"
+        )
 
     def test_yaml_not_valid(self):
         """
@@ -93,7 +107,9 @@
         """
         with self.assertRaises(Exception) as e:
             Config.init(yaml_not_valid)
-        self.assertTrue(e.exception.message.startswith("[XOS-Config] The config format is wrong:"))
+        self.assertTrue(
+            e.exception.message.startswith("[XOS-Config] The config format is wrong:")
+        )
 
     def test_invalid_format(self):
         """
@@ -101,7 +117,13 @@
         """
         with self.assertRaises(Exception) as e:
             Config.init(invalid_format)
-        self.assertEqual(e.exception.message, "[XOS-Config] The config format is wrong: Schema validation failed:\n - Value '['I am', 'a yaml', 'but the', 'format is not', 'correct']' is not a dict. Value path: ''.")
+        self.assertEqual(
+            e.exception.message,
+            (
+                "[XOS-Config] The config format is wrong: Schema validation failed:\n"
+                " - Value '['I am', 'a yaml', 'but the', 'format is not', 'correct']' is not a dict. Value path: ''."
+            ),
+        )
 
     def test_env_override(self):
         """
@@ -110,7 +132,9 @@
         os.environ["XOS_CONFIG_FILE"] = "env.yaml"
         with self.assertRaises(Exception) as e:
             Config.init("missing_conf")
-        self.assertEqual(e.exception.message, "[XOS-Config] Config file not found at: env.yaml")
+        self.assertEqual(
+            e.exception.message, "[XOS-Config] Config file not found at: env.yaml"
+        )
         del os.environ["XOS_CONFIG_FILE"]
 
     def test_schema_override(self):
@@ -120,7 +144,10 @@
         os.environ["XOS_CONFIG_SCHEMA"] = "env-schema.yaml"
         with self.assertRaises(Exception) as e:
             Config.init(basic_conf)
-        self.assertRegexpMatches(e.exception.message, '\[XOS\-Config\] Config schema not found at: (.+)env-schema\.yaml')
+        self.assertRegexpMatches(
+            e.exception.message,
+            r"\[XOS\-Config\] Config schema not found at: (.+)env-schema\.yaml",
+        )
         # self.assertEqual(e.exception.message, "[XOS-Config] Config schema not found at: env-schema.yaml")
         del os.environ["XOS_CONFIG_SCHEMA"]
 
@@ -131,7 +158,13 @@
         os.environ["XOS_CONFIG_SCHEMA"] = small_schema
         with self.assertRaises(Exception) as e:
             Config.init(basic_conf)
-        self.assertEqual(e.exception.message, "[XOS-Config] The config format is wrong: Schema validation failed:\n - Key 'database' was not defined. Path: ''.")
+        self.assertEqual(
+            e.exception.message,
+            (
+                "[XOS-Config] The config format is wrong: Schema validation failed:\n"
+                " - Key 'database' was not defined. Path: ''."
+            ),
+        )
         del os.environ["XOS_CONFIG_SCHEMA"]
 
     def test_get_cli_param(self):
@@ -174,11 +207,7 @@
         # NOTE we are using Config2 here to be sure that the configuration is readable from any import,
         # not only from the one that has been used to initialize it
         res = Config2.get("database")
-        self.assertEqual(res, {
-            "name": "xos",
-            "username": "test",
-            "password": "safe"
-        })
+        self.assertEqual(res, {"name": "xos", "username": "test", "password": "safe"})
 
     def test_get_child_level(self):
         """
@@ -192,7 +221,7 @@
         """
         [XOS-Config] If an override is provided for the config, it should return the overridden value
         """
-        Config.init(sample_conf, 'xos-config-schema.yaml', override_conf)
+        Config.init(sample_conf, "xos-config-schema.yaml", override_conf)
         res = Config.get("logging.level")
         self.assertEqual(res, "info")
         res = Config.get("database.password")
@@ -200,13 +229,16 @@
 
     def test_config_extend(self):
         """
-        [XOS-Config] If an override is provided for the config, it should return the overridden value (also if not defined in the base one)
+        [XOS-Config] If an override is provided for the config, it should
+        return the overridden value (also if not defined in the base one)
         """
-        Config.init(sample_conf, 'xos-config-schema.yaml', extend_conf)
+
+        Config.init(sample_conf, "xos-config-schema.yaml", extend_conf)
         res = Config.get("xos_dir")
         self.assertEqual(res, "/opt/xos")
         res = Config.get("database.password")
         self.assertEqual(res, "safe")
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-config/xosconfig/__init__.py b/lib/xos-config/xosconfig/__init__.py
index d4e8305..9a0b30c 100644
--- a/lib/xos-config/xosconfig/__init__.py
+++ b/lib/xos-config/xosconfig/__init__.py
@@ -13,3 +13,5 @@
 # limitations under the License.
 
 from .config import Config
+
+__all__ = ["Config"]
diff --git a/lib/xos-config/xosconfig/config.py b/lib/xos-config/xosconfig/config.py
index ebc696f..aac6ffb 100644
--- a/lib/xos-config/xosconfig/config.py
+++ b/lib/xos-config/xosconfig/config.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +16,6 @@
 import os
 import sys
 import yaml
-import requests
 import default
 from pykwalify.core import Core as PyKwalify
 import pykwalify
@@ -25,20 +23,25 @@
 pykwalify.init_logging(1)
 
 DEFAULT_CONFIG_FILE = "/opt/xos/xos_config.yaml"
-DEFAULT_CONFIG_SCHEMA = 'xos-config-schema.yaml'
+DEFAULT_CONFIG_SCHEMA = "xos-config-schema.yaml"
 INITIALIZED = False
 CONFIG_FILE = None
 CONFIG = {}
 
 OVERRIDE_CONFIG = {}
 
+
 class Config:
     """
     XOS Configuration APIs
     """
 
     @staticmethod
-    def init(config_file=DEFAULT_CONFIG_FILE, config_schema=DEFAULT_CONFIG_SCHEMA, override_config_file=None):
+    def init(
+        config_file=DEFAULT_CONFIG_FILE,
+        config_schema=DEFAULT_CONFIG_SCHEMA,
+        override_config_file=None,
+    ):
 
         # make schema relative to this directory
         # TODO give the possibility to specify an absolute path
@@ -58,37 +61,40 @@
 
         # the config module can be initialized only one
         if INITIALIZED:
-            raise Exception('[XOS-Config] Module already initialized')
+            raise Exception("[XOS-Config] Module already initialized")
         INITIALIZED = True
 
         # if XOS_CONFIG_FILE is defined override the config_file
         # FIXME shouldn't this stay in whatever module call this one? and then just pass the file to the init method
-        if os.environ.get('XOS_CONFIG_FILE'):
-            config_file = os.environ['XOS_CONFIG_FILE']
+        if os.environ.get("XOS_CONFIG_FILE"):
+            config_file = os.environ["XOS_CONFIG_FILE"]
 
         # if XOS_CONFIG_SCHEMA is defined override the config_schema
         # FIXME shouldn't this stay in whatever module call this one? and then just pass the file to the init method
-        if os.environ.get('XOS_CONFIG_SCHEMA'):
-            config_schema = Config.get_abs_path(os.environ['XOS_CONFIG_SCHEMA'])
+        if os.environ.get("XOS_CONFIG_SCHEMA"):
+            config_schema = Config.get_abs_path(os.environ["XOS_CONFIG_SCHEMA"])
 
         # allow OVERRIDE_CONFIG_* to be overridden  by env vars
-        if os.environ.get('XOS_OVERRIDE_CONFIG_FILE'):
-            OVERRIDE_CONFIG_FILE = os.environ['XOS_OVERRIDE_CONFIG_FILE']
-        if os.environ.get('XOS_OVERRIDE_CONFIG_SCHEMA'):
-            OVERRIDE_CONFIG_SCHEMA = Config.get_abs_path(os.environ['XOS_OVERRIDE_CONFIG_SCHEMA'])
+        if os.environ.get("XOS_OVERRIDE_CONFIG_FILE"):
+            OVERRIDE_CONFIG_FILE = os.environ["XOS_OVERRIDE_CONFIG_FILE"]
+        if os.environ.get("XOS_OVERRIDE_CONFIG_SCHEMA"):
+            OVERRIDE_CONFIG_SCHEMA = Config.get_abs_path(
+                os.environ["XOS_OVERRIDE_CONFIG_SCHEMA"]
+            )
 
         # if a -C parameter is set in the cli override the config_file
         # FIXME shouldn't this stay in whatever module call this one? and then just pass the file to the init method
         if Config.get_cli_param(sys.argv):
             config_schema = Config.get_cli_param(sys.argv)
 
-
         CONFIG_FILE = config_file
         CONFIG = Config.read_config(config_file, config_schema)
 
         # if an override is set
         if OVERRIDE_CONFIG_FILE is not None:
-            OVERRIDE_CONFIG = Config.read_config(OVERRIDE_CONFIG_FILE, OVERRIDE_CONFIG_SCHEMA, True)
+            OVERRIDE_CONFIG = Config.read_config(
+                OVERRIDE_CONFIG_FILE, OVERRIDE_CONFIG_SCHEMA, True
+            )
 
     @staticmethod
     def get_config_file():
@@ -103,7 +109,7 @@
     def get_abs_path(path):
         if os.path.isabs(path):
             return path
-        return os.path.dirname(os.path.realpath(__file__)) + '/' + path
+        return os.path.dirname(os.path.realpath(__file__)) + "/" + path
 
     @staticmethod
     def validate_config_format(config_file, config_schema):
@@ -115,7 +121,7 @@
     def get_cli_param(args):
         last = None
         for arg in args:
-            if last == '-C':
+            if last == "-C":
                 return arg
             last = arg
 
@@ -127,25 +133,27 @@
         :return: dict
         """
 
-        if(not os.path.exists(config_file) and ignore_if_not_found):
+        if not os.path.exists(config_file) and ignore_if_not_found:
             return {}
 
         if not os.path.exists(config_file):
-            raise Exception('[XOS-Config] Config file not found at: %s' % config_file)
+            raise Exception("[XOS-Config] Config file not found at: %s" % config_file)
 
         if not os.path.exists(config_schema):
-            raise Exception('[XOS-Config] Config schema not found at: %s' % config_schema)
+            raise Exception(
+                "[XOS-Config] Config schema not found at: %s" % config_schema
+            )
 
         try:
             Config.validate_config_format(config_file, config_schema)
-        except Exception, e:
+        except Exception as e:
             try:
                 error_msg = e.msg
             except AttributeError:
                 error_msg = str(e)
-            raise Exception('[XOS-Config] The config format is wrong: %s' % error_msg)
+            raise Exception("[XOS-Config] The config format is wrong: %s" % error_msg)
 
-        with open(config_file, 'r') as stream:
+        with open(config_file, "r") as stream:
             return yaml.safe_load(stream)
 
     @staticmethod
@@ -161,7 +169,7 @@
         global OVERRIDE_CONFIG_FILE
 
         if not INITIALIZED:
-            raise Exception('[XOS-Config] Module has not been initialized')
+            raise Exception("[XOS-Config] Module has not been initialized")
 
         val = Config.get_param(query, CONFIG)
         if OVERRIDE_CONFIG_FILE or not val:
@@ -186,10 +194,10 @@
         :param config: the config source to read from (can be the config file or the defaults)
         :return: the requested parameter in any format the parameter is specified
         """
-        keys = query.split('.')
+        keys = query.split(".")
         if len(keys) == 1:
             key = keys[0]
-            if not config.has_key(key):
+            if key not in config:
                 return None
             return config[key]
         else:
@@ -205,10 +213,11 @@
         """
         param = config
         for k in keys:
-            if not param.has_key(k):
+            if k not in param:
                 return None
             param = param[k]
         return param
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     Config.init()
diff --git a/lib/xos-config/xosconfig/default.py b/lib/xos-config/xosconfig/default.py
index 2c73b26..afed387 100644
--- a/lib/xos-config/xosconfig/default.py
+++ b/lib/xos-config/xosconfig/default.py
@@ -13,52 +13,34 @@
 # limitations under the License.
 
 DEFAULT_VALUES = {
-    'xos_dir': '/opt/xos',
-
+    "xos_dir": "/opt/xos",
     # The configuration below inherits from the standard config of the Python logging module
     # See: https://docs.python.org/2/library/logging.config.html
     # multistructlog supports this config in all of its generality
     # So for instance, you can add new handlers. Note that all handlers will
     # receive logs simultaneously.
-
-    'blueprints': {},
-    'logging': {
-        'version': 1,
-        'handlers': {
-            'console': {
-                'class': 'logging.StreamHandler',
+    "blueprints": {},
+    "logging": {
+        "version": 1,
+        "handlers": {
+            "console": {"class": "logging.StreamHandler"},
+            "file": {
+                "class": "logging.handlers.RotatingFileHandler",
+                "filename": "/var/log/xos.log",
+                "maxBytes": 10485760,
+                "backupCount": 5,
             },
-            'file': {
-                'class': 'logging.handlers.RotatingFileHandler',
-                'filename': '/var/log/xos.log',
-                'maxBytes': 10485760,
-                'backupCount': 5
-            }
         },
-        'loggers': {
-            '': {
-                'handlers': ['console', 'file'],
-                'level': 'DEBUG'
-            }
-        }
+        "loggers": {"": {"handlers": ["console", "file"], "level": "DEBUG"}},
     },
-    'accessor': {
-        'endpoint': 'xos-core.cord.lab:50051',
-        'kind': 'grpcapi',
-    },
-    'keep_temp_files': False,
-    'dependency_graph': None,
-    'error_map_path': '/opt/xos/error_map.txt',
-    'feefie': {
-        'client_user': 'pl'
-    },
-    'proxy_ssh': {
-        'enabled': True,
-        'key': '/opt/cord_profile/node_key',
-        'user': 'root'
-    },
-    'node_key': '/opt/cord_profile/node_key',
-    'config_dir': '/etc/xos/sync',
-    'backoff_disabled': True,
-    'kafka_bootstrap_servers': ['cord-kafka:9092'],
+    "accessor": {"endpoint": "xos-core.cord.lab:50051", "kind": "grpcapi"},
+    "keep_temp_files": False,
+    "dependency_graph": None,
+    "error_map_path": "/opt/xos/error_map.txt",
+    "feefie": {"client_user": "pl"},
+    "proxy_ssh": {"enabled": True, "key": "/opt/cord_profile/node_key", "user": "root"},
+    "node_key": "/opt/cord_profile/node_key",
+    "config_dir": "/etc/xos/sync",
+    "backoff_disabled": True,
+    "kafka_bootstrap_servers": ["cord-kafka:9092"],
 }
diff --git a/lib/xos-config/xosconfig/version.py b/lib/xos-config/xosconfig/version.py
index a118c43..2c84950 100644
--- a/lib/xos-config/xosconfig/version.py
+++ b/lib/xos-config/xosconfig/version.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-
 # This file will be replaced by setup.py
 __version__ = "unknown"
diff --git a/lib/xos-genx/__init__.py b/lib/xos-genx/__init__.py
index 42722a8..b0fb0b2 100644
--- a/lib/xos-genx/__init__.py
+++ b/lib/xos-genx/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/lib/xos-genx/bin/xosgenx b/lib/xos-genx/bin/xosgenx
index d48f6b0..46392d7 100644
--- a/lib/xos-genx/bin/xosgenx
+++ b/lib/xos-genx/bin/xosgenx
@@ -1,4 +1,4 @@
 #!/usr/bin/env python
 
 from xosgenx.xosgen import XosGen
-XosGen.init()
\ No newline at end of file
+XosGen.init()
diff --git a/lib/xos-genx/setup.py b/lib/xos-genx/setup.py
index c97ba52..c294ca4 100644
--- a/lib/xos-genx/setup.py
+++ b/lib/xos-genx/setup.py
@@ -14,8 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from setuptools import setup
-
 try:
     from xosutil.autoversion_setup import setup_with_auto_version as setup
 except ImportError:
@@ -25,18 +23,15 @@
 
 from xosgenx.version import __version__
 
-setup(name='XosGenX',
-      version=__version__,
-      description='XOS Generative Toolchain',
-      author='Sapan Bhatia, Matteo Scandolo',
-      author_email='sapan@opennetworking.org, teo@opennetworking.org',
-      packages=['xosgenx'],
-      scripts=['bin/xosgenx'],
-      include_package_data=True,
-      # TODO add all deps to the install_requires section
-      install_requires=[
-          'inflect>=1.0.1',
-          'astunparse>=1.5.0'
-          ]
-     )
-
+setup(
+    name="XosGenX",
+    version=__version__,
+    description="XOS Generative Toolchain",
+    author="Sapan Bhatia, Matteo Scandolo",
+    author_email="sapan@opennetworking.org, teo@opennetworking.org",
+    packages=["xosgenx"],
+    scripts=["bin/xosgenx"],
+    include_package_data=True,
+    # TODO add all deps to the install_requires section
+    install_requires=["inflect>=1.0.1", "astunparse>=1.5.0"],
+)
diff --git a/lib/xos-genx/tox.ini b/lib/xos-genx/tox.ini
new file mode 100644
index 0000000..56c85a5
--- /dev/null
+++ b/lib/xos-genx/tox.ini
@@ -0,0 +1,21 @@
+; Copyright 2017-present Open Networking Foundation
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+; http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+
+; using tox.ini to store config information for other tools
+
+[flake8]
+; F821, ignoring undefined names would be valuable, but our testing dynamically loads them
+; W503, allow breaks before binary operators (see: https://github.com/PyCQA/pycodestyle/issues/498)
+ignore = F821, W503
+max-line-length = 200
diff --git a/lib/xos-genx/xos-genx-tests/__init__.py b/lib/xos-genx/xos-genx-tests/__init__.py
index d4e8062..b0fb0b2 100644
--- a/lib/xos-genx/xos-genx-tests/__init__.py
+++ b/lib/xos-genx/xos-genx-tests/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/lib/xos-genx/xos-genx-tests/attics/xosmodel_bottom.py b/lib/xos-genx/xos-genx-tests/attics/xosmodel_bottom.py
index 734aed5..b4fbd54 100644
--- a/lib/xos-genx/xos-genx-tests/attics/xosmodel_bottom.py
+++ b/lib/xos-genx/xos-genx-tests/attics/xosmodel_bottom.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,4 +14,4 @@
 
 
 def bottom():
-    return 'bottom'
+    return "bottom"
diff --git a/lib/xos-genx/xos-genx-tests/attics/xosmodel_header.py b/lib/xos-genx/xos-genx-tests/attics/xosmodel_header.py
index 5cee4a3..9ac13cc 100644
--- a/lib/xos-genx/xos-genx-tests/attics/xosmodel_header.py
+++ b/lib/xos-genx/xos-genx-tests/attics/xosmodel_header.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,4 +14,4 @@
 
 
 def header():
-    return 'header'
+    return "header"
diff --git a/lib/xos-genx/xos-genx-tests/attics/xosmodel_model.py b/lib/xos-genx/xos-genx-tests/attics/xosmodel_model.py
index 048641a..5d5ab91 100644
--- a/lib/xos-genx/xos-genx-tests/attics/xosmodel_model.py
+++ b/lib/xos-genx/xos-genx-tests/attics/xosmodel_model.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,4 +14,4 @@
 
 
 def model():
-    return 'model'
+    return "model"
diff --git a/lib/xos-genx/xos-genx-tests/attics/xosmodel_top.py b/lib/xos-genx/xos-genx-tests/attics/xosmodel_top.py
index 79752d0..e25560a 100644
--- a/lib/xos-genx/xos-genx-tests/attics/xosmodel_top.py
+++ b/lib/xos-genx/xos-genx-tests/attics/xosmodel_top.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,4 +14,4 @@
 
 
 def top():
-    return 'top'
+    return "top"
diff --git a/lib/xos-genx/xos-genx-tests/counts b/lib/xos-genx/xos-genx-tests/counts
index 8ab44d4..bfbbd7c 100755
--- a/lib/xos-genx/xos-genx-tests/counts
+++ b/lib/xos-genx/xos-genx-tests/counts
@@ -5,14 +5,16 @@
 from core.models import XOSBase, PlModelMixIn
 import pdb
 
+
 def count(lst):
     c = 0
     for l in lst[0]:
-       ll = l.lstrip()
-       if (ll and not ll.startswith('#') and ll.rstrip()!='pass' and 'ModelLink' not in ll and 'CHOICES' not in ll):
-           c+=1
+        ll = l.lstrip()
+        if (ll and not ll.startswith('#') and ll.rstrip() != 'pass' and 'ModelLink' not in ll and 'CHOICES' not in ll):
+            c += 1
     return c
 
+
 def is_model_class(model):
     """ Return True if 'model' is something that we're interested in """
     if not inspect.isclass(model):
@@ -26,9 +28,9 @@
 
     return False
 
+
 for a in dir(core.models):
-    x = getattr(core.models,a)
+    x = getattr(core.models, a)
     if (is_model_class(x)):
         lines = inspect.getsourcelines(x)
-        print x.__name__,":",count(lines)
-
+        print x.__name__, ":", count(lines)
diff --git a/lib/xos-genx/xos-genx-tests/helpers.py b/lib/xos-genx/xos-genx-tests/helpers.py
index ae52076..4687bb3 100644
--- a/lib/xos-genx/xos-genx-tests/helpers.py
+++ b/lib/xos-genx/xos-genx-tests/helpers.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,18 +18,19 @@
 # Constants
 OUTPUT_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/out/")
 
-TMP_TARGET_PATH = os.path.join(OUTPUT_DIR, 'tmp.xtarget')
+TMP_TARGET_PATH = os.path.join(OUTPUT_DIR, "tmp.xtarget")
 
 # Passed around in various security / validation checks
+
+
 class FakeObject:
     pass
 
-class XProtoTestHelpers:
 
+class XProtoTestHelpers:
     @staticmethod
     def write_tmp_target(target):
-        tmp_file = open(TMP_TARGET_PATH, 'w')
+        tmp_file = open(TMP_TARGET_PATH, "w")
         tmp_file.write(target)
         tmp_file.close()
         return TMP_TARGET_PATH
-
diff --git a/lib/xos-genx/xos-genx-tests/test_cli.py b/lib/xos-genx/xos-genx-tests/test_cli.py
index 4beef06..3f94865 100644
--- a/lib/xos-genx/xos-genx-tests/test_cli.py
+++ b/lib/xos-genx/xos-genx-tests/test_cli.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,33 +18,39 @@
 from mock import patch
 from xosgenx.xosgen import XosGen
 
+
 class Args:
     pass
 
+
 class XOSProcessorTest(unittest.TestCase):
     """
     Testing the CLI binding for the XOS Generative Toolchain
     """
 
     def setUp(self):
-        os.chdir(os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), "..")) 
+        os.chdir(
+            os.path.join(
+                os.path.abspath(os.path.dirname(os.path.realpath(__file__))), ".."
+            )
+        )
 
     def test_generator(self):
         """
         [XOS-GenX] The CLI entry point should correctly parse params
         """
         args = Args()
-        args.files = ['xos-genx-tests/xproto/test.xproto']
-        args.target = 'xos-genx-tests/xtarget/test.xtarget'
-        args.output = 'xos-genx-tests/out/dir/'
+        args.files = ["xos-genx-tests/xproto/test.xproto"]
+        args.target = "xos-genx-tests/xtarget/test.xtarget"
+        args.output = "xos-genx-tests/out/dir/"
         args.write_to_file = "target"
         args.dest_file = None
         args.dest_extension = None
 
         expected_args = Args()
-        expected_args.files = [os.path.abspath(os.getcwd() + '/' + args.files[0])]
-        expected_args.target = os.path.abspath(os.getcwd() + '/' + args.target)
-        expected_args.output = os.path.abspath(os.getcwd() + '/' + args.output)
+        expected_args.files = [os.path.abspath(os.getcwd() + "/" + args.files[0])]
+        expected_args.target = os.path.abspath(os.getcwd() + "/" + args.target)
+        expected_args.output = os.path.abspath(os.getcwd() + "/" + args.output)
 
         with patch("xosgenx.xosgen.XOSProcessor.process") as generator:
             XosGen.init(args)
@@ -53,5 +58,6 @@
             self.assertEqual(actual_args.files, expected_args.files)
             self.assertEqual(actual_args.output, expected_args.output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xos-genx-tests/test_django_generator.py b/lib/xos-genx/xos-genx-tests/test_django_generator.py
index 4f4dae4..a81f80c 100644
--- a/lib/xos-genx/xos-genx-tests/test_django_generator.py
+++ b/lib/xos-genx/xos-genx-tests/test_django_generator.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,34 +17,36 @@
 import os
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 
-VROUTER_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/vrouterport.xproto")
+VROUTER_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/vrouterport.xproto"
+)
 
 # Generate Protobuf from Xproto and then parse the resulting Protobuf
+
+
 class XProtoProtobufGeneratorTest(unittest.TestCase):
     def test_proto_generator(self):
         """
         [XOS-GenX] Generate DJANGO models, verify Fields and Foreign Keys
         """
-        args = XOSProcessorArgs(files = [VROUTER_XPROTO],
-                                target = 'django.xtarget')
+        args = XOSProcessorArgs(files=[VROUTER_XPROTO], target="django.xtarget")
         output = XOSProcessor.process(args)
 
-        fields = filter(lambda s:'Field(' in s, output.splitlines())
+        fields = filter(lambda s: "Field(" in s, output.splitlines())
         self.assertEqual(len(fields), 2)
-        links = filter(lambda s:'Key(' in s, output.splitlines())
+        links = filter(lambda s: "Key(" in s, output.splitlines())
         self.assertEqual(len(links), 2)
 
     def test_optional_relations(self):
         """
         [XOS-GenX] Generate DJANGO models, verify relations
         """
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
 
             message ENodeB {
             }
-            
+
             message Handover {
             }
 
@@ -55,15 +56,14 @@
             }
             """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = 'django.xtarget')
+        args = XOSProcessorArgs(inputs=xproto, target="django.xtarget")
         output = XOSProcessor.process(args)
 
-        null_true = filter(lambda s: 'null = True' in s, output.splitlines())
-        null_false = filter(lambda s: 'null = False' in s, output.splitlines())
+        null_true = filter(lambda s: "null = True" in s, output.splitlines())
+        null_false = filter(lambda s: "null = False" in s, output.splitlines())
 
-        blank_true = filter(lambda s: 'blank = True' in s, output.splitlines())
-        blank_false = filter(lambda s: 'blank = False' in s, output.splitlines())
+        blank_true = filter(lambda s: "blank = True" in s, output.splitlines())
+        blank_false = filter(lambda s: "blank = False" in s, output.splitlines())
 
         self.assertEqual(len(null_true), 1)
         self.assertEqual(len(null_false), 1)
@@ -74,8 +74,7 @@
         """
         [XOS-GenX] Generate DJANGO models, verify feedback_state fields
         """
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
 
             message ParentFoo {
@@ -87,8 +86,7 @@
             }
             """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = 'django.xtarget')
+        args = XOSProcessorArgs(inputs=xproto, target="django.xtarget")
         output = XOSProcessor.process(args)
 
         self.assertIn("feedback_state_fields = ['parent_name', 'name']", output)
@@ -97,8 +95,7 @@
         """
         [XOS-GenX] Use django validors for min and max values
         """
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
 
             message Foo (ParentFoo) {
@@ -106,15 +103,13 @@
             }
             """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = 'django.xtarget')
+        args = XOSProcessorArgs(inputs=xproto, target="django.xtarget")
         output = XOSProcessor.process(args)
 
         self.assertIn("validators=[", output)
         self.assertIn("MinValueValidator(1)", output)
         self.assertIn("MaxValueValidator(10)", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_field_graph.py b/lib/xos-genx/xos-genx-tests/test_field_graph.py
index 478708a..cfb4c8b 100644
--- a/lib/xos-genx/xos-genx-tests/test_field_graph.py
+++ b/lib/xos-genx/xos-genx-tests/test_field_graph.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,11 +17,12 @@
 from xosgenx.jinja2_extensions import FieldNotFound
 from helpers import XProtoTestHelpers
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
+from functools import reduce
+
 
 class XProtoFieldGraphTest(unittest.TestCase):
     def _test_field_graph(self):
-        xproto = \
-"""
+        xproto = """
 message VRouterDevice (PlCoreBase){
      optional string name = 1 [help_text = "device friendly name", max_length = 20, null = True, db_index = False, blank = True, unique_with="openflow_id"];
      required string openflow_id = 2 [help_text = "device identifier in ONOS", max_length = 20, null = False, db_index = False, blank = False, unique_with="name"];
@@ -39,45 +39,46 @@
 }
 """
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
 {{ xproto_field_graph_components(proto.messages.0.fields, proto.messages.0) }}
-""")
+"""
+        )
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = target)
+        args = XOSProcessorArgs(inputs=xproto, target=target)
         output = XOSProcessor.process(args)
-        output =  eval(output)
-        self.assertIn({'A','B','C'}, output)
-        self.assertIn({'openflow_id','name'}, output)
-        self.assertIn({'config_key','vrouter_service','driver'}, output)
-        self.assertIn({'E','F','G'}, output)
-        
-        union = reduce(lambda acc,x: acc | x, output)
-        self.assertNotIn('D', union) 
+        output = eval(output)
+        self.assertIn({"A", "B", "C"}, output)
+        self.assertIn({"openflow_id", "name"}, output)
+        self.assertIn({"config_key", "vrouter_service", "driver"}, output)
+        self.assertIn({"E", "F", "G"}, output)
+
+        union = reduce(lambda acc, x: acc | x, output)
+        self.assertNotIn("D", union)
 
     def test_missing_field(self):
-        xproto = \
-"""
+        xproto = """
 message Foo (PlCoreBase){
      required string A = 6 [unique_with="B"];
 }
 """
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
 {{ xproto_field_graph_components(proto.messages.0.fields, proto.messages.0) }}
-""")
+"""
+        )
 
         def generate():
-            args = XOSProcessorArgs(inputs = xproto,
-                                    target = target)
+            args = XOSProcessorArgs(inputs=xproto, target=target)
             output = XOSProcessor.process(args)
 
         with self.assertRaises(FieldNotFound) as e:
-             generate()
+            generate()
 
-        self.assertEqual(e.exception.message, 'Field "B" not found in model "Foo", referenced from field "A" by option "unique_with"')
+        self.assertEqual(
+            e.exception.message,
+            'Field "B" not found in model "Foo", referenced from field "A" by option "unique_with"',
+        )
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_general_security.py b/lib/xos-genx/xos-genx-tests/test_general_security.py
index 1a7b7ca..c675b16 100644
--- a/lib/xos-genx/xos-genx-tests/test_general_security.py
+++ b/lib/xos-genx/xos-genx-tests/test_general_security.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,33 +20,38 @@
 """The function below is for eliminating warnings arising due to the missing output_security_check,
 which is generated and loaded dynamically.
 """
+
+
 def output_security_check(x, y):
     raise Exception("Security enforcer not generated. Test failed.")
     return False
 
+
 """
-The tests below use the Python code target to generate 
+The tests below use the Python code target to generate
 Python security policies, set up an appropriate environment and execute the Python.
 """
+
+
 class XProtoSecurityTest(unittest.TestCase):
     def setUp(self):
-        self.target = XProtoTestHelpers.write_tmp_target("""
+        self.target = XProtoTestHelpers.write_tmp_target(
+            """
 {% for name, policy in proto.policies.items() %}
 {{ xproto_fol_to_python_test(name, policy, None, '0') }}
 {% endfor %}
-""")
+"""
+        )
 
     def test_constant(self):
-        xproto = \
-"""
+        xproto = """
     policy output < True >
 """
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def output_security_check(obj, ctx):
@@ -59,17 +63,15 @@
         self.assertTrue(verdict)
 
     def test_equal(self):
-        xproto = \
-"""
+        xproto = """
     policy output < ctx.user = obj.user >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def output_security_check(obj, ctx):
@@ -85,18 +87,18 @@
         verdict = output_security_check(obj, ctx)
 
     def test_call_policy(self):
-        xproto = \
-"""
+        xproto = """
     policy sub_policy < ctx.user = obj.user >
     policy output < *sub_policy(child) >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output,globals()) # This loads the generated function, which should look like this:
+        exec(
+            output, globals()
+        )  # This loads the generated function, which should look like this:
 
         """
         def sub_policy_security_check(obj, ctx):
@@ -105,10 +107,10 @@
 
         def output_security_check(obj, ctx):
             if obj.child:
-		i1 = sub_policy_security_check(obj.child, ctx)
-	    else:
-		i1 = True
-	    return i1
+                i1 = sub_policy_security_check(obj.child, ctx)
+            else:
+                i1 = True
+            return i1
         """
 
         obj = FakeObject()
@@ -122,18 +124,18 @@
         self.assertTrue(verdict)
 
     def test_call_policy_child_none(self):
-        xproto = \
-"""
+        xproto = """
     policy sub_policy < ctx.user = obj.user >
     policy output < *sub_policy(child) >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output,globals()) # This loads the generated function, which should look like this:
+        exec(
+            output, globals()
+        )  # This loads the generated function, which should look like this:
 
         """
         def sub_policy_security_check(obj, ctx):
@@ -142,10 +144,10 @@
 
         def output_security_check(obj, ctx):
             if obj.child:
-		i1 = sub_policy_security_check(obj.child, ctx)
-	    else:
-		i1 = True
-	    return i1
+                i1 = sub_policy_security_check(obj.child, ctx)
+            else:
+                i1 = True
+            return i1
         """
 
         obj = FakeObject()
@@ -158,23 +160,21 @@
         self.assertTrue(verdict)
 
     def test_bin(self):
-        xproto = \
-"""
+        xproto = """
     policy output < ctx.is_admin = True | obj.empty = True>
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
-	def output_security_check(obj, ctx):
-	    i2 = (ctx.is_admin == True)
-	    i3 = (obj.empty == True)
-	    i1 = (i2 or i3)
-	    return i1
+        def output_security_check(obj, ctx):
+            i2 = (ctx.is_admin == True)
+            i3 = (obj.empty == True)
+            i1 = (i2 or i3)
+            return i1
         """
 
         obj = FakeObject()
@@ -187,35 +187,30 @@
 
         self.assertTrue(verdict)
 
-        
     def test_exists(self):
-        xproto = \
-"""
+        xproto = """
     policy output < exists Privilege: Privilege.object_id = obj.id >
 """
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
-	def output_security_check(obj, ctx):
-	    i1 = Privilege.objects.filter(object_id=obj.id)
-    	    return i1
+        def output_security_check(obj, ctx):
+            i1 = Privilege.objects.filter(object_id=obj.id)
+                return i1
         """
 
         self.assertTrue(output_security_check is not None)
-	
+
     def test_python(self):
-        xproto = \
-"""
+        xproto = """
     policy output < {{ "jack" in ["the", "box"] }} = False >
 """
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def output_security_check(obj, ctx):
@@ -228,13 +223,11 @@
 
     def test_forall(self):
         # This one we only parse
-        xproto = \
-"""
+        xproto = """
     policy output < forall Credential: Credential.obj_id = obj_id >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
         """
@@ -245,5 +238,6 @@
         """
         exec(output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xos-genx-tests/test_general_validation.py b/lib/xos-genx/xos-genx-tests/test_general_validation.py
index 048167a..0e2a785 100644
--- a/lib/xos-genx/xos-genx-tests/test_general_validation.py
+++ b/lib/xos-genx/xos-genx-tests/test_general_validation.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,33 +21,38 @@
 """The function below is for eliminating warnings arising due to the missing policy_output_validator,
 which is generated and loaded dynamically.
 """
+
+
 def policy_output_validator(x, y):
     raise Exception("Validator not generated. Test failed.")
     return False
 
+
 """
-The tests below use the Python code target to generate 
+The tests below use the Python code target to generate
 Python validation policies, set up an appropriate environment and execute the Python.
 """
+
+
 class XProtoGeneralValidationTest(unittest.TestCase):
     def setUp(self):
-        self.target = XProtoTestHelpers.write_tmp_target("""
+        self.target = XProtoTestHelpers.write_tmp_target(
+            """
 {% for name, policy in proto.policies.items() %}
 {{ xproto_fol_to_python_validator(name, policy, None, 'Necessary Failure') }}
 {% endfor %}
-""")
+"""
+        )
 
     def test_constant(self):
-        xproto = \
-"""
+        xproto = """
     policy output < False >
 """
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -58,20 +62,18 @@
         """
 
         with self.assertRaises(Exception):
-           policy_output_validator({}, {})
-    
+            policy_output_validator({}, {})
+
     def test_equal(self):
-        xproto = \
-"""
+        xproto = """
     policy output < not (ctx.user = obj.user) >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -87,20 +89,18 @@
         ctx.user = 1
 
         with self.assertRaises(Exception):
-           policy_output_validator(obj, ctx)
+            policy_output_validator(obj, ctx)
 
     def test_equal(self):
-        xproto = \
-"""
+        xproto = """
     policy output < not (ctx.user = obj.user) >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -116,11 +116,10 @@
         ctx.user = 1
 
         with self.assertRaises(Exception):
-           policy_output_validator(obj, ctx)
+            policy_output_validator(obj, ctx)
 
     def test_bin(self):
-        xproto = \
-"""
+        xproto = """
     policy output < (ctx.is_admin = True | obj.empty = True) | False>
 """
 
@@ -129,7 +128,7 @@
         args.target = self.target
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -149,17 +148,14 @@
         with self.assertRaises(Exception):
             verdict = policy_output_validator(obj, ctx)
 
-        
     def test_exists(self):
-        xproto = \
-"""
+        xproto = """
     policy output < exists Privilege: Privilege.object_id = obj.id >
 """
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -169,16 +165,14 @@
         """
 
         self.assertTrue(policy_output_validator is not None)
-	
+
     def test_python(self):
-        xproto = \
-"""
+        xproto = """
     policy output < {{ "jack" in ["the", "box"] }} = True >
 """
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -192,18 +186,18 @@
             self.assertTrue(policy_output_validator({}, {}) is True)
 
     def test_call_policy(self):
-        xproto = \
-"""
+        xproto = """
     policy sub_policy < ctx.user = obj.user >
     policy output < *sub_policy(child) >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
-        exec(output,globals()) # This loads the generated function, which should look like this:
+        exec(
+            output, globals()
+        )  # This loads the generated function, which should look like this:
 
         """
         def policy_sub_policy_validator(obj, ctx):
@@ -229,13 +223,11 @@
 
     def test_forall(self):
         # This one we only parse
-        xproto = \
-"""
+        xproto = """
     policy output < forall Credential: Credential.obj_id = obj_id >
 """
 
-        args = XOSProcessorArgs(inputs = xproto,
-                                target = self.target)
+        args = XOSProcessorArgs(inputs=xproto, target=self.target)
 
         output = XOSProcessor.process(args)
 
@@ -246,7 +238,8 @@
             return i1
         """
 
-        self.assertIn('policy_output_validator', output)
+        self.assertIn("policy_output_validator", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xos-genx-tests/test_generator.py b/lib/xos-genx/xos-genx-tests/test_generator.py
index 3daa594..1de6bd8 100644
--- a/lib/xos-genx/xos-genx-tests/test_generator.py
+++ b/lib/xos-genx/xos-genx-tests/test_generator.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -30,38 +29,64 @@
                 description: "Help Files"
 """
 
-BASE_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/base.xproto")
-TEST_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/test.xproto")
-FIELDTEST_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/fieldtest.xproto")
-REVERSEFIELDTEST_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/reversefieldtest.xproto")
-FILTERTEST_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/filtertest.xproto")
-SKIP_DJANGO_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/skip_django.xproto")
-VROUTER_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/vrouterport.xproto")
-TEST_TARGET = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xtarget/test.xtarget")
-FIELDTEST_TARGET = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xtarget/fieldtest.xtarget")
-FILTERTEST_TARGET = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xtarget/filtertest.xtarget")
-SPLIT_TARGET = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xtarget/split.xtarget")
+BASE_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/base.xproto"
+)
+TEST_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/test.xproto"
+)
+FIELDTEST_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/fieldtest.xproto"
+)
+REVERSEFIELDTEST_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/reversefieldtest.xproto"
+)
+FILTERTEST_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/filtertest.xproto"
+)
+SKIP_DJANGO_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/skip_django.xproto"
+)
+VROUTER_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/vrouterport.xproto"
+)
+TEST_TARGET = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xtarget/test.xtarget"
+)
+FIELDTEST_TARGET = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xtarget/fieldtest.xtarget"
+)
+FILTERTEST_TARGET = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xtarget/filtertest.xtarget"
+)
+SPLIT_TARGET = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xtarget/split.xtarget"
+)
 
 TEST_ATTICS = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/attics/")
 
+
 class XOSProcessorTest(unittest.TestCase):
     """
     Testing the XOS Generative Toolchain
     """
 
     def setUp(self):
-        os.chdir(os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), "..")) 
+        os.chdir(
+            os.path.join(
+                os.path.abspath(os.path.dirname(os.path.realpath(__file__))), ".."
+            )
+        )
         filesToRemove = [f for f in os.listdir(OUTPUT_DIR)]
         for f in filesToRemove:
-            if not f.startswith('.'):
-                os.remove(OUTPUT_DIR + '/' + f)
+            if not f.startswith("."):
+                os.remove(OUTPUT_DIR + "/" + f)
 
     def test_generator_custom_target_from_file(self):
         """
         [XOS-GenX] Generate output from base.xproto
         """
-        args = XOSProcessorArgs(files = [TEST_XPROTO],
-                                target = TEST_TARGET)
+        args = XOSProcessorArgs(files=[TEST_XPROTO], target=TEST_TARGET)
         output = XOSProcessor.process(args)
         self.assertEqual(output, TEST_EXPECTED_OUTPUT)
 
@@ -69,8 +94,7 @@
         """
         [XOS-GenX] Generate output from base.xproto
         """
-        args = XOSProcessorArgs(inputs = open(TEST_XPROTO).read(),
-                                target = TEST_TARGET)
+        args = XOSProcessorArgs(inputs=open(TEST_XPROTO).read(), target=TEST_TARGET)
         output = XOSProcessor.process(args)
         self.assertEqual(output, TEST_EXPECTED_OUTPUT)
 
@@ -78,73 +102,79 @@
         """
         [XOS-GenX] Generate django output from test.xproto
         """
-        args = XOSProcessorArgs(files = [TEST_XPROTO, VROUTER_XPROTO],
-                                target = 'django.xtarget',
-                                attic = TEST_ATTICS,
-                                output = OUTPUT_DIR,
-                                dest_extension = 'py',
-                                write_to_file = 'model')
+        args = XOSProcessorArgs(
+            files=[TEST_XPROTO, VROUTER_XPROTO],
+            target="django.xtarget",
+            attic=TEST_ATTICS,
+            output=OUTPUT_DIR,
+            dest_extension="py",
+            write_to_file="model",
+        )
         output = XOSProcessor.process(args)
 
         # xosmodel has custom header attic
-        self.assertIn('from xosmodel_header import *', output['XOSModel'])
-        self.assertIn('class XOSModel(XOSBase):', output['XOSModel'])
+        self.assertIn("from xosmodel_header import *", output["XOSModel"])
+        self.assertIn("class XOSModel(XOSBase):", output["XOSModel"])
 
         # vrouter port use the default header
-        self.assertIn('header import *', output['VRouterPort'])
-        self.assertIn('class VRouterPort(XOSBase):', output['VRouterPort'])
+        self.assertIn("header import *", output["VRouterPort"])
+        self.assertIn("class VRouterPort(XOSBase):", output["VRouterPort"])
 
-        #verify files
-        xosmodel = OUTPUT_DIR + '/xosmodel.py'
+        # verify files
+        xosmodel = OUTPUT_DIR + "/xosmodel.py"
         self.assertTrue(os.path.isfile(xosmodel))
         xmf = open(xosmodel).read()
-        self.assertIn('from xosmodel_header import *', xmf)
-        self.assertIn('class XOSModel(XOSBase):', xmf)
+        self.assertIn("from xosmodel_header import *", xmf)
+        self.assertIn("class XOSModel(XOSBase):", xmf)
 
-        vrouterport = OUTPUT_DIR + '/vrouterport.py'
+        vrouterport = OUTPUT_DIR + "/vrouterport.py"
         self.assertTrue(os.path.isfile(vrouterport))
         vrpf = open(vrouterport).read()
-        self.assertIn('header import *', vrpf)
-        self.assertIn('class VRouterPort(XOSBase):', vrpf)
+        self.assertIn("header import *", vrpf)
+        self.assertIn("class VRouterPort(XOSBase):", vrpf)
 
     def test_django_with_base(self):
-        args = XOSProcessorArgs(files = [TEST_XPROTO, BASE_XPROTO],
-                                target = 'django.xtarget',
-                                attic = TEST_ATTICS,
-                                output = OUTPUT_DIR,
-                                dest_extension = 'py',
-                                write_to_file = 'model')
+        args = XOSProcessorArgs(
+            files=[TEST_XPROTO, BASE_XPROTO],
+            target="django.xtarget",
+            attic=TEST_ATTICS,
+            output=OUTPUT_DIR,
+            dest_extension="py",
+            write_to_file="model",
+        )
         output = XOSProcessor.process(args)
 
         # verify files
-        xosmodel = OUTPUT_DIR + '/xosmodel.py'
+        xosmodel = OUTPUT_DIR + "/xosmodel.py"
         self.assertTrue(os.path.isfile(xosmodel))
         xmf = open(xosmodel).read()
-        self.assertIn('from xosmodel_header import *', xmf)
-        self.assertIn('class XOSModel(XOSBase):', xmf)
+        self.assertIn("from xosmodel_header import *", xmf)
+        self.assertIn("class XOSModel(XOSBase):", xmf)
 
-        xosbase = OUTPUT_DIR + '/xosbase.py'
+        xosbase = OUTPUT_DIR + "/xosbase.py"
         self.assertTrue(os.path.isfile(xosbase))
         xbf = open(xosbase).read()
-        self.assertIn('header import *', xbf)
-        self.assertIn('class XOSBase(models.Model, PlModelMixIn):', xbf)
+        self.assertIn("header import *", xbf)
+        self.assertIn("class XOSBase(models.Model, PlModelMixIn):", xbf)
 
     def test_write_multiple_files(self):
         """
         [XOS-GenX] read multiple models as input, print one file per model
         """
-        args = XOSProcessorArgs(files = [TEST_XPROTO, VROUTER_XPROTO],
-                                target = TEST_TARGET,
-                                output = OUTPUT_DIR,
-                                dest_extension = 'txt',
-                                write_to_file = 'model')
+        args = XOSProcessorArgs(
+            files=[TEST_XPROTO, VROUTER_XPROTO],
+            target=TEST_TARGET,
+            output=OUTPUT_DIR,
+            dest_extension="txt",
+            write_to_file="model",
+        )
         XOSProcessor.process(args)
 
-        generated_files = [f for f in os.listdir(OUTPUT_DIR) if not f.startswith('.')]
+        generated_files = [f for f in os.listdir(OUTPUT_DIR) if not f.startswith(".")]
         self.assertEqual(len(generated_files), 2)
 
-        xosmodel = open(os.path.join(OUTPUT_DIR, 'xosmodel.txt'), "r").read()
-        vrouterport = open(os.path.join(OUTPUT_DIR, 'vrouterport.txt'), "r").read()
+        xosmodel = open(os.path.join(OUTPUT_DIR, "xosmodel.txt"), "r").read()
+        vrouterport = open(os.path.join(OUTPUT_DIR, "vrouterport.txt"), "r").read()
 
         self.assertIn("name: XOSModel", xosmodel)
         self.assertIn("name: VRouterPort", vrouterport)
@@ -153,58 +183,63 @@
         """
         [XOS-GenX] read multiple models as input, print separate files based on +++
         """
-        args = XOSProcessorArgs(files = [TEST_XPROTO, VROUTER_XPROTO],
-                                target = SPLIT_TARGET,
-                                output = OUTPUT_DIR,
-                                write_to_file = 'target')
+        args = XOSProcessorArgs(
+            files=[TEST_XPROTO, VROUTER_XPROTO],
+            target=SPLIT_TARGET,
+            output=OUTPUT_DIR,
+            write_to_file="target",
+        )
         XOSProcessor.process(args)
 
-        generated_files = [f for f in os.listdir(OUTPUT_DIR) if not f.startswith('.')]
+        generated_files = [f for f in os.listdir(OUTPUT_DIR) if not f.startswith(".")]
         self.assertEqual(len(generated_files), 2)
 
-        xosmodel = open(os.path.join(OUTPUT_DIR, 'xosmodel.txt'), "r").read()
-        vrouterport = open(os.path.join(OUTPUT_DIR, 'vrouterport.txt'), "r").read()
+        xosmodel = open(os.path.join(OUTPUT_DIR, "xosmodel.txt"), "r").read()
+        vrouterport = open(os.path.join(OUTPUT_DIR, "vrouterport.txt"), "r").read()
 
         self.assertIn("name: XOSModel", xosmodel)
         self.assertIn("name: VRouterPort", vrouterport)
 
     def test_skip_django(self):
-        args = XOSProcessorArgs(files = [SKIP_DJANGO_XPROTO],
-                                target = 'django.xtarget',
-                                output = OUTPUT_DIR,
-                                dest_extension = 'py',
-                                write_to_file = 'model')
+        args = XOSProcessorArgs(
+            files=[SKIP_DJANGO_XPROTO],
+            target="django.xtarget",
+            output=OUTPUT_DIR,
+            dest_extension="py",
+            write_to_file="model",
+        )
         output = XOSProcessor.process(args)
 
         # should not print a file if options.skip_django = True
-        file = OUTPUT_DIR + '/user.py'
+        file = OUTPUT_DIR + "/user.py"
         self.assertFalse(os.path.isfile(file))
 
     def test_service_order(self):
-        args = XOSProcessorArgs(files = [BASE_XPROTO, TEST_XPROTO, VROUTER_XPROTO],
-                                target = 'service.xtarget',
-                                output = OUTPUT_DIR,
-                                write_to_file = 'target')
+        args = XOSProcessorArgs(
+            files=[BASE_XPROTO, TEST_XPROTO, VROUTER_XPROTO],
+            target="service.xtarget",
+            output=OUTPUT_DIR,
+            write_to_file="target",
+        )
         output = XOSProcessor.process(args)
 
-        model = OUTPUT_DIR + '/models.py'
+        model = OUTPUT_DIR + "/models.py"
         self.assertTrue(os.path.isfile(model))
         line_num = 0
 
         for line in open(model).readlines():
             line_num += 1
-            if line.find('class XOSBase(models.Model, PlModelMixIn):') >= 0:
+            if line.find("class XOSBase(models.Model, PlModelMixIn):") >= 0:
                 base_line = line_num
-            if line.find('XOSModel(XOSBase):') >= 0:
+            if line.find("XOSModel(XOSBase):") >= 0:
                 xosmodel_line = line_num
-            if line.find('class VRouterPort(XOSBase):') >= 0:
+            if line.find("class VRouterPort(XOSBase):") >= 0:
                 vrouter_line = line_num
         self.assertLess(base_line, xosmodel_line)
         self.assertLess(xosmodel_line, vrouter_line)
 
     def test_field_numbers(self):
-        args = XOSProcessorArgs(files = [FIELDTEST_XPROTO],
-                                target = FIELDTEST_TARGET)
+        args = XOSProcessorArgs(files=[FIELDTEST_XPROTO], target=FIELDTEST_TARGET)
         output = XOSProcessor.process(args)
 
         def _assert_field(modelname, fieldname, id):
@@ -224,8 +259,9 @@
         _assert_field("Slice", "site", 102)
 
     def test_field_numbers(self):
-        args = XOSProcessorArgs(files = [REVERSEFIELDTEST_XPROTO],
-                                target = FIELDTEST_TARGET)
+        args = XOSProcessorArgs(
+            files=[REVERSEFIELDTEST_XPROTO], target=FIELDTEST_TARGET
+        )
         output = XOSProcessor.process(args)
 
         def _assert_field(modelname, fieldname, id):
@@ -250,32 +286,31 @@
 
     def test_unfiltered(self):
         """ With no include_* args, should get all models """
-        args = XOSProcessorArgs(files = [FILTERTEST_XPROTO],
-                                target = FILTERTEST_TARGET)
+        args = XOSProcessorArgs(files=[FILTERTEST_XPROTO], target=FILTERTEST_TARGET)
         output = XOSProcessor.process(args)
 
         self.assertEqual(output, "Model1,Model2,Model3,")
 
     def test_filter_models(self):
         """ Should only get models specified by include_models """
-        args = XOSProcessorArgs(files = [FILTERTEST_XPROTO],
-                                target = FILTERTEST_TARGET,
-                                include_models = ["Model1", "Model3"])
+        args = XOSProcessorArgs(
+            files=[FILTERTEST_XPROTO],
+            target=FILTERTEST_TARGET,
+            include_models=["Model1", "Model3"],
+        )
         output = XOSProcessor.process(args)
 
         self.assertEqual(output, "Model1,Model3,")
 
     def test_filter_apps(self):
         """ Should only get models whose apps are specified by include_apps """
-        args = XOSProcessorArgs(files = [FILTERTEST_XPROTO],
-                                target = FILTERTEST_TARGET,
-                                include_apps = ["core"])
+        args = XOSProcessorArgs(
+            files=[FILTERTEST_XPROTO], target=FILTERTEST_TARGET, include_apps=["core"]
+        )
         output = XOSProcessor.process(args)
 
         self.assertEqual(output, "Model1,Model2,")
 
 
-
-
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xos-genx-tests/test_graph.py b/lib/xos-genx/xos-genx-tests/test_graph.py
index fda3d99..c6cfea7 100644
--- a/lib/xos-genx/xos-genx-tests/test_graph.py
+++ b/lib/xos-genx/xos-genx-tests/test_graph.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,10 +17,11 @@
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 from helpers import XProtoTestHelpers
 
+
 class XProtoGraphTests(unittest.TestCase):
     def test_cross_model(self):
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   {% for m in proto.messages %}
   {{ m.name }} {
   {%- for l in m.links %}
@@ -34,10 +34,10 @@
   {% endfor %}
   }
   {% endfor %}
-""")
-
-        proto = \
 """
+        )
+
+        proto = """
 message Port (PlCoreBase,ParameterMixin){
      required manytoone network->Network:links = 1 [db_index = True, null = False, blank = False];
      optional manytoone instance->Instance:ports = 2 [db_index = True, null = True, blank = True];
@@ -116,12 +116,13 @@
         args.inputs = proto
         args.target = target
         output = XOSProcessor.process(args)
-        num_semis = output.count(';')
-        self.assertGreater(num_semis, 3) # 3 is the number of links, each of which contains at least one field
+        num_semis = output.count(";")
+        self.assertGreater(
+            num_semis, 3
+        )  # 3 is the number of links, each of which contains at least one field
 
     def test_base_class_fields(self):
-        target = \
-"""
+        target = """
   {% for m in proto.messages %}
   {{ m.name }} {
   {%- for l in m.links %}
@@ -137,8 +138,7 @@
 """
         xtarget = XProtoTestHelpers.write_tmp_target(target)
 
-        proto = \
-"""
+        proto = """
 message Port (PlCoreBase,ParameterMixin){
      required manytoone network->Network:links = 1 [db_index = True, null = False, blank = False];
      optional manytoone instance->Instance:ports = 2 [db_index = True, null = True, blank = True];
@@ -218,19 +218,17 @@
         args.target = xtarget
         output = XOSProcessor.process(args)
 
-        num_semis = output.count(';')
+        num_semis = output.count(";")
         self.assertGreater(num_semis, 3)
 
     def test_from_base(self):
-        target = \
-"""
+        target = """
   {% for f in xproto_base_fields(proto.messages.3, proto.message_table) %}
         {{ f.type }} {{ f.name }};
   {% endfor %}
 """
         xtarget = XProtoTestHelpers.write_tmp_target(target)
-        proto = \
-"""
+        proto = """
 message Port (PlCoreBase,ParameterMixin){
      required string easter_egg = 1;
      required manytoone network->Network:links = 1 [db_index = True, null = False, blank = False];
@@ -309,9 +307,8 @@
         args.inputs = proto
         args.target = xtarget
         output = XOSProcessor.process(args)
-        self.assertIn('easter_egg', output)
+        self.assertIn("easter_egg", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_jinja2_base.py b/lib/xos-genx/xos-genx-tests/test_jinja2_base.py
index 4f26ac9..859d640 100644
--- a/lib/xos-genx/xos-genx-tests/test_jinja2_base.py
+++ b/lib/xos-genx/xos-genx-tests/test_jinja2_base.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -29,6 +28,7 @@
         f["options"]["plural"] = plural
     return f
 
+
 class Jinja2BaseTests(unittest.TestCase):
     def test_xproto_is_true(self):
         self.assertTrue(xproto_is_true(True))
@@ -68,14 +68,16 @@
         self.assertEqual(xproto_singularize_pluralize(_field("sheep")), "sheep")
         self.assertEqual(xproto_singularize_pluralize(_field("slices")), "slices")
         self.assertEqual(xproto_singularize_pluralize(_field("networks")), "networks")
-        self.assertEqual(xproto_singularize_pluralize(_field("omf_friendlies")), "omf_friendlies")
+        self.assertEqual(
+            xproto_singularize_pluralize(_field("omf_friendlies")), "omf_friendlies"
+        )
         # invalid words, should usually return <word>-es
         self.assertEqual(xproto_singularize_pluralize(_field("xxx")), "xxxes")
         # if a field option is set, use that
-        self.assertEqual(xproto_singularize(_field("sheep", singular="turtle")), "turtle")
+        self.assertEqual(
+            xproto_singularize(_field("sheep", singular="turtle")), "turtle"
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_jinja2_django.py b/lib/xos-genx/xos-genx-tests/test_jinja2_django.py
index ab47443..108ae4e 100644
--- a/lib/xos-genx/xos-genx-tests/test_jinja2_django.py
+++ b/lib/xos-genx/xos-genx-tests/test_jinja2_django.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,103 +16,70 @@
 import unittest
 from xosgenx.jinja2_extensions.django import *
 
+
 class Jinja2BaseTests(unittest.TestCase):
     def test_xproto_optioned_fields_to_list(self):
 
         fields = [
-            {
-                'name': 'has_feedback_1',
-                'options': {
-                    'feedback_state': 'True',
-                }
-            },
-            {
-                'name': 'has_feedback_2',
-                'options': {
-                    'feedback_state': 'True',
-                }
-            },
-            {
-                'name': 'no_feedback',
-                'options': {
-                    'feedback_state': 'False',
-                }
-            }
+            {"name": "has_feedback_1", "options": {"feedback_state": "True"}},
+            {"name": "has_feedback_2", "options": {"feedback_state": "True"}},
+            {"name": "no_feedback", "options": {"feedback_state": "False"}},
         ]
 
-        res = xproto_optioned_fields_to_list(fields, 'feedback_state', 'True')
+        res = xproto_optioned_fields_to_list(fields, "feedback_state", "True")
         self.assertEqual(res, ["has_feedback_1", "has_feedback_2"])
 
     def test_xproto_required_to_django(self):
-        field = {
-            'name': 'foo',
-            'options': {
-                'modifier': 'required'
-            }
-        }
+        field = {"name": "foo", "options": {"modifier": "required"}}
 
         res = map_xproto_to_django(field)
-        self.assertEqual(res, {'blank': False, 'null': False})
+        self.assertEqual(res, {"blank": False, "null": False})
 
     def test_xproto_optional_to_django(self):
-        field = {
-            'name': 'foo',
-            'options': {
-                'modifier': 'optional'
-            }
-        }
+        field = {"name": "foo", "options": {"modifier": "optional"}}
 
         res = map_xproto_to_django(field)
-        self.assertEqual(res, {'blank': True, 'null': True})
-
+        self.assertEqual(res, {"blank": True, "null": True})
 
     def test_map_xproto_to_django(self):
 
         options = {
-            'help_text': 'bar',
-            'default': 'default_value',
-            'null':  True,
-            'db_index': False,
-            'blank': False,
-            'min_value': 16,
-            'max_value': 16
+            "help_text": "bar",
+            "default": "default_value",
+            "null": True,
+            "db_index": False,
+            "blank": False,
+            "min_value": 16,
+            "max_value": 16,
         }
 
-        field = {
-            'name': 'foo',
-            'options': options
-        }
+        field = {"name": "foo", "options": options}
 
         res = map_xproto_to_django(field)
         self.assertEqual(res, options)
 
     def test_format_options_string(self):
 
-        options = {
-            'null':  True,
-            'min_value': 16,
-            'max_value': 16
-        }
+        options = {"null": True, "min_value": 16, "max_value": 16}
 
         res = format_options_string(options)
-        self.assertEqual(res, "null = True, validators=[MaxValueValidator(16), MinValueValidator(16)]")
+        self.assertEqual(
+            res,
+            "null = True, validators=[MaxValueValidator(16), MinValueValidator(16)]",
+        )
 
-        options = {
-            'min_value': 16,
-            'max_value': 16
-        }
+        options = {"min_value": 16, "max_value": 16}
 
         res = format_options_string(options)
-        self.assertEqual(res, "validators=[MaxValueValidator(16), MinValueValidator(16)]")
+        self.assertEqual(
+            res, "validators=[MaxValueValidator(16), MinValueValidator(16)]"
+        )
 
-        options = {
-            'null': True,
-        }
+        options = {"null": True}
 
         res = format_options_string(options)
         self.assertEqual(res, "null = True")
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_optimize.py b/lib/xos-genx/xos-genx-tests/test_optimize.py
index e31deb8..c86b736 100644
--- a/lib/xos-genx/xos-genx-tests/test_optimize.py
+++ b/lib/xos-genx/xos-genx-tests/test_optimize.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,81 +16,120 @@
 import unittest
 from xosgenx.jinja2_extensions.fol2 import FOL2Python
 
+
 class XProtoOptimizeTest(unittest.TestCase):
     def setUp(self):
         self.f2p = FOL2Python()
-        self.maxDiff=None
+        self.maxDiff = None
 
     def test_constant(self):
-        input = 'True'
+        input = "True"
         output = self.f2p.hoist_outer(input)
         self.assertEqual(output, input)
 
     def test_exists(self):
-        input = {'exists': ['X',{'|':['X.foo','y']}]}
+        input = {"exists": ["X", {"|": ["X.foo", "y"]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'|': ['y', {'&': [{'not': 'y'}, {'exists': ['X', 'X.foo']}]}]}
+        expected = {"|": ["y", {"&": [{"not": "y"}, {"exists": ["X", "X.foo"]}]}]}
         self.assertEqual(output, expected)
-        
+
     def test_exists_implies(self):
-        input = {'exists': ['Foo', {'&': [{'=': ('Foo.a', '1')}, {'->': ['write_access', {'=': ('Foo.b', '1')}]}]}]}
+        input = {
+            "exists": [
+                "Foo",
+                {
+                    "&": [
+                        {"=": ("Foo.a", "1")},
+                        {"->": ["write_access", {"=": ("Foo.b", "1")}]},
+                    ]
+                },
+            ]
+        }
 
         output = self.f2p.hoist_outer(input)
-        expected = {'|': [{'&': ['write_access', {'exists': ['Foo', {'&': [{'=': ['Foo.a', '1']}, {'=': ['Foo.b', '1']}]}]}]}, {'&': [{'not': 'write_access'}, {'exists': ['Foo', {'=': ['Foo.a', '1']}]}]}]}
+        expected = {
+            "|": [
+                {
+                    "&": [
+                        "write_access",
+                        {
+                            "exists": [
+                                "Foo",
+                                {"&": [{"=": ["Foo.a", "1"]}, {"=": ["Foo.b", "1"]}]},
+                            ]
+                        },
+                    ]
+                },
+                {
+                    "&": [
+                        {"not": "write_access"},
+                        {"exists": ["Foo", {"=": ["Foo.a", "1"]}]},
+                    ]
+                },
+            ]
+        }
         self.assertEqual(output, expected)
 
     def test_forall(self):
-        input = {'forall': ['X',{'|':['X.foo','y']}]}
+        input = {"forall": ["X", {"|": ["X.foo", "y"]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'|': ['y', {'&': [{'not': 'y'}, {'forall': ['X', 'X.foo']}]}]}
+        expected = {"|": ["y", {"&": [{"not": "y"}, {"forall": ["X", "X.foo"]}]}]}
         self.assertEqual(output, expected)
 
     def test_exists_embedded(self):
-        input = {'&':['True',{'exists': ['X',{'|':['X.foo','y']}]}]}
+        input = {"&": ["True", {"exists": ["X", {"|": ["X.foo", "y"]}]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'|': ['y', {'&': [{'not': 'y'}, {'exists': ['X', 'X.foo']}]}]}
+        expected = {"|": ["y", {"&": [{"not": "y"}, {"exists": ["X", "X.foo"]}]}]}
         self.assertEqual(output, expected)
-    
+
     def test_exists_equals(self):
-        input = {'&':['True',{'exists': ['X',{'|':['X.foo',{'=':['y','z']}]}]}]}
+        input = {"&": ["True", {"exists": ["X", {"|": ["X.foo", {"=": ["y", "z"]}]}]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'|': [{'=': ['y', 'z']}, {'&': [{'not': {'=': ['y', 'z']}}, {'exists': ['X', 'X.foo']}]}]}
+        expected = {
+            "|": [
+                {"=": ["y", "z"]},
+                {"&": [{"not": {"=": ["y", "z"]}}, {"exists": ["X", "X.foo"]}]},
+            ]
+        }
         self.assertEqual(output, expected)
 
     def test_exists_nested_constant(self):
-        input = {'&':['True',{'exists': ['X',{'|':['y',{'=':['y','X.foo']}]}]}]}
+        input = {"&": ["True", {"exists": ["X", {"|": ["y", {"=": ["y", "X.foo"]}]}]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'|': ['y', {'&': [{'not': 'y'}, {'exists': ['X', {'=': ['False', 'X.foo']}]}]}]}
+        expected = {
+            "|": [
+                "y",
+                {"&": [{"not": "y"}, {"exists": ["X", {"=": ["False", "X.foo"]}]}]},
+            ]
+        }
         self.assertEqual(output, expected)
 
     def test_exists_nested(self):
-        input = {'exists': ['X',{'exists':['Y',{'=':['Y.foo','X.foo']}]}]}
+        input = {"exists": ["X", {"exists": ["Y", {"=": ["Y.foo", "X.foo"]}]}]}
 
         output = self.f2p.hoist_outer(input)
         expected = input
         self.assertEqual(input, output)
 
     def test_exists_nested2(self):
-        input = {'exists': ['X',{'exists':['Y',{'=':['Z','Y']}]}]}
+        input = {"exists": ["X", {"exists": ["Y", {"=": ["Z", "Y"]}]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'exists': ['Y', {'=': ['Z', 'Y']}]}
+        expected = {"exists": ["Y", {"=": ["Z", "Y"]}]}
         self.assertEqual(output, expected)
 
     def test_exists_nested3(self):
-        input = {'exists': ['X',{'exists':['Y',{'=':['Z','X']}]}]}
+        input = {"exists": ["X", {"exists": ["Y", {"=": ["Z", "X"]}]}]}
 
         output = self.f2p.hoist_outer(input)
-        expected = {'exists': ['X', {'=': ['Z', 'X']}]}
+        expected = {"exists": ["X", {"=": ["Z", "X"]}]}
         self.assertEqual(output, expected)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_package.py b/lib/xos-genx/xos-genx-tests/test_package.py
index f1cf426..03911bd 100644
--- a/lib/xos-genx/xos-genx-tests/test_package.py
+++ b/lib/xos-genx/xos-genx-tests/test_package.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,18 +18,19 @@
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 from helpers import XProtoTestHelpers
 
+
 class XProtoPackageTest(unittest.TestCase):
     def test_package_fqn(self):
         args = XOSProcessorArgs()
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   {% for m in proto.messages %}
   {{ m.name }},{{ m.package }},{{ m.fqn }}
   {% endfor %}
-""")
-
-        xproto =\
 """
+        )
+
+        xproto = """
 package xos.core;
 
 message Port (PlCoreBase,ParameterMixin) {
@@ -48,11 +48,11 @@
 
         output = XOSProcessor.process(args)
 
-        self.assertIn('Port,xos.core,xos.core.Port', output)
+        self.assertIn("Port,xos.core,xos.core.Port", output)
 
     def test_cross_model(self):
-        target = XProtoTestHelpers.write_tmp_target( \
-"""
+        target = XProtoTestHelpers.write_tmp_target(
+            """
   {% for m in proto.messages %}
   {{ m.fqn }} {
   {%- for l in m.links %}
@@ -75,10 +75,10 @@
   {% endfor %}
   }
   {% endfor %}
-""")
-
-        xproto = \
 """
+        )
+
+        xproto = """
 package xos.network;
 
 message Port (PlCoreBase,ParameterMixin){
@@ -164,13 +164,17 @@
         args.target = target
         output = XOSProcessor.process(args)
 
-        self.assertIn('numberCores', output) # Instance showed up via cross-package call
-        self.assertIn('ip;', output) # Network showed up via cross-package call
-        self.assertIn('max_instances', output) # Slice showed up via implicit in-package call
+        self.assertIn(
+            "numberCores", output
+        )  # Instance showed up via cross-package call
+        self.assertIn("ip;", output)  # Network showed up via cross-package call
+        self.assertIn(
+            "max_instances", output
+        )  # Slice showed up via implicit in-package call
 
     def test_base_class_fields(self):
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   {% for m in proto.messages %}
   {{ m.name }} {
   {%- for b in m.bases %}
@@ -183,10 +187,10 @@
   {% endfor %}
   }
   {% endfor %}
-""")
-
-        xproto =\
 """
+        )
+
+        xproto = """
 package xos.network;
 
 message Port (PlCoreBase,ParameterMixin){
@@ -225,18 +229,18 @@
         args.target = target
         output = XOSProcessor.process(args)
 
-        self.assertIn('xos_created', output)
+        self.assertIn("xos_created", output)
 
     def test_from_base(self):
-        target = XProtoTestHelpers.write_tmp_target( \
-"""
+        target = XProtoTestHelpers.write_tmp_target(
+            """
   {% for f in xproto_base_fields(proto.messages.3, proto.message_table) %}
         {{ f.type }} {{ f.name }};
   {% endfor %}
-""")
-
-        xproto =\
 """
+        )
+
+        xproto = """
 option app_name = "firstapp";
 
 message Port (PlCoreBase,ParameterMixin){
@@ -326,21 +330,21 @@
         args.target = target
         output = XOSProcessor.process(args)
 
-        self.assertIn('easter_egg', output)
+        self.assertIn("easter_egg", output)
 
     def test_model_options(self):
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   Options:
 
   {{ proto.options }}
   {% for m in proto.messages %}
         {{ m.options.app_name }}
   {% endfor %}
-""")
-
-        xproto =\
 """
+        )
+
+        xproto = """
 option app_name = "firstapp";
 
 message Port (PlCoreBase,ParameterMixin){
@@ -426,17 +430,15 @@
      required manytomany tags->Tag = 18 [db_index = False, null = False, blank = True];
 }
 """
-         
+
         args = XOSProcessorArgs()
         args.inputs = xproto
         args.target = target
         output = XOSProcessor.process(args)
 
-        self.assertEqual(output.count('firstapp'), 2)
-        self.assertEqual(output.count('networkapp'), 2)
+        self.assertEqual(output.count("firstapp"), 2)
+        self.assertEqual(output.count("networkapp"), 2)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_parse.py b/lib/xos-genx/xos-genx-tests/test_parse.py
index d7edcb7..8d1ccf5 100644
--- a/lib/xos-genx/xos-genx-tests/test_parse.py
+++ b/lib/xos-genx/xos-genx-tests/test_parse.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,13 +17,13 @@
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 from helpers import XProtoTestHelpers
 
+
 class XProtoParseTests(unittest.TestCase):
     def test_global_options(self):
 
         xtarget = XProtoTestHelpers.write_tmp_target("{{ options }}")
 
-        xproto = \
-"""
+        xproto = """
     option kind = "vsg";
     option verbose_name = "vSG Service";
 """
@@ -38,8 +37,7 @@
     def test_basic_proto(self):
         xtarget = XProtoTestHelpers.write_tmp_target("{{ proto }}")
 
-        xproto = \
-"""
+        xproto = """
 message Person {
   required string name = 1;
   required int32 id = 2;  // Unique ID number for this person.
@@ -66,8 +64,7 @@
     def test_link_extensions(self):
 
         xtarget = XProtoTestHelpers.write_tmp_target("{{ proto.messages.0.links }}")
-        xproto = \
-"""
+        xproto = """
 message links {
     required manytoone vrouter_service->VRouterService:device_ports = 4 [db_index = True, null = False, blank = False];
 }
@@ -79,9 +76,10 @@
         self.assertIn("VRouterService", output)
 
     def test_through_extensions(self):
-        xtarget = XProtoTestHelpers.write_tmp_target("{{ proto.messages.0.links.0.through }}")
-        xproto = \
-"""
+        xtarget = XProtoTestHelpers.write_tmp_target(
+            "{{ proto.messages.0.links.0.through }}"
+        )
+        xproto = """
 message links {
     required manytomany vrouter_service->VRouterService/ServiceProxy:device_ports = 4 [db_index = True, null = False, blank = False];
 }
@@ -93,9 +91,10 @@
         self.assertIn("ServiceProxy", output)
 
     def test_message_options(self):
-        xtarget = XProtoTestHelpers.write_tmp_target("{{ proto.messages.0.options.type }}")
-        xproto = \
-"""
+        xtarget = XProtoTestHelpers.write_tmp_target(
+            "{{ proto.messages.0.options.type }}"
+        )
+        xproto = """
 message link {
     option type = "e1000";
 }
@@ -108,8 +107,7 @@
 
     def test_message_base(self):
         xtarget = XProtoTestHelpers.write_tmp_target("{{ proto.messages.0.bases }}")
-        xproto = \
-"""
+        xproto = """
 message base(Base) {
 }
 """
@@ -120,7 +118,6 @@
         output = XOSProcessor.process(args)
         self.assertIn("Base", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_policy.py b/lib/xos-genx/xos-genx-tests/test_policy.py
index d83ab6c..e8b5a76 100644
--- a/lib/xos-genx/xos-genx-tests/test_policy.py
+++ b/lib/xos-genx/xos-genx-tests/test_policy.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,10 +23,10 @@
 into Python, set up an appropriate environment and execute the Python.
 """
 
+
 class XProtoPolicyTest(unittest.TestCase):
     def test_annotation(self):
-        xproto = \
-"""
+        xproto = """
     policy true_policy < True >
 
     message always::true_policy {
@@ -45,8 +44,7 @@
         self.assertIn("true_policy", output)
 
     def test_constant(self):
-        xproto = \
-"""
+        xproto = """
     policy true_policy < True >
 """
 
@@ -56,12 +54,11 @@
         args.inputs = xproto
         args.target = target
 
-        output = XOSProcessor.process(args).replace('t','T')
-        self.assertTrue(eval(output)) 
+        output = XOSProcessor.process(args).replace("t", "T")
+        self.assertTrue(eval(output))
 
     def test_function_term(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_user < slice.user.compute_is_admin() >
 """
 
@@ -71,7 +68,7 @@
         args.target = target
 
         output = XOSProcessor.process(args)
-       
+
         slice = FakeObject()
         slice.user = FakeObject()
         slice.user.compute_is_admin = lambda: True
@@ -80,8 +77,7 @@
         self.assertTrue(expr)
 
     def test_term(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_user < slice.user.is_admin >
 """
 
@@ -91,7 +87,7 @@
         args.target = target
 
         output = XOSProcessor.process(args)
-       
+
         slice = FakeObject()
         slice.user = FakeObject()
         slice.user.is_admin = True
@@ -100,8 +96,7 @@
         self.assertTrue(expr)
 
     def test_num_constant(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_user < slice.user.age = 57 >
 """
 
@@ -111,7 +106,7 @@
         args.target = target
 
         output = XOSProcessor.process(args)
-       
+
         slice = FakeObject()
         slice.user = FakeObject()
         slice.user.is_admin = True
@@ -120,8 +115,7 @@
         self.assertTrue(expr)
 
     def test_string_constant(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_user < slice.user.email = "admin@opencord.org" >
 """
 
@@ -131,7 +125,7 @@
         args.target = target
 
         output = XOSProcessor.process(args)
-       
+
         slice = FakeObject()
         slice.user = FakeObject()
         slice.user.is_admin = True
@@ -140,8 +134,7 @@
         self.assertTrue(expr)
 
     def test_equal(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_user < slice.user = obj.user >
 """
 
@@ -151,20 +144,19 @@
         args.target = target
 
         output = XOSProcessor.process(args)
-       
+
         slice = FakeObject()
-        slice.user = 'twin'
+        slice.user = "twin"
         obj = FakeObject()
-        obj.user = 'twin'
+        obj.user = "twin"
 
         (op, operands), = eval(output).items()
-        expr = op.join(operands).replace('=','==')
+        expr = op.join(operands).replace("=", "==")
 
         self.assertTrue(eval(expr))
 
     def test_bin(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_admin < slice.is_admin | obj.empty >
 """
         target = XProtoTestHelpers.write_tmp_target("{{ proto.policies.slice_admin }}")
@@ -180,13 +172,12 @@
         obj.empty = []
 
         (op, operands), = eval(output).items()
-        expr = op.join(operands).replace('|',' or ')
+        expr = op.join(operands).replace("|", " or ")
 
         self.assertFalse(eval(expr))
 
     def test_implies(self):
-        xproto = \
-"""
+        xproto = """
     policy implies < obj.name -> obj.creator >
 """
         target = XProtoTestHelpers.write_tmp_target("{{ proto.policies.implies }}")
@@ -199,17 +190,16 @@
         slice = FakeObject()
         slice.is_admin = False
         obj = FakeObject()
-        obj.name = 'Thing 1'
+        obj.name = "Thing 1"
         obj.creator = None
 
         (op, operands), = eval(output).items()
-        expr = 'not ' + op.join(operands).replace('->',' or ')
+        expr = "not " + op.join(operands).replace("->", " or ")
 
         self.assertFalse(eval(expr))
-   
+
     def test_exists(self):
-        xproto = \
-"""
+        xproto = """
     policy privilege < exists Privilege: Privilege.object_id = obj.id >
 """
 
@@ -219,7 +209,7 @@
         args.target = target
 
         output = XOSProcessor.process(args)
-        
+
         Privilege = FakeObject()
         Privilege.object_id = 1
         obj = FakeObject()
@@ -227,49 +217,49 @@
 
         (op, operands), = eval(output).items()
         (op2, operands2), = operands[1].items()
-        expr = op2.join(operands2).replace('=','==')
+        expr = op2.join(operands2).replace("=", "==")
 
         self.assertTrue(eval(expr))
 
     def test_policy_function(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_policy < exists Privilege: Privilege.object_id = obj.id >
     policy network_slice_policy < *slice_policy(slice) >
 """
 
-        target = XProtoTestHelpers.write_tmp_target("{{ proto.policies.network_slice_policy }} ")
+        target = XProtoTestHelpers.write_tmp_target(
+            "{{ proto.policies.network_slice_policy }} "
+        )
         args = XOSProcessorArgs()
         args.inputs = xproto
         args.target = target
 
         output = XOSProcessor.process(args)
-        
+
         (op, operands), = eval(output).items()
 
-        self.assertIn('slice_policy', operands)
-        self.assertIn('slice', operands)
+        self.assertIn("slice_policy", operands)
+        self.assertIn("slice", operands)
 
     def test_policy_missing_function(self):
-        xproto = \
-"""
+        xproto = """
     policy slice_policy < exists Privilege: Privilege.object_id = obj.id >
     policy network_slice_policy < *slice_policyX(slice) >
 """
 
-        target = XProtoTestHelpers.write_tmp_target("{{ proto.policies.network_slice_policy }} ")
+        target = XProtoTestHelpers.write_tmp_target(
+            "{{ proto.policies.network_slice_policy }} "
+        )
         args = XOSProcessorArgs()
         args.inputs = xproto
         args.target = target
 
         with self.assertRaises(Exception):
             output = XOSProcessor.process(args)
-        
 
     def test_forall(self):
         # This one we only parse
-        xproto = \
-"""
+        xproto = """
     policy instance < forall Instance: exists Credential: Credential.obj_id = Instance.obj_id >
 """
 
@@ -282,10 +272,8 @@
         output = XOSProcessor.process(args)
         (op, operands), = eval(output).items()
 
-        self.assertEqual(op,'forall')
+        self.assertEqual(op, "forall")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_pure_proto.py b/lib/xos-genx/xos-genx-tests/test_pure_proto.py
index ade4957..c4f680d 100644
--- a/lib/xos-genx/xos-genx-tests/test_pure_proto.py
+++ b/lib/xos-genx/xos-genx-tests/test_pure_proto.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,16 +13,16 @@
 # limitations under the License.
 
 
-
 import unittest
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 from helpers import XProtoTestHelpers
 
 # Generate from xproto, then generate from equivalent proto
+
+
 class XPureProtobufGenerator(unittest.TestCase):
     def test_pure_proto(self):
-		xproto = \
-"""
+        xproto = """
 message VRouterPort (XOSBase){
      optional string name = 1 [help_text = "port friendly name", max_length = 20, null = True, db_index = False, blank = True];
      required string openflow_id = 2 [help_text = "port identifier in ONOS", max_length = 21, null = False, db_index = False, blank = False];
@@ -32,8 +31,7 @@
 }
 """
 
-		proto = \
-"""
+        proto = """
 message VRouterPort {
   option bases = "XOSBase";
   optional string name = 1 [ null = "True",  max_length = "20",  blank = "True",  help_text = "port friendly name",  modifier = "optional",  db_index = "False" ];
@@ -42,8 +40,8 @@
   required int32 vrouter_service = 4 [ null = "False",  blank = "False",  model = "VRouterService",  modifier = "required",  type = "link",  port = "device_ports",  db_index = "True", link = "manytoone"];
 }
 """
-		target = XProtoTestHelpers.write_tmp_target(
-"""
+        target = XProtoTestHelpers.write_tmp_target(
+            """
 from header import *
 {% for m in proto.messages %}
 {% if file_exists(xproto_base_name(m.name)|lower+'_header.py') -%}from {{xproto_base_name(m.name)|lower }}_header import *{% endif %}
@@ -81,53 +79,52 @@
 
 {% if file_exists(xproto_base_name(m.name)|lower+'_bottom.py') -%}{{ include_file(xproto_base_name(m.name)|lower+'_bottom.py') }}{% endif %}
 {% endfor %}
-""")
+"""
+        )
 
-		args_xproto = XOSProcessorArgs()
-		args_xproto.inputs = xproto
-		args_xproto.target = target
-		xproto_gen = XOSProcessor.process(args_xproto)
+        args_xproto = XOSProcessorArgs()
+        args_xproto.inputs = xproto
+        args_xproto.target = target
+        xproto_gen = XOSProcessor.process(args_xproto)
 
-		count1 = len(xproto_gen.split('\n'))
+        count1 = len(xproto_gen.split("\n"))
 
-		args_proto = XOSProcessorArgs()
-		args_proto.inputs = proto
-		args_proto.target = target
-		args_proto.rev = True
-		proto_gen = XOSProcessor.process(args_proto)
-		count2 = len(proto_gen.split('\n'))
+        args_proto = XOSProcessorArgs()
+        args_proto.inputs = proto
+        args_proto.target = target
+        args_proto.rev = True
+        proto_gen = XOSProcessor.process(args_proto)
+        count2 = len(proto_gen.split("\n"))
 
-		self.assertEqual(count1, count2)
+        self.assertEqual(count1, count2)
 
     def test_pure_policies(self):
-		xproto = \
-"""
+        xproto = """
 policy my_policy < exists x:a=b >
 """
 
-		proto = \
-"""
+        proto = """
 option my_policy = "policy:< exists x:a=b >";
 """
-		target = XProtoTestHelpers.write_tmp_target(
-"""
+        target = XProtoTestHelpers.write_tmp_target(
+            """
 {{ policies }}
-""")
+"""
+        )
 
-		args_xproto = XOSProcessorArgs()
-		args_xproto.inputs = xproto
-		args_xproto.target = target
-		xproto_gen = XOSProcessor.process(args_xproto)
+        args_xproto = XOSProcessorArgs()
+        args_xproto.inputs = xproto
+        args_xproto.target = target
+        xproto_gen = XOSProcessor.process(args_xproto)
 
-		args_proto = XOSProcessorArgs()
-		args_proto.inputs = proto
-		args_proto.target = target
-		args_proto.rev = True
-		proto_gen = XOSProcessor.process(args_proto)
+        args_proto = XOSProcessorArgs()
+        args_proto.inputs = proto
+        args_proto.target = target
+        args_proto.rev = True
+        proto_gen = XOSProcessor.process(args_proto)
 
-		self.assertEqual(proto_gen, xproto_gen)
+        self.assertEqual(proto_gen, xproto_gen)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_rlinks.py b/lib/xos-genx/xos-genx-tests/test_rlinks.py
index 635c81f..c0ad406 100644
--- a/lib/xos-genx/xos-genx-tests/test_rlinks.py
+++ b/lib/xos-genx/xos-genx-tests/test_rlinks.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,18 +17,20 @@
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 from helpers import XProtoTestHelpers
 
+
 class XProtoRlinkTests(unittest.TestCase):
     def test_proto_generator(self):
-        target = XProtoTestHelpers.write_tmp_target("""
+        target = XProtoTestHelpers.write_tmp_target(
+            """
 {% for m in proto.messages %}
    {% for r in m.rlinks %}
        {{ r }}
    {% endfor %}
 {% endfor %}
-""")
-
-        xproto = \
 """
+        )
+
+        xproto = """
 message VRouterPort (PlCoreBase){
      optional string name = 1 [help_text = "port friendly name", max_length = 20, null = True, db_index = False, blank = True];
      required string openflow_id = 2 [help_text = "port identifier in ONOS", max_length = 21, null = False, db_index = False, blank = False];
@@ -60,7 +61,6 @@
         self.assertIn("'src_port': 'device_ports'", output)
         self.assertIn("'src_port': 'ports'", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_swagger.py b/lib/xos-genx/xos-genx-tests/test_swagger.py
index 00b0684..3af997e 100644
--- a/lib/xos-genx/xos-genx-tests/test_swagger.py
+++ b/lib/xos-genx/xos-genx-tests/test_swagger.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,11 +19,12 @@
 from xosgenx.generator import XOSProcessor, XOSProcessorArgs
 from helpers import OUTPUT_DIR
 
+
 class Args:
     pass
 
-class XOSProcessorTest(unittest.TestCase):
 
+class XOSProcessorTest(unittest.TestCase):
     def test_swagger_target(self):
         """
         [XOS-GenX] The swagger xtarget should generate the appropriate json
@@ -32,10 +32,9 @@
 
         # xosgenx --output . --target xosgenx/targets/swagger.xtarget --write-to-file single  --dest-file swagger.yaml ../../xos/core/models/core.xproto
         # http-server --cors Users/teone/Sites/opencord/orchestration/xos/lib/xos-genx/
-        xproto = \
-            """
+        xproto = """
             option app_label = "core";
-    
+
             message Instance::instance_policy (XOSBase) {
                  option validators = "instance_creator:Instance has no creator, instance_isolation: Container instance {obj.name} must use container image, instance_isolation_container_vm_parent:Container-vm instance {obj.name} must have a parent, instance_parent_isolation_container_vm:Parent field can only be set on Container-vm instances ({obj.name}), instance_isolation_vm: VM Instance {obj.name} must use VM image, instance_creator_privilege: instance creator has no privileges on slice";
                  optional string instance_id = 1 [max_length = 200, content_type = "stripped", blank = True, help_text = "Nova instance id", null = True, db_index = False];
@@ -58,7 +57,7 @@
             """
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'swagger.xtarget'
+        args.target = "swagger.xtarget"
         args.output = OUTPUT_DIR
         args.write_to_file = "single"
         args.dest_file = "swagger.yaml"
@@ -68,5 +67,6 @@
         self.assertIn("/xosapi/v1/core/instances/{id}:", output)
         self.assertIn("Instance:", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xos-genx-tests/test_target.py b/lib/xos-genx/xos-genx-tests/test_target.py
index d729be7..c468018 100644
--- a/lib/xos-genx/xos-genx-tests/test_target.py
+++ b/lib/xos-genx/xos-genx-tests/test_target.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,24 +22,25 @@
 
 TEST_OUTPUT = "Do re mi fa so la ti do"
 
-class XProtoTargetTests(unittest.TestCase):
 
+class XProtoTargetTests(unittest.TestCase):
     def setUp(self):
-        test_file = open(os.path.join(OUTPUT_DIR, TEST_FILE), 'w')
+        test_file = open(os.path.join(OUTPUT_DIR, TEST_FILE), "w")
         test_file.write(TEST_OUTPUT)
         test_file.close()
 
     def test_file_methods(self):
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   {%% if file_exists("%s") %%}
     {{ include_file("%s") }}
   {%% endif %%}
-"""%(TEST_FILE, TEST_FILE)
+"""
+            % (TEST_FILE, TEST_FILE)
         )
 
         args = XOSProcessorArgs()
-        args.inputs = ''
+        args.inputs = ""
         args.target = target
         args.attic = OUTPUT_DIR
         output = XOSProcessor.process(args)
@@ -48,30 +48,31 @@
 
     def test_xproto_lib(self):
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   {{ xproto_first_non_empty([None, None, None, None, None, None, "Eureka"]) }}
-""")
+"""
+        )
         args = XOSProcessorArgs()
-        args.inputs = ''
+        args.inputs = ""
         args.target = target
         output = XOSProcessor.process(args)
         self.assertIn("Eureka", output)
 
     def test_context(self):
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
   {{ context.what }}
-""")
+"""
+        )
         args = XOSProcessorArgs()
-        args.inputs = ''
+        args.inputs = ""
         args.target = target
-        args.kv='what:what is what'
+        args.kv = "what:what is what"
         output = XOSProcessor.process(args)
         self.assertIn("what is what", output)
 
     def test_singularize(self):
-        proto = \
-"""
+        proto = """
   message TestSingularize {
       // The following field has an explicitly specified singular
       required int many = 1 [singular = "one"];
@@ -84,20 +85,22 @@
 """
 
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
 {% for m in proto.messages.0.fields -%}
 {{ xproto_singularize(m) }},
 {%- endfor %}
-""")
+"""
+        )
         args = XOSProcessorArgs()
         args.inputs = proto
         args.target = target
         output = XOSProcessor.process(args)
-        self.assertEqual("one,sheep,slice,network,omf_friendly", output.lstrip().rstrip().rstrip(','))
+        self.assertEqual(
+            "one,sheep,slice,network,omf_friendly", output.lstrip().rstrip().rstrip(",")
+        )
 
     def test_pluralize(self):
-        proto = \
-"""
+        proto = """
   message TestPluralize {
       // The following field has an explicitly specified plural
       required int anecdote = 1 [plural = "data"];
@@ -110,18 +113,21 @@
 """
 
         target = XProtoTestHelpers.write_tmp_target(
-"""
+            """
 {% for m in proto.messages.0.fields -%}
 {{ xproto_pluralize(m) }},
 {%- endfor %}
-""")
+"""
+        )
         args = XOSProcessorArgs()
         args.inputs = proto
         args.target = target
         output = XOSProcessor.process(args)
-        self.assertEqual("data,sheep,slices,networks,omf_friendlies", output.lstrip().rstrip().rstrip(','))
+        self.assertEqual(
+            "data,sheep,slices,networks,omf_friendlies",
+            output.lstrip().rstrip().rstrip(","),
+        )
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_tosca.py b/lib/xos-genx/xos-genx-tests/test_tosca.py
index 270d126..c5a0f17 100644
--- a/lib/xos-genx/xos-genx-tests/test_tosca.py
+++ b/lib/xos-genx/xos-genx-tests/test_tosca.py
@@ -18,7 +18,6 @@
 
 
 class XProtoToscaTypeTest(unittest.TestCase):
-
     def setUp(self):
         self.target_tosca_type = XProtoTestHelpers.write_tmp_target(
             """
@@ -27,13 +26,14 @@
                 {{ xproto_tosca_field_type(f.type) }}
             {% endfor -%}
             {% endfor -%}
-            """)
+            """
+        )
+
     def test_tosca_fields(self):
         """
         [XOS-GenX] should convert xproto types to tosca know types
         """
-        xproto = \
-        """
+        xproto = """
         option app_label = "test";
 
         message Foo {
@@ -47,26 +47,26 @@
         args.inputs = xproto
         args.target = self.target_tosca_type
         output = XOSProcessor.process(args)
-        self.assertIn('string', output)
-        self.assertIn('boolean', output)
-        self.assertIn('integer', output)
+        self.assertIn("string", output)
+        self.assertIn("boolean", output)
+        self.assertIn("integer", output)
+
 
 class XProtoToscaKeyTest(unittest.TestCase):
-
     def setUp(self):
         self.target_tosca_keys = XProtoTestHelpers.write_tmp_target(
             """
             {%- for m in proto.messages %}
                 {{ xproto_fields_to_tosca_keys(m.fields, m) }}
             {% endfor -%}
-            """)
+            """
+        )
 
     def test_xproto_fields_to_tosca_keys_default(self):
         """
         [XOS-GenX] if no "tosca_key" is specified, and a name attribute is present in the model, use that
         """
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -78,16 +78,15 @@
         args.inputs = xproto
         args.target = self.target_tosca_keys
         output = XOSProcessor.process(args)
-        self.assertIn('name', output)
+        self.assertIn("name", output)
 
     def test_xproto_fields_to_tosca_keys_custom(self):
         """
         [XOS-GenX] if "tosca_key" is specified, use it
         """
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
-        
+
             message Foo {
                 required string name = 1 [ null = "False", blank="False"];
                 required string key_1 = 2 [ null = "False", blank="False", tosca_key=True];
@@ -99,16 +98,15 @@
         args.inputs = xproto
         args.target = self.target_tosca_keys
         output = XOSProcessor.process(args)
-        self.assertNotIn('name', output)
-        self.assertIn('key_1', output)
-        self.assertIn('key_2', output)
+        self.assertNotIn("name", output)
+        self.assertIn("key_1", output)
+        self.assertIn("key_2", output)
 
     def test_xproto_fields_link_to_tosca_keys_custom(self):
         """
         [XOS-GenX] if "tosca_key" is specified, use it
         """
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
 
             message Foo {
@@ -121,21 +119,20 @@
         args.inputs = xproto
         args.target = self.target_tosca_keys
         output = XOSProcessor.process(args)
-        self.assertNotIn('name', output)
-        self.assertIn('provider_service_instance_id', output)
+        self.assertNotIn("name", output)
+        self.assertIn("provider_service_instance_id", output)
 
     def test_xproto_model_to_oneof_key(self):
         """
         [XOS-GenX] in some models we need to have a combine key on variable fields, for example, keys can be subscriber_service_id + oneof(provider_service_id, provider_network_id)
         """
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
 
             message Foo {
-            
+
                 option tosca_key = "key1, oneof(key_2, key_3)";
-            
+
                 required string name = 1 [ null = "False", blank="False"];
                 required string key_1 = 2 [ null = "False", blank="False", tosca_key_one_of = "key_2"];
                 required string key_2 = 3 [ null = "False", blank="False", tosca_key_one_of = "key_1"];
@@ -150,8 +147,7 @@
         output = XOSProcessor.process(args)
         self.assertIn("['name', ['key_4', 'key_3'], ['key_1', 'key_2']]", output)
 
-        xproto = \
-            """
+        xproto = """
             option app_label = "test";
 
             message Foo {
diff --git a/lib/xos-genx/xos-genx-tests/test_translator.py b/lib/xos-genx/xos-genx-tests/test_translator.py
index f3476cd..320021b 100644
--- a/lib/xos-genx/xos-genx-tests/test_translator.py
+++ b/lib/xos-genx/xos-genx-tests/test_translator.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -28,20 +27,23 @@
   required int32 vrouter_service = 4 [ null = "False",  blank = "False",  model = "VRouterService",  modifier = "required",  type = "link",  port = "device_ports",  link_type = "manytoone",  db_index = "True" ];
 }
 """
-VROUTER_XPROTO = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/xproto/vrouterport.xproto")
+VROUTER_XPROTO = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/xproto/vrouterport.xproto"
+)
 
 # Generate other formats from xproto
+
+
 class XProtoTranslatorTest(unittest.TestCase):
     def _test_proto_generator(self):
         args = XOSProcessorArgs()
         args.files = [VROUTER_XPROTO]
-        args.target = 'proto.xtarget'
+        args.target = "proto.xtarget"
         output = XOSProcessor.process(args)
         self.assertEqual(output, PROTO_EXPECTED_OUTPUT)
 
     def test_yaml_generator(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Port (PlCoreBase,ParameterMixin){
@@ -120,15 +122,14 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
 
         yaml_ir = yaml.load(output)
-        self.assertEqual(len(yaml_ir['items']), 4)
+        self.assertEqual(len(yaml_ir["items"]), 4)
 
     def test_gui_hidden_models(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -143,16 +144,15 @@
 """
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         yaml_ir = yaml.load(output)
-        self.assertEqual(len(yaml_ir['items']), 1)
-        self.assertIn('Bar', output)
-        self.assertNotIn('Foo', output)
+        self.assertEqual(len(yaml_ir["items"]), 1)
+        self.assertIn("Bar", output)
+        self.assertNotIn("Foo", output)
 
     def test_gui_hidden_model_fields(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -162,16 +162,15 @@
 """
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         yaml_ir = yaml.load(output)
-        self.assertEqual(len(yaml_ir['items']), 1)
-        self.assertIn('name', output)
-        self.assertNotIn('secret', output)
+        self.assertEqual(len(yaml_ir["items"]), 1)
+        self.assertIn("name", output)
+        self.assertNotIn("secret", output)
 
     def test_static_options(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -182,14 +181,13 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         self.assertIn("options:", output)
         self.assertIn(" {'id': 'container_vm', 'label': 'Container In VM'}", output)
 
     def test_not_static_options(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -199,13 +197,12 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         self.assertNotIn("options:", output)
 
     def test_default_value_in_modeldef(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -219,7 +216,7 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         self.assertIn('default: "bar"', output)
         self.assertIn('default: "false"', output)
@@ -228,8 +225,7 @@
         self.assertIn('default: "0"', output)
 
     def test_not_default_value_in_modeldef(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -239,13 +235,12 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
-        self.assertNotIn('default:', output)
+        self.assertNotIn("default:", output)
 
     def test_one_to_many_in_modeldef(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message ServiceDependency {
@@ -260,19 +255,28 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         # Service deps model
-        self.assertIn('{model: Service, type: manytoone, on_field: provider_service}', output)
-        self.assertIn('{model: Service, type: manytoone, on_field: provider_service}', output)
+        self.assertIn(
+            "{model: Service, type: manytoone, on_field: provider_service}", output
+        )
+        self.assertIn(
+            "{model: Service, type: manytoone, on_field: provider_service}", output
+        )
 
         # Service model
-        self.assertIn('{model: ServiceDependency, type: onetomany, on_field: provider_service}', output)
-        self.assertIn('{model: ServiceDependency, type: onetomany, on_field: provider_service}', output)
+        self.assertIn(
+            "{model: ServiceDependency, type: onetomany, on_field: provider_service}",
+            output,
+        )
+        self.assertIn(
+            "{model: ServiceDependency, type: onetomany, on_field: provider_service}",
+            output,
+        )
 
     def test_model_description(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -288,13 +292,12 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         self.assertIn('description: "This is the Foo model"', output)
 
     def test_model_verbose_name(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message Foo {
@@ -310,13 +313,12 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
         self.assertIn('verbose_name: "Verbose Foo Name"', output)
 
     def test_feedback_field(self):
-        xproto = \
-"""
+        xproto = """
 option app_label = "test";
 
 message ParentFoo {
@@ -330,13 +332,12 @@
 
         args = XOSProcessorArgs()
         args.inputs = xproto
-        args.target = 'modeldefs.xtarget'
+        args.target = "modeldefs.xtarget"
         output = XOSProcessor.process(args)
 
-        read_only = filter(lambda s: 'read_only: True' in s, output.splitlines())
-        self.assertEqual(len(read_only), 3) # readonly is 1 for ParentFoo and 2 for Foo
+        read_only = filter(lambda s: "read_only: True" in s, output.splitlines())
+        self.assertEqual(len(read_only), 3)  # readonly is 1 for ParentFoo and 2 for Foo
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
-
-
diff --git a/lib/xos-genx/xos-genx-tests/test_xos_security.py b/lib/xos-genx/xos-genx-tests/test_xos_security.py
index 766e102..3bd4653 100644
--- a/lib/xos-genx/xos-genx-tests/test_xos_security.py
+++ b/lib/xos-genx/xos-genx-tests/test_xos_security.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,25 +20,32 @@
 """The function below is for eliminating warnings arising due to the missing policy_output_enforcer,
 which is generated and loaded dynamically.
 """
+
+
 def policy_output_enforcer(x, y):
     raise Exception("Security enforcer not generated. Test failed.")
     return False
 
+
 """
-The tests below use the Python code target to generate 
+The tests below use the Python code target to generate
 Python security policies, set up an appropriate environment and execute the Python.
 The security policies here deliberately made complex in order to stress the processor.
 """
+
+
 class XProtoXOSSecurityTest(unittest.TestCase):
     def setUp(self):
-        self.target = XProtoTestHelpers.write_tmp_target("{{ xproto_fol_to_python_test('output',proto.policies.test_policy, None, '0') }}")
+        self.target = XProtoTestHelpers.write_tmp_target(
+            "{{ xproto_fol_to_python_test('output',proto.policies.test_policy, None, '0') }}"
+        )
 
     """
     This is the security policy for controllers
     """
+
     def test_controller_policy(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < ctx.user.is_admin | exists Privilege: Privilege.accessor_id = ctx.user.id & Privilege.object_type = "Deployment" & Privilege.permission = "role:admin" & Privilege.object_id = obj.id >
 """
         args = XOSProcessorArgs()
@@ -48,7 +54,7 @@
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_enforcer(obj, ctx):
@@ -64,10 +70,10 @@
     """
     This is the security policy for ControllerNetworks
     """
+
     def test_controller_network_policy(self):
-        xproto = \
-"""
-    policy test_policy < 
+        xproto = """
+    policy test_policy <
          ctx.user.is_admin
          | (exists Privilege:
              Privilege.accessor_id = ctx.user.id
@@ -86,7 +92,7 @@
         args.target = self.target
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_enforcer(obj, ctx):
@@ -104,9 +110,9 @@
     """
     This is the security policy for Slices
     """
+
     def test_slice_policy(self):
-        xproto = \
-"""
+        xproto = """
    policy site_policy <
             ctx.user.is_admin
             | (ctx.write_access -> exists Privilege: Privilege.object_type = "Site" & Privilege.object_id = obj.id & Privilege.accessor_id = ctx.user.id & Privilege.permission_id = "role:admin") >
@@ -127,7 +133,7 @@
              & Privilege.object_id = obj.site.id
              & Privilege.permission = "role:admin"))
             )>
-    
+
 """
         args = XOSProcessorArgs()
         args.inputs = xproto
@@ -135,25 +141,25 @@
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_enforcer(obj, ctx):
-	    i2 = ctx.user.is_admin
-	    i4 = policy_site_policy_enforcer(obj.site, ctx)
-	    i10 = ctx.write_access
-	    i11 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.id), Q(permission='role:admin'))))
-	    i8 = (i10 and i11)
-	    i14 = ctx.write_access
-	    i12 = (not i14)
-	    i13 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.id))))
-	    i9 = (i12 and i13)
-	    i6 = (i8 or i9)
-	    i7 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Site'), Q(object_id=obj.site.id), Q(permission='role:admin'))))
-	    i5 = (i6 or i7)
-	    i3 = (i4 and i5)
-	    i1 = (i2 or i3)
-	    return i1
+            i2 = ctx.user.is_admin
+            i4 = policy_site_policy_enforcer(obj.site, ctx)
+            i10 = ctx.write_access
+            i11 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.id), Q(permission='role:admin'))))
+            i8 = (i10 and i11)
+            i14 = ctx.write_access
+            i12 = (not i14)
+            i13 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.id))))
+            i9 = (i12 and i13)
+            i6 = (i8 or i9)
+            i7 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Site'), Q(object_id=obj.site.id), Q(permission='role:admin'))))
+            i5 = (i6 or i7)
+            i3 = (i4 and i5)
+            i1 = (i2 or i3)
+            return i1
         """
 
         # FIXME: Test this policy by executing it
@@ -162,13 +168,13 @@
     """
     This is the security policy for Users
     """
+
     def test_user_policy(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy <
          ctx.user.is_admin
          | ctx.user.id = obj.id
-         | (exists Privilege: 
+         | (exists Privilege:
              Privilege.accessor_id = ctx.user.id
              & Privilege.accessor_type = "User"
              & Privilege.permission = "role:admin"
@@ -181,7 +187,7 @@
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_enforcer(obj, ctx):
@@ -196,5 +202,6 @@
         # FIXME: Test this policy by executing it
         self.assertTrue(policy_output_enforcer is not None)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xos-genx-tests/test_xos_validation.py b/lib/xos-genx/xos-genx-tests/test_xos_validation.py
index f2f8ce3..257eb4d 100644
--- a/lib/xos-genx/xos-genx-tests/test_xos_validation.py
+++ b/lib/xos-genx/xos-genx-tests/test_xos_validation.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,21 +20,27 @@
 """The function below is for eliminating warnings arising due to the missing policy_output_validator,
 which is generated and loaded dynamically.
 """
+
+
 def policy_output_validator(x, y):
     raise Exception("Validator not generated. Test failed.")
     return False
 
+
 """
-The tests below use the Python code target to generate 
+The tests below use the Python code target to generate
 Python validation policies, set up an appropriate environment and execute the Python.
 """
+
+
 class XProtoXOSModelValidationTest(unittest.TestCase):
     def setUp(self):
-        self.target = XProtoTestHelpers.write_tmp_target("{{ xproto_fol_to_python_validator('output', proto.policies.test_policy, None, 'Necessary Failure') }}")
+        self.target = XProtoTestHelpers.write_tmp_target(
+            "{{ xproto_fol_to_python_validator('output', proto.policies.test_policy, None, 'Necessary Failure') }}"
+        )
 
     def test_instance_container(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < (obj.isolation = "container" | obj.isolation = "container_vm" ) -> (obj.image.kind = "container") >
 """
         args = XOSProcessorArgs()
@@ -45,10 +50,10 @@
         output = XOSProcessor.process(args)
 
         obj = FakeObject()
-        obj.isolation = 'container'
-        obj.kind = 'not a container'
+        obj.isolation = "container"
+        obj.kind = "not a container"
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -61,11 +66,10 @@
         """
 
         with self.assertRaises(Exception):
-           policy_output_validator(obj, {})
-    
+            policy_output_validator(obj, {})
+
     def test_slice_name_validation(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < not obj.id -> {{ obj.name.startswith(obj.site.login_base) }} >
 """
         args = XOSProcessorArgs()
@@ -75,10 +79,10 @@
         output = XOSProcessor.process(args)
 
         obj = FakeObject()
-        obj.isolation = 'container'
-        obj.kind = 'not a container'
+        obj.isolation = "container"
+        obj.kind = "not a container"
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -91,11 +95,10 @@
         """
 
         with self.assertRaises(Exception):
-           policy_output_validator(obj, {})
+            policy_output_validator(obj, {})
 
     def test_equal(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < not (ctx.user = obj.user) >
 """
 
@@ -105,7 +108,7 @@
 
         output = XOSProcessor.process(args)
 
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -121,11 +124,10 @@
         ctx.user = 1
 
         with self.assertRaises(Exception):
-           policy_output_validator(obj, ctx)
+            policy_output_validator(obj, ctx)
 
     def test_bin(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < not (ctx.is_admin = True | obj.empty = True) | False>
 """
 
@@ -134,7 +136,7 @@
         args.target = self.target
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -154,10 +156,8 @@
         with self.assertRaises(Exception):
             verdict = policy_output_validator(obj, ctx)
 
-        
     def test_exists(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < exists Privilege: Privilege.object_id = obj.id >
 """
         args = XOSProcessorArgs()
@@ -165,7 +165,7 @@
         args.target = self.target
 
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -175,17 +175,16 @@
         """
 
         self.assertTrue(policy_output_validator is not None)
-	
+
     def test_python(self):
-        xproto = \
-"""
+        xproto = """
     policy test_policy < {{ "jack" in ["the", "box"] }} = True >
 """
         args = XOSProcessorArgs()
         args.inputs = xproto
         args.target = self.target
         output = XOSProcessor.process(args)
-        exec(output) # This loads the generated function, which should look like this:
+        exec(output)  # This loads the generated function, which should look like this:
 
         """
         def policy_output_validator(obj, ctx):
@@ -200,8 +199,7 @@
 
     def test_forall(self):
         # This one we only parse
-        xproto = \
-"""
+        xproto = """
     policy test_policy < forall Credential: Credential.obj_id = obj_id >
 """
 
@@ -218,7 +216,8 @@
             return i1
         """
 
-        self.assertIn('policy_output_validator', output)
+        self.assertIn("policy_output_validator", output)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-genx/xosgenx/__init__.py b/lib/xos-genx/xosgenx/__init__.py
index d4e8062..b0fb0b2 100644
--- a/lib/xos-genx/xosgenx/__init__.py
+++ b/lib/xos-genx/xosgenx/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/lib/xos-genx/xosgenx/generator.py b/lib/xos-genx/xosgenx/generator.py
index 3355fb5..3e650be 100644
--- a/lib/xos-genx/xosgenx/generator.py
+++ b/lib/xos-genx/xosgenx/generator.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import print_function
 import plyxproto.parser as plyxproto
 import jinja2
 import os
@@ -23,9 +23,10 @@
 import yaml
 from colorama import Fore
 
-loader = jinja2.PackageLoader(__name__, 'templates')
+loader = jinja2.PackageLoader(__name__, "templates")
 env = jinja2.Environment(loader=loader)
 
+
 class XOSProcessorArgs:
     """ Helper class for use cases that want to call XOSProcessor directly, rather than executing xosgenx from the
         command line.
@@ -40,9 +41,13 @@
     default_dest_extension = None
     default_target = None
     default_checkers = None
-    default_verbosity = 0         # Higher numbers = more verbosity, lower numbers = less verbosity
-    default_include_models = []   # If neither include_models nor include_apps is specified, then all models will
-    default_include_apps = []     # be included.
+    default_verbosity = (
+        0
+    )  # Higher numbers = more verbosity, lower numbers = less verbosity
+    default_include_models = (
+        []
+    )  # If neither include_models nor include_apps is specified, then all models will
+    default_include_apps = []  # be included.
 
     def __init__(self, **kwargs):
         # set defaults
@@ -60,14 +65,14 @@
         self.include_apps = XOSProcessorArgs.default_include_apps
 
         # override defaults with kwargs
-        for (k,v) in kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(self, k, v)
 
-class XOSProcessor:
 
+class XOSProcessor:
     @staticmethod
     def _read_input_from_files(files):
-        input = ''
+        input = ""
         for fname in files:
             with open(fname) as infile:
                 input += infile.read()
@@ -75,7 +80,7 @@
 
     @staticmethod
     def _attach_parser(ast, args):
-        if hasattr(args, 'rev') and args.rev:
+        if hasattr(args, "rev") and args.rev:
             v = Proto2XProto()
             ast.accept(v)
 
@@ -86,7 +91,9 @@
     @staticmethod
     def _get_template(target):
         if not os.path.isabs(target):
-            return os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/targets/' + target)
+            return os.path.abspath(
+                os.path.dirname(os.path.realpath(__file__)) + "/targets/" + target
+            )
         return target
 
     @staticmethod
@@ -94,10 +101,10 @@
         # NOTE this method can be used in the jinja template
         def file_exists2(name):
             if attic is not None:
-                path = attic + '/' + name
+                path = attic + "/" + name
             else:
                 path = name
-            return (os.path.exists(path))
+            return os.path.exists(path)
 
         return file_exists2
 
@@ -106,60 +113,65 @@
         # NOTE this method can be used in the jinja template
         def include_file2(name):
             if attic is not None:
-                path = attic + '/' + name
+                path = attic + "/" + name
             else:
                 path = name
             return open(path).read()
+
         return include_file2
 
     @staticmethod
     def _load_jinja2_extensions(os_template_env, attic):
 
-        os_template_env.globals['include_file'] = XOSProcessor._include_file(attic)  # Generates a function
-        os_template_env.globals['file_exists'] = XOSProcessor._file_exists(attic)  # Generates a function
+        os_template_env.globals["include_file"] = XOSProcessor._include_file(
+            attic
+        )  # Generates a function
+        os_template_env.globals["file_exists"] = XOSProcessor._file_exists(
+            attic
+        )  # Generates a function
 
-        os_template_env.filters['yaml'] = yaml.dump
+        os_template_env.filters["yaml"] = yaml.dump
         for f in dir(jinja2_extensions):
-            if f.startswith('xproto'):
+            if f.startswith("xproto"):
                 os_template_env.globals[f] = getattr(jinja2_extensions, f)
         return os_template_env
 
     @staticmethod
     def _add_context(args):
-        if not hasattr(args, 'kv') or not args.kv:
+        if not hasattr(args, "kv") or not args.kv:
             return
         try:
             context = {}
-            for s in args.kv.split(','):
-                k, val = s.split(':')
+            for s in args.kv.split(","):
+                k, val = s.split(":")
                 context[k] = val
             return context
-        except Exception, e:
-            print e.message
+        except Exception as e:
+            print(e.message)
 
     @staticmethod
     def _write_single_file(rendered, dir, dest_file, quiet):
 
         file_name = "%s/%s" % (dir, dest_file)
-        file = open(file_name, 'w')
+        file = open(file_name, "w")
         file.write(rendered)
         file.close()
-        if quiet == False:
-            print "Saved: %s" % file_name
+        if not quiet:
+            print("Saved: %s" % file_name)
 
     @staticmethod
     def _write_file_per_model(rendered, dir, suffix, quiet):
         for m in rendered:
             file_name = "%s/%s%s" % (dir, m.lower(), suffix)
             if not rendered[m]:
-                if quiet == False:
-                    print "Not saving %s as it is empty" % file_name
+                if not quiet:
+                    print("Not saving %s as it is empty" % file_name)
             else:
-                file = open(file_name, 'w')
+                file = open(file_name, "w")
                 file.write(rendered[m])
                 file.close()
-                if quiet == False:
-                    print "Saved: %s" % file_name
+                if not quiet:
+                    print("Saved: %s" % file_name)
 
     @staticmethod
     def _write_split_target(rendered, dir, quiet):
@@ -167,21 +179,21 @@
         lines = rendered.splitlines()
         current_buffer = []
         for l in lines:
-            if (l.startswith('+++')):
+            if l.startswith("+++"):
 
                 if dir:
-                    path = dir + '/' + l[4:].lower()
+                    path = dir + "/" + l[4:].lower()
 
-                fil = open(path, 'w')
-                buf = '\n'.join(current_buffer)
+                fil = open(path, "w")
+                buf = "\n".join(current_buffer)
 
                 obuf = buf
 
                 fil.write(obuf)
                 fil.close()
 
-                if quiet == False:
-                    print "Save file to: %s" % path
+                if not quiet:
+                    print("Save file to: %s" % path)
 
                 current_buffer = []
             else:
@@ -189,50 +201,55 @@
 
     @staticmethod
     def _find_message_by_model_name(messages, model):
-        return next((x for x in messages if x['name'] == model), None)
+        return next((x for x in messages if x["name"] == model), None)
 
     @staticmethod
     def _find_last_nonempty_line(text, pointer):
         ne_pointer = pointer
         found = False
-        while ne_pointer!=0 and not found:
-            ne_pointer = text[:(ne_pointer-1)].rfind('\n')
-            if ne_pointer<0: ne_pointer = 0
-            if text[ne_pointer-1]!='\n':
+        while ne_pointer != 0 and not found:
+            ne_pointer = text[: (ne_pointer - 1)].rfind("\n")
+            if ne_pointer < 0:
+                ne_pointer = 0
+            if text[ne_pointer - 1] != "\n":
                 found = True
 
         return ne_pointer
 
     @staticmethod
-    def process(args, operator = None):
+    def process(args, operator=None):
         # Setting defaults
-        if not hasattr(args, 'attic'):
+        if not hasattr(args, "attic"):
             args.attic = None
-        if not hasattr(args, 'write_to_file'):
+        if not hasattr(args, "write_to_file"):
             args.write_to_file = None
-        if not hasattr(args, 'dest_file'):
+        if not hasattr(args, "dest_file"):
             args.dest_file = None
-        if not hasattr(args, 'dest_extension'):
+        if not hasattr(args, "dest_extension"):
             args.dest_extension = None
-        if not hasattr(args, 'output'):
+        if not hasattr(args, "output"):
             args.output = None
-        if not hasattr(args, 'quiet'):
+        if not hasattr(args, "quiet"):
             args.quiet = True
 
         # Validating
-        if args.write_to_file == 'single' and args.dest_file is None:
-            raise Exception("[XosGenX] write_to_file option is specified as 'single' but no dest_file is provided")
-        if args.write_to_file == 'model' and (args.dest_extension is None):
-            raise Exception("[XosGenX] write_to_file option is specified as 'model' but no dest_extension is provided")
+        if args.write_to_file == "single" and args.dest_file is None:
+            raise Exception(
+                "[XosGenX] write_to_file option is specified as 'single' but no dest_file is provided"
+            )
+        if args.write_to_file == "model" and (args.dest_extension is None):
+            raise Exception(
+                "[XosGenX] write_to_file option is specified as 'model' but no dest_extension is provided"
+            )
 
         if args.output is not None and not os.path.isabs(args.output):
             raise Exception("[XosGenX] The output dir must be an absolute path!")
         if args.output is not None and not os.path.isdir(args.output):
             raise Exception("[XosGenX] The output dir must be a directory!")
 
-        if hasattr(args, 'files'):
+        if hasattr(args, "files"):
             inputs = XOSProcessor._read_input_from_files(args.files)
-        elif hasattr(args, 'inputs'):
+        elif hasattr(args, "inputs"):
             inputs = args.inputs
         else:
             raise Exception("[XosGenX] No inputs provided!")
@@ -243,28 +260,29 @@
         else:
             template_path = operator
 
-
         [template_folder, template_name] = os.path.split(template_path)
         os_template_loader = jinja2.FileSystemLoader(searchpath=[template_folder])
         os_template_env = jinja2.Environment(loader=os_template_loader)
-        os_template_env = XOSProcessor._load_jinja2_extensions(os_template_env, args.attic)
+        os_template_env = XOSProcessor._load_jinja2_extensions(
+            os_template_env, args.attic
+        )
         template = os_template_env.get_template(template_name)
         context = XOSProcessor._add_context(args)
 
         parser = plyxproto.ProtobufAnalyzer()
         try:
             ast = parser.parse_string(inputs, debug=0)
-        except plyxproto.ParsingError, e:
+        except plyxproto.ParsingError as e:
             line, start, end = e.error_range
 
             ptr = XOSProcessor._find_last_nonempty_line(inputs, start)
 
             if start == 0:
-                beginning = ''
+                beginning = ""
             else:
-                beginning = inputs[ptr:start-1]
+                beginning = inputs[ptr: start - 1]
 
-            line_end_char = inputs[start+end:].find('\n')
+            line_end_char = inputs[start + end:].find("\n")
             line_end = inputs[line_end_char]
 
             if e.message:
@@ -272,11 +290,16 @@
             else:
                 error = "xproto parsing error"
 
-            print error + "\n" + Fore.YELLOW + "Line %d:"%line + Fore.WHITE
-            print beginning + Fore.YELLOW + inputs[start-1:start+end] + Fore.WHITE + line_end
+            print(error + "\n" + Fore.YELLOW + "Line %d:" % line + Fore.WHITE)
+            print(
+                beginning
+                + Fore.YELLOW
+                + inputs[start - 1: start + end]
+                + Fore.WHITE
+                + line_end
+            )
             exit(1)
 
-
         v = XOSProcessor._attach_parser(ast, args)
 
         if args.include_models or args.include_apps:
@@ -300,38 +323,42 @@
                 messages = [XOSProcessor._find_message_by_model_name(v.messages, model)]
 
                 rendered[model] = template.render(
-                    {"proto":
-                        {
-                            'message_table': models,
-                            'messages': messages,
-                            'policies': v.policies,
-                            'message_names': [m['name'] for m in v.messages]
+                    {
+                        "proto": {
+                            "message_table": models,
+                            "messages": messages,
+                            "policies": v.policies,
+                            "message_names": [m["name"] for m in v.messages],
                         },
                         "context": context,
-                        "options": v.options
+                        "options": v.options,
                     }
                 )
-            if (str(v.options.get("legacy", "false")).strip('"').lower() == "true"):
+            if str(v.options.get("legacy", "false")).strip('"').lower() == "true":
                 suffix = "_decl." + args.dest_extension
             else:
                 suffix = "." + args.dest_extension
-            XOSProcessor._write_file_per_model(rendered, args.output, suffix, args.quiet)
+            XOSProcessor._write_file_per_model(
+                rendered, args.output, suffix, args.quiet
+            )
         else:
             rendered = template.render(
-                {"proto":
-                    {
-                        'message_table': v.models,
-                        'messages': v.messages,
-                        'policies': v.policies,
-                        'message_names': [m['name'] for m in v.messages]
+                {
+                    "proto": {
+                        "message_table": v.models,
+                        "messages": v.messages,
+                        "policies": v.policies,
+                        "message_names": [m["name"] for m in v.messages],
                     },
                     "context": context,
-                    "options": v.options
+                    "options": v.options,
                 }
             )
             if args.output is not None and args.write_to_file == "target":
                 XOSProcessor._write_split_target(rendered, args.output, args.quiet)
             elif args.output is not None and args.write_to_file == "single":
-                XOSProcessor._write_single_file(rendered, args.output, args.dest_file, args.quiet)
+                XOSProcessor._write_single_file(
+                    rendered, args.output, args.dest_file, args.quiet
+                )
 
         return rendered
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/__init__.py b/lib/xos-genx/xosgenx/jinja2_extensions/__init__.py
index 859594c..bf7a812 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/__init__.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from .django import *
 from .base import *
 from .fol2 import *
 from .gui import *
 from .tosca import *
 from .checklib import *
+
+__all__ = ["django", "base", "fol2", "gui", "tosca", "checklib"]
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/base.py b/lib/xos-genx/xosgenx/jinja2_extensions/base.py
index e11d2ec..96e8dc2 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/base.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/base.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,49 +13,56 @@
 # limitations under the License.
 
 
+from __future__ import print_function
 import pdb
 import re
 from inflect import engine as inflect_engine_class
 
 inflect_engine = inflect_engine_class()
 
+
 class FieldNotFound(Exception):
     def __init__(self, message):
         super(FieldNotFound, self).__init__(message)
 
+
 def xproto_debug(**kwargs):
-    print kwargs
+    print(kwargs)
     pdb.set_trace()
 
+
 def xproto_unquote(s):
     return unquote(s)
 
+
 def unquote(s):
-    if (s.startswith('"') and s.endswith('"')):
+    if s.startswith('"') and s.endswith('"'):
         return s[1:-1]
     else:
         return s
 
+
 def xproto_singularize(field):
     try:
         # The user has set a singular, as an exception that cannot be handled automatically
-        singular = field['options']['singular']
+        singular = field["options"]["singular"]
         singular = unquote(singular)
     except KeyError:
-        singular = inflect_engine.singular_noun(field['name'])
+        singular = inflect_engine.singular_noun(field["name"])
         if singular is False:
             # singular_noun returns False on a noun it can't singularize
             singular = field["name"]
 
     return singular
 
+
 def xproto_singularize_pluralize(field):
     try:
         # The user has set a plural, as an exception that cannot be handled automatically
-        plural = field['options']['plural']
+        plural = field["options"]["plural"]
         plural = unquote(plural)
     except KeyError:
-        singular = inflect_engine.singular_noun(field['name'])
+        singular = inflect_engine.singular_noun(field["name"])
         if singular is False:
             # singular_noun returns False on a noun it can't singularize
             singular = field["name"]
@@ -65,86 +71,103 @@
 
     return plural
 
+
 def xproto_pluralize(field):
     try:
         # The user has set a plural, as an exception that cannot be handled automatically
-        plural = field['options']['plural']
+        plural = field["options"]["plural"]
         plural = unquote(plural)
     except KeyError:
-        plural = inflect_engine.plural_noun(field['name'])
+        plural = inflect_engine.plural_noun(field["name"])
 
     return plural
 
-def xproto_base_def(model_name, base, suffix='', suffix_list=[]):
-    if (model_name=='XOSBase'):
-        return '(models.Model, PlModelMixIn)'
-    elif (not base):
-        return ''
+
+def xproto_base_def(model_name, base, suffix="", suffix_list=[]):
+    if model_name == "XOSBase":
+        return "(models.Model, PlModelMixIn)"
+    elif not base:
+        return ""
     else:
-        int_base = [i['name']+suffix for i in base if i['name'] in suffix_list]
-        ext_base = [i['name'] for i in base if i['name'] not in suffix_list]
-        return '(' + ','.join(int_base + ext_base) + ')'
+        int_base = [i["name"] + suffix for i in base if i["name"] in suffix_list]
+        ext_base = [i["name"] for i in base if i["name"] not in suffix_list]
+        return "(" + ",".join(int_base + ext_base) + ")"
+
 
 def xproto_first_non_empty(lst):
     for l in lst:
-        if l: return l
+        if l:
+            return l
+
 
 def xproto_api_type(field):
     try:
-        if (unquote(field['options']['content_type'])=='date'):
-            return 'double'
+        if unquote(field["options"]["content_type"]) == "date":
+            return "double"
     except KeyError:
         pass
 
-    return field['type']
+    return field["type"]
 
 
 def xproto_base_name(n):
     # Hack - Refactor NetworkParameter* to make this go away
-    if (n.startswith('NetworkParameter')):
-        return '_'
+    if n.startswith("NetworkParameter"):
+        return "_"
 
-    expr = r'^[A-Z]+[a-z]*'
+    expr = r"^[A-Z]+[a-z]*"
 
     try:
         match = re.findall(expr, n)[0]
-    except:
-        return '_'
+    except BaseException:
+        return "_"
 
     return match
 
+
 def xproto_base_fields(m, table):
     fields = []
 
-    for b in m['bases']:
-        option1 = b['fqn']
+    for b in m["bases"]:
+        option1 = b["fqn"]
         try:
-            option2 = m['package'] + '.' + b['name']
+            option2 = m["package"] + "." + b["name"]
         except TypeError:
             option2 = option1
 
         accessor = None
-        if option1 in table: accessor = option1
-        elif option2 in table: accessor = option2
+        if option1 in table:
+            accessor = option1
+        elif option2 in table:
+            accessor = option2
 
         if accessor:
             base_fields = xproto_base_fields(table[accessor], table)
 
-            model_fields = [x.copy() for x in table[accessor]['fields']]
+            model_fields = [x.copy() for x in table[accessor]["fields"]]
             for field in model_fields:
                 field["accessor"] = accessor
 
             fields.extend(base_fields)
             fields.extend(model_fields)
 
-    if 'no_sync' in m['options'] and m['options']['no_sync']:
-        fields = [f for f in fields if f['name'] != 'backend_status' and f['name'] != 'backend_code']
+    if "no_sync" in m["options"] and m["options"]["no_sync"]:
+        fields = [
+            f
+            for f in fields
+            if f["name"] != "backend_status" and f["name"] != "backend_code"
+        ]
 
-    if 'no_policy' in m['options'] and m['options']['no_policy']:
-        fields = [f for f in fields if f['name'] != 'policy_status' and f['name'] != 'policy_code']
+    if "no_policy" in m["options"] and m["options"]["no_policy"]:
+        fields = [
+            f
+            for f in fields
+            if f["name"] != "policy_status" and f["name"] != "policy_code"
+        ]
 
     return fields
 
+
 def xproto_fields(m, table):
     """ Generate the full list of models for the xproto message `m` including fields from the classes it inherits.
 
@@ -165,9 +188,17 @@
     # The "id" field is a special field. Every model has one. Put it up front and pretend it's part of the
 
     if not fields:
-        raise Exception("Model %s has no fields. Check for missing base class." % m["name"])
+        raise Exception(
+            "Model %s has no fields. Check for missing base class." % m["name"]
+        )
 
-    id_field = {'type': 'int32', 'name': 'id', 'options': {}, "id": "1", "accessor": fields[0]["accessor"]}
+    id_field = {
+        "type": "int32",
+        "name": "id",
+        "options": {},
+        "id": "1",
+        "accessor": fields[0]["accessor"],
+    }
 
     fields = [id_field] + fields
 
@@ -176,12 +207,15 @@
     offset = 0
     last_accessor = fields[0]["accessor"]
     for field in fields:
-        if (field["accessor"] != last_accessor):
+        if field["accessor"] != last_accessor:
             last_accessor = field["accessor"]
             offset += 100
         field_id = int(field["id"])
         if (field_id < 1) or (field_id >= 100):
-            raise Exception("Only field numbers from 1 to 99 are permitted, field %s in model %s" % (field["name"], field["accessor"]))
+            raise Exception(
+                "Only field numbers from 1 to 99 are permitted, field %s in model %s"
+                % (field["name"], field["accessor"])
+            )
         field["id"] = int(field["id"]) + offset
 
     # Check for duplicates
@@ -190,20 +224,24 @@
         id = field["id"]
         dup = fields_by_number.get(id)
         if dup:
-            raise Exception("Field %s has duplicate number %d with field %s in model %s" % (field["name"], id, dup["name"], field["accessor"]))
+            raise Exception(
+                "Field %s has duplicate number %d with field %s in model %s"
+                % (field["name"], id, dup["name"], field["accessor"])
+            )
         fields_by_number[id] = field
 
     return fields
 
+
 def xproto_base_rlinks(m, table):
     links = []
 
-    for base in m['bases']:
-        b = base['name']
+    for base in m["bases"]:
+        b = base["name"]
         if b in table:
             base_rlinks = xproto_base_rlinks(table[b], table)
 
-            model_rlinks = [x.copy() for x in table[b]['rlinks']]
+            model_rlinks = [x.copy() for x in table[b]["rlinks"]]
             for link in model_rlinks:
                 link["accessor"] = b
 
@@ -212,6 +250,7 @@
 
     return links
 
+
 def xproto_rlinks(m, table):
     """ Return the reverse links for the xproto message `m`.
 
@@ -228,14 +267,16 @@
 
     links = xproto_base_rlinks(m, table) + model_rlinks
 
-    links = [x for x in links if ("+" not in x["src_port"]) and ("+" not in x["dst_port"])]
+    links = [
+        x for x in links if ("+" not in x["src_port"]) and ("+" not in x["dst_port"])
+    ]
 
     if links:
         last_accessor = links[0]["accessor"]
         offset = 0
         index = 1900
         for link in links:
-            if (link["accessor"] != last_accessor):
+            if link["accessor"] != last_accessor:
                 last_accessor = link["accessor"]
                 offset += 100
 
@@ -249,13 +290,15 @@
                 index += 1
 
         # check for duplicates
-        links_by_number={}
+        links_by_number = {}
         for link in links:
             id = link["id"]
-            dup=links_by_number.get(id)
+            dup = links_by_number.get(id)
             if dup:
-                raise Exception("Field %s has duplicate number %d in model %s with reverse field %s" %
-                                (link["src_port"], id, m["name"], dup["src_port"]))
+                raise Exception(
+                    "Field %s has duplicate number %d in model %s with reverse field %s"
+                    % (link["src_port"], id, m["name"], dup["src_port"])
+                )
             links_by_number[id] = link
 
     return links
@@ -264,40 +307,45 @@
 def xproto_base_links(m, table):
     links = []
 
-    for base in m['bases']:
-        b = base['name']
+    for base in m["bases"]:
+        b = base["name"]
         if b in table:
             base_links = xproto_base_links(table[b], table)
 
-            model_links = table[b]['links']
+            model_links = table[b]["links"]
             links.extend(base_links)
             links.extend(model_links)
     return links
 
+
 def xproto_string_type(xptags):
     try:
-        max_length = eval(xptags['max_length'])
-    except:
+        max_length = eval(xptags["max_length"])
+    except BaseException:
         max_length = 1024
 
-    if ('varchar' not in xptags):
-        return 'string'
+    if "varchar" not in xptags:
+        return "string"
     else:
-        return 'text'
+        return "text"
+
 
 def xproto_tuplify(nested_list_or_set):
-    if not isinstance(nested_list_or_set, list) and not isinstance(nested_list_or_set, set):
+    if not isinstance(nested_list_or_set, list) and not isinstance(
+        nested_list_or_set, set
+    ):
         return nested_list_or_set
     else:
         return tuple([xproto_tuplify(i) for i in nested_list_or_set])
 
-def xproto_field_graph_components(fields, model, tag='unique_with'):
+
+def xproto_field_graph_components(fields, model, tag="unique_with"):
     def find_components(graph):
         pending = set(graph.keys())
         components = []
 
         while pending:
-            front = { pending.pop() }
+            front = {pending.pop()}
             component = set()
 
             while front:
@@ -308,87 +356,96 @@
 
                 pending -= neighbours
                 component |= neighbours
-            
+
             components.append(component)
 
         return components
 
     field_graph = {}
-    field_names = {f['name'] for f in fields}
+    field_names = {f["name"] for f in fields}
 
     for f in fields:
         try:
-            tagged_str = unquote(f['options'][tag])
-            tagged_fields = tagged_str.split(',')
+            tagged_str = unquote(f["options"][tag])
+            tagged_fields = tagged_str.split(",")
 
             for uf in tagged_fields:
                 if uf not in field_names:
-                    raise FieldNotFound('Field "%s" not found in model "%s", referenced from field "%s" by option "%s"' % (uf, model['name'], f['name'], tag))
+                    raise FieldNotFound(
+                        'Field "%s" not found in model "%s", referenced from field "%s" by option "%s"'
+                        % (uf, model["name"], f["name"], tag)
+                    )
 
-                field_graph.setdefault(f['name'], set()).add(uf)
-                field_graph.setdefault(uf, set()).add(f['name'])
+                field_graph.setdefault(f["name"], set()).add(uf)
+                field_graph.setdefault(uf, set()).add(f["name"])
         except KeyError:
             pass
 
     return find_components(field_graph)
 
+
 def xproto_api_opts(field):
     options = []
-    if 'max_length' in field['options'] and field['type']=='string':
-        options.append('(val).maxLength = %s'%field['options']['max_length'])
+    if "max_length" in field["options"] and field["type"] == "string":
+        options.append("(val).maxLength = %s" % field["options"]["max_length"])
 
     try:
-        if field['options']['null'] == 'False':
-            options.append('(val).nonNull = true')
+        if field["options"]["null"] == "False":
+            options.append("(val).nonNull = true")
     except KeyError:
         pass
 
-    if 'link' in field and 'model' in field['options']:
-        options.append('(foreignKey).modelName = "%s"'%field['options']['model'])
+    if "link" in field and "model" in field["options"]:
+        options.append('(foreignKey).modelName = "%s"' % field["options"]["model"])
         if ("options" in field) and ("port" in field["options"]):
-            options.append('(foreignKey).reverseFieldName = "%s"' % field['options']['port'])
+            options.append(
+                '(foreignKey).reverseFieldName = "%s"' % field["options"]["port"]
+            )
 
     if options:
-        options_str = '[' + ', '.join(options) + ']'
+        options_str = "[" + ", ".join(options) + "]"
     else:
-        options_str = ''
+        options_str = ""
 
     return options_str
 
+
 def xproto_type_to_swagger_type(f):
     try:
-        content_type = f['options']['content_type']
+        content_type = f["options"]["content_type"]
         content_type = eval(content_type)
-    except:
+    except BaseException:
         content_type = None
         pass
 
-    if 'choices' in f['options']:
-        return 'string'
-    elif content_type == 'date':
-        return 'string'
-    elif f['type'] == 'bool':
-        return 'boolean'
-    elif f['type'] == 'string':
-        return 'string'
-    elif f['type'] in ['int','uint32','int32'] or 'link' in f:
-        return 'integer'
-    elif f['type'] in ['double','float']:
-        return 'string'
+    if "choices" in f["options"]:
+        return "string"
+    elif content_type == "date":
+        return "string"
+    elif f["type"] == "bool":
+        return "boolean"
+    elif f["type"] == "string":
+        return "string"
+    elif f["type"] in ["int", "uint32", "int32"] or "link" in f:
+        return "integer"
+    elif f["type"] in ["double", "float"]:
+        return "string"
+
 
 def xproto_field_to_swagger_enum(f):
-    if 'choices' in f['options']:
+    if "choices" in f["options"]:
         list = []
 
-        for c in eval(xproto_unquote(f['options']['choices'])):
+        for c in eval(xproto_unquote(f["options"]["choices"])):
             list.append(c[0])
 
         return list
     else:
         return False
 
+
 def xproto_is_true(x):
     # TODO: Audit xproto and make specification of trueness more uniform
-    if (x==True) or (x=="True") or (x=='"True"'):
+    if x is True or (x == "True") or (x == '"True"'):
         return True
     return False
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/checklib.py b/lib/xos-genx/xosgenx/jinja2_extensions/checklib.py
index a61f7ca..db61f01 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/checklib.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/checklib.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,62 +14,67 @@
 
 import ast
 
+
 def xproto_check_synchronizer(m):
     try:
-        sync_step_path = 'synchronizer/steps/sync_%s.py'%m['name'].lower()
+        sync_step_path = "synchronizer/steps/sync_%s.py" % m["name"].lower()
         sync_step = open(sync_step_path).read()
     except IOError:
-        return '510 Model needs a sync step %s'%sync_step_path
+        return "510 Model needs a sync step %s" % sync_step_path
 
     try:
         sync_step_ast = ast.parse(sync_step)
     except SyntaxError:
-        return '511 Could not parse sync step %s'%sync_step_path
+        return "511 Could not parse sync step %s" % sync_step_path
 
-    classes = filter(lambda x:isinstance(x, ast.ClassDef), sync_step_ast.body)
+    classes = filter(lambda x: isinstance(x, ast.ClassDef), sync_step_ast.body)
     found_sync_step_class = False
 
     for c in classes:
         base_names = [v.id for v in c.bases]
-        if 'SyncStep' in base_names or 'SyncInstanceUsingAnsible' in base_names:
-            attributes = filter(lambda x:isinstance(x, ast.Assign), c.body)
+        if "SyncStep" in base_names or "SyncInstanceUsingAnsible" in base_names:
+            attributes = filter(lambda x: isinstance(x, ast.Assign), c.body)
             for a in attributes:
                 target_names = [t.id for t in a.targets]
                 values = a.value.elts if isinstance(a.value, ast.List) else [a.value]
                 value_names = [v.id for v in values]
 
-                if 'observes' in target_names and m['name'] in value_names:
+                if "observes" in target_names and m["name"] in value_names:
                     found_sync_step_class = True
                     break
 
     if not found_sync_step_class:
-        return '512 Synchronizer needs a sync step class with an observes field containing %s'%m['name']
+        return (
+            "512 Synchronizer needs a sync step class with an observes field containing %s"
+            % m["name"]
+        )
     else:
-        return '200 OK'
+        return "200 OK"
 
 
 def xproto_check_policy(m):
     try:
-        model_policy_path = 'synchronizer/model_policies/model_policy_%s.py'%m['name'].lower()
+        model_policy_path = (
+            "synchronizer/model_policies/model_policy_%s.py" % m["name"].lower()
+        )
         model_policy = open(model_policy_path).read()
     except IOError:
-        return '510 Model needs a model policy %s'%model_policy_path
+        return "510 Model needs a model policy %s" % model_policy_path
 
     try:
         model_policy_ast = ast.parse(model_policy)
     except SyntaxError:
-        return '511 Could not parse sync step %s'%model_policy_path
+        return "511 Could not parse sync step %s" % model_policy_path
 
-    classes = filter(lambda x:isinstance(x, ast.ClassDef), model_policy_ast.body)
+    classes = filter(lambda x: isinstance(x, ast.ClassDef), model_policy_ast.body)
     found_model_policy_class = False
     for c in classes:
         base_names = [v.id for v in c.bases]
-        if 'Policy' in base_names or 'TenantWithContainerPolicy' in base_names:
+        if "Policy" in base_names or "TenantWithContainerPolicy" in base_names:
             found_model_policy_class = True
             break
 
     if not found_model_policy_class:
-        return '513 Synchronizer needs a model policy class'
+        return "513 Synchronizer needs a model policy class"
     else:
-        return '200 OK'
-
+        return "200 OK"
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/django.py b/lib/xos-genx/xosgenx/jinja2_extensions/django.py
index 64ab51a..d71ea51 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/django.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/django.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,183 +17,217 @@
 import pdb
 import re
 
+
 def django_content_type_string(xptags):
     # Check possibility of KeyError in caller
-    content_type = xptags['content_type']
+    content_type = xptags["content_type"]
 
     try:
         content_type = eval(content_type)
-    except:
+    except BaseException:
         pass
 
-    if (content_type=='url'):
-        return 'URLField'
-    if (content_type=='date'):
-        return 'DateTimeField'
-    elif (content_type=='ip'):
-        return 'GenericIPAddressField'
-    elif (content_type=='stripped' or content_type=='"stripped"'):
-        return 'StrippedCharField'
+    if content_type == "url":
+        return "URLField"
+    if content_type == "date":
+        return "DateTimeField"
+    elif content_type == "ip":
+        return "GenericIPAddressField"
+    elif content_type == "stripped" or content_type == '"stripped"':
+        return "StrippedCharField"
     else:
-        raise Exception('Unknown Type: %s'%content_type)
+        raise Exception("Unknown Type: %s" % content_type)
+
 
 def django_string_type(xptags):
     try:
-        max_length = eval(xptags['max_length'])
-    except:
+        max_length = eval(xptags["max_length"])
+    except BaseException:
         max_length = 1024 * 1024
 
-    if ('content_type' in xptags):
+    if "content_type" in xptags:
         return django_content_type_string(xptags)
-    elif (max_length<1024*1024):
-        return 'CharField'
+    elif max_length < 1024 * 1024:
+        return "CharField"
     else:
-        return 'TextField'
+        return "TextField"
+
 
 def xproto_django_type(xptype, xptags):
-    if (xptype=='string'):
+    if xptype == "string":
         return django_string_type(xptags)
-    elif (xptype=='float'):
-        return 'FloatField'
-    elif (xptype=='bool'):
-        return 'BooleanField'
-    elif (xptype=='uint32'):
-        return 'IntegerField'
-    elif (xptype=='int32'):
-        return 'IntegerField'
-    elif (xptype=='int64'):
-        return 'BigIntegerField'
+    elif xptype == "float":
+        return "FloatField"
+    elif xptype == "bool":
+        return "BooleanField"
+    elif xptype == "uint32":
+        return "IntegerField"
+    elif xptype == "int32":
+        return "IntegerField"
+    elif xptype == "int64":
+        return "BigIntegerField"
     else:
-        raise Exception('Unknown Type: %s'%xptype)
+        raise Exception("Unknown Type: %s" % xptype)
+
 
 def xproto_django_link_type(f):
-    if (f['link_type']=='manytoone'):
-        return 'ForeignKey'
-    elif (f['link_type']=='onetoone'):
-        return 'OneToOneField'
-    elif (f['link_type']=='manytomany'):
-        if (f['dst_port']):
-            return 'ManyToManyField'
+    if f["link_type"] == "manytoone":
+        return "ForeignKey"
+    elif f["link_type"] == "onetoone":
+        return "OneToOneField"
+    elif f["link_type"] == "manytomany":
+        if f["dst_port"]:
+            return "ManyToManyField"
         else:
-            return 'GenericRelation'
+            return "GenericRelation"
+
 
 def map_xproto_to_django(f):
-    allowed_keys=['help_text','default','max_length','modifier','blank','choices','db_index','null','editable','on_delete','verbose_name', 'auto_now_add', 'unique', 'min_value', 'max_value']
+    allowed_keys = [
+        "help_text",
+        "default",
+        "max_length",
+        "modifier",
+        "blank",
+        "choices",
+        "db_index",
+        "null",
+        "editable",
+        "on_delete",
+        "verbose_name",
+        "auto_now_add",
+        "unique",
+        "min_value",
+        "max_value",
+    ]
 
     # TODO evaluate if setting Null = False for all strings
-    m = {'modifier':{'optional':True, 'required':False, '_targets': ['null', 'blank']}}
+    m = {
+        "modifier": {"optional": True, "required": False, "_targets": ["null", "blank"]}
+    }
     out = {}
 
-    for k,v in f['options'].items():
+    for k, v in f["options"].items():
         if k in allowed_keys:
             try:
-                # NOTE this will be used to parse xproto optional/required field prefix and apply it to the null and blank fields
+                # NOTE this will be used to parse xproto optional/required field prefix
+                # and apply it to the null and blank fields
                 kv2 = m[k]
-                for t in kv2['_targets']:
+                for t in kv2["_targets"]:
                     out[t] = kv2[v]
-            except:
+            except BaseException:
                 out[k] = v
 
     return out
 
+
 def xproto_django_link_options_str(field, dport=None):
     output_dict = map_xproto_to_django(field)
 
-    if (dport and (dport=='+' or '+' not in dport)):
-        output_dict['related_name'] = '%r'%dport
+    if dport and (dport == "+" or "+" not in dport):
+        output_dict["related_name"] = "%r" % dport
 
     try:
-        if field['through']:
+        if field["through"]:
             d = {}
-            if isinstance(field['through'], str):
-                split = field['through'].rsplit('.',1)
-                d['name'] = split[-1]
-                if len(split)==2:
-                    d['package'] = split[0]
-                    d['fqn'] = 'package' + '.' + d['name']
+            if isinstance(field["through"], str):
+                split = field["through"].rsplit(".", 1)
+                d["name"] = split[-1]
+                if len(split) == 2:
+                    d["package"] = split[0]
+                    d["fqn"] = "package" + "." + d["name"]
                 else:
-                    d['fqn'] = d['name']
-                    d['package'] = ''
+                    d["fqn"] = d["name"]
+                    d["package"] = ""
             else:
-                d = field['through']
+                d = field["through"]
 
-            if not d['name'].endswith('_'+field['name']):
-                output_dict['through'] = '%r'%d['fqn']
+            if not d["name"].endswith("_" + field["name"]):
+                output_dict["through"] = "%r" % d["fqn"]
     except KeyError:
         pass
 
     return format_options_string(output_dict)
 
+
 def use_native_django_validators(k, v):
 
     validators_map = {
-        'min_value': 'MinValueValidator',
-        'max_value': 'MaxValueValidator'
+        "min_value": "MinValueValidator",
+        "max_value": "MaxValueValidator",
     }
 
     return "%s(%s)" % (validators_map[k], v)
 
+
 def format_options_string(d):
 
-    known_validators = ['min_value', 'max_value']
+    known_validators = ["min_value", "max_value"]
     validator_lst = []
 
-    if (not d):
-        return ''
+    if not d:
+        return ""
     else:
 
         lst = []
-        for k,v in d.items():
+        for k, v in d.items():
             if k in known_validators:
                 validator_lst.append(use_native_django_validators(k, v))
-            elif (type(v)==str and k=='default' and v.endswith('()"')):
-                lst.append('%s = %s'%(k,v[1:-3]))
-            elif (type(v)==str and v.startswith('"')): 
+            elif isinstance(v, str) and k == "default" and v.endswith('()"'):
+                lst.append("%s = %s" % (k, v[1:-3]))
+            elif isinstance(v, str) and v.startswith('"'):
                 try:
                     # unquote the value if necessary
                     tup = eval(v[1:-1])
-                    if (type(tup)==tuple):
-                        lst.append('%s = %r'%(k,tup))
+                    if isinstance(tup, tuple):
+                        lst.append("%s = %r" % (k, tup))
                     else:
-                        lst.append('%s = %s'%(k,v))
-                except:
-                    lst.append('%s = %s'%(k,v))
-            elif (type(v)==bool):
-                lst.append('%s = %r'%(k,bool(v)))
+                        lst.append("%s = %s" % (k, v))
+                except BaseException:
+                    lst.append("%s = %s" % (k, v))
+            elif isinstance(v, bool):
+                lst.append("%s = %r" % (k, bool(v)))
             else:
                 try:
-                    lst.append('%s = %r'%(k,int(v)))
+                    lst.append("%s = %r" % (k, int(v)))
                 except ValueError:
-                    lst.append('%s = %s'%(k,v))
-        validator_string = "validators=[%s]" % ', '.join(validator_lst)
-        option_string = ', '.join(lst)
+                    lst.append("%s = %s" % (k, v))
+        validator_string = "validators=[%s]" % ", ".join(validator_lst)
+        option_string = ", ".join(lst)
         if len(validator_lst) == 0:
             return option_string
         elif len(lst) == 0:
             return validator_string
         else:
-            return  option_string + ", " + validator_string
+            return option_string + ", " + validator_string
+
 
 def xproto_django_options_str(field, dport=None):
     output_dict = map_xproto_to_django(field)
 
-    if (dport=='_'):
-        dport = '+'
+    if dport == "_":
+        dport = "+"
 
-    if (dport and (dport=='+' or '+' not in dport)):
-        output_dict['related_name'] = '%r'%dport
+    if dport and (dport == "+" or "+" not in dport):
+        output_dict["related_name"] = "%r" % dport
 
     return format_options_string(output_dict)
 
+
 def xproto_camel_to_underscore(name):
-    return re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
+    return re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
+
 
 def xproto_validations(options):
     try:
-        return [map(str.strip, validation.split(':')) for validation in unquote(options['validators']).split(',')]
+        return [
+            map(str.strip, validation.split(":"))
+            for validation in unquote(options["validators"]).split(",")
+        ]
     except KeyError:
         return []
 
+
 def xproto_optioned_fields_to_list(fields, option, val):
     """
     List all the field that have a particural option
@@ -207,14 +240,15 @@
     optioned_fields = []
     for f in fields:
         option_names = []
-        for k, v in f['options'].items():
+        for k, v in f["options"].items():
             option_names.append(k)
 
-        if option in  option_names and f['options'][option] == val:
-            optioned_fields.append(f['name'])
+        if option in option_names and f["options"][option] == val:
+            optioned_fields.append(f["name"])
 
     return optioned_fields
 
+
 # TODO
 # - in modeldefs add info about this fields
 # - update the gui to have this fields as readonly
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/fol2.py b/lib/xos-genx/xosgenx/jinja2_extensions/fol2.py
index 73d04af..6d66117 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/fol2.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/fol2.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import print_function
 import astunparse
 import ast
 import random
@@ -21,18 +21,22 @@
 import jinja2
 from plyxproto.parser import *
 
-BINOPS = ['|', '&', '->']
-QUANTS = ['exists', 'forall']
+BINOPS = ["|", "&", "->"]
+QUANTS = ["exists", "forall"]
+
 
 class PolicyException(Exception):
     pass
 
+
 class ConstructNotHandled(Exception):
     pass
 
+
 class TrivialPolicy(Exception):
     pass
 
+
 class AutoVariable:
     def __init__(self, base):
         self.base = base
@@ -42,25 +46,29 @@
         return self
 
     def next(self):
-        var = 'i%d' % self.idx
+        var = "i%d" % self.idx
         self.idx += 1
         return var
 
+
 def gen_random_string():
-    return ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(5))
+    return "".join(
+        random.choice(string.ascii_lowercase + string.digits) for _ in range(5)
+    )
+
 
 class FOL2Python:
     def __init__(self, context_map=None):
         # This will produce i0, i1, i2 etc.
-        self.loopvar = iter(AutoVariable('i'))
-        self.verdictvar = iter(AutoVariable('result'))
+        self.loopvar = iter(AutoVariable("i"))
+        self.verdictvar = iter(AutoVariable("result"))
 
         self.loop_variable = self.loopvar.next()
         self.verdict_variable = self.verdictvar.next()
         self.context_map = context_map
 
         if not self.context_map:
-            self.context_map = {'user': 'self', 'obj': 'obj'}
+            self.context_map = {"user": "self", "obj": "obj"}
 
     def loop_next(self):
         self.loop_variable = self.loopvar.next()
@@ -72,12 +80,12 @@
         pass
 
     def format_term_for_query(self, model, term, django=False):
-        if term.startswith(model + '.'):
+        if term.startswith(model + "."):
             term = term[len(model) + 1:]
             if django:
-                term = term.replace('.', '__')
+                term = term.replace(".", "__")
             else:
-                term = '__elt' + '.' + term
+                term = "__elt" + "." + term
         return term
 
     def fol_to_python_filter(self, model, e, django=False, negate=False):
@@ -89,109 +97,114 @@
         if django:
             if negate:
                 # De Morgan's negation
-                q_bracket = '~Q(%s)'
-                or_expr = ','
-                and_expr = '|'
+                q_bracket = "~Q(%s)"
+                or_expr = ","
+                and_expr = "|"
             else:
-                q_bracket = 'Q(%s)'
-                or_expr = '|'
-                and_expr = ','
+                q_bracket = "Q(%s)"
+                or_expr = "|"
+                and_expr = ","
         else:
             if negate:
                 # De Morgan's negation
-                q_bracket = 'not %s'
-                or_expr = ' and '
-                and_expr = ' or '
+                q_bracket = "not %s"
+                or_expr = " and "
+                and_expr = " or "
             else:
-                q_bracket = '%s'
-                or_expr = ' or '
-                and_expr = ' and '
+                q_bracket = "%s"
+                or_expr = " or "
+                and_expr = " and "
 
-        if k in ['=','in']:
-            v = [self.format_term_for_query(
-                model, term, django=django) for term in v]
+        if k in ["=", "in"]:
+            v = [self.format_term_for_query(model, term, django=django) for term in v]
             if django:
-                operator_map = {'=':' = ','in':'__in'}
+                operator_map = {"=": " = ", "in": "__in"}
             else:
-                operator_map = {'=':' == ','in':'in'}
+                operator_map = {"=": " == ", "in": "in"}
             operator = operator_map[k]
             return [q_bracket % operator.join(v)]
-        elif k == '|':
-            components = [self.fol_to_python_filter(
-                model, x, django=django).pop() for x in v]
+        elif k == "|":
+            components = [
+                self.fol_to_python_filter(model, x, django=django).pop() for x in v
+            ]
             return [or_expr.join(components)]
-        elif k == '&':
-            components = [self.fol_to_python_filter(
-                model, x, django=django).pop() for x in v]
+        elif k == "&":
+            components = [
+                self.fol_to_python_filter(model, x, django=django).pop() for x in v
+            ]
             return [and_expr.join(components)]
-        elif k == '->':
-            components = [self.fol_to_python_filter(
-                model, x, django=django).pop() for x in v]
-            return ['~%s | %s' % (components[0], components[1])]
+        elif k == "->":
+            components = [
+                self.fol_to_python_filter(model, x, django=django).pop() for x in v
+            ]
+            return ["~%s | %s" % (components[0], components[1])]
 
     """ Convert a single leaf node from a string
         to an AST"""
+
     def str_to_ast(self, s):
         ast_module = ast.parse(s)
         return ast_module.body[0]
 
     def reduce_operands(self, operands):
-        if operands[0] in ['True','False']: 
-            return (operands[0],operands[1])
-        elif operands[1] in ['True','False']: 
-            return (operands[1],operands[0])
+        if operands[0] in ["True", "False"]:
+            return (operands[0], operands[1])
+        elif operands[1] in ["True", "False"]:
+            return (operands[1], operands[0])
         else:
             return None
 
     """ Simplify binops with constants """
+
     def simplify_binop(self, binop):
-        (k,v), = binop.items()
-        if k == '->':
+        (k, v), = binop.items()
+        if k == "->":
             lhs, rhs = v
-            if lhs == 'True':
+            if lhs == "True":
                 return rhs
-            elif rhs == 'True':
-                return 'True'
-            elif lhs == 'False':
-                return 'True'
-            elif rhs == 'False':
-                return {'not': lhs}
+            elif rhs == "True":
+                return "True"
+            elif lhs == "False":
+                return "True"
+            elif rhs == "False":
+                return {"not": lhs}
 
         var_expr = self.reduce_operands(v)
 
-        if not var_expr: return binop
+        if not var_expr:
+            return binop
         else:
             constant, var = var_expr
-            if k=='|':
-                if constant=='True':
-                    return 'True'
-                elif constant=='False':
+            if k == "|":
+                if constant == "True":
+                    return "True"
+                elif constant == "False":
                     return var
                 else:
                     raise Exception("Internal error - variable read as constant")
-            elif k=='&':
-                if constant=='True':
+            elif k == "&":
+                if constant == "True":
                     return var
-                elif constant=='False':
-                    return 'False'
+                elif constant == "False":
+                    return "False"
 
     def is_constant(self, var, fol):
         try:
             (k, v), = fol.items()
         except AttributeError:
-            k = 'term'
+            k = "term"
             v = fol
-        
-        if k in ['python', 'policy']:
-           # Treat as a constant and hoist, since it cannot be quantified
-           return True
-        elif k == 'term':
+
+        if k in ["python", "policy"]:
+            # Treat as a constant and hoist, since it cannot be quantified
+            return True
+        elif k == "term":
             return not v.startswith(var)
-        elif k == 'not':
+        elif k == "not":
             return self.is_constant(var, fol)
-        elif k in ['in', '=']:
+        elif k in ["in", "="]:
             lhs, rhs = v
-            return self.is_constant(var,lhs) and self.is_constant(var, rhs)
+            return self.is_constant(var, lhs) and self.is_constant(var, rhs)
         elif k in BINOPS:
             lhs, rhs = v
             return self.is_constant(lhs, var) and self.is_constant(rhs, var)
@@ -205,21 +218,21 @@
         try:
             (k, v), = fol.items()
         except AttributeError:
-            k = 'term'
+            k = "term"
             v = fol
 
-        if k in ['python', 'policy']:
-           # Treat as a constant and hoist, since it cannot be quantified
-           if fol not in constants:
-               constants.append(fol)
-           return constants
-        elif k == 'term':
-           if not v.startswith(var):
-               constants.append(v)
-           return constants
-        elif k == 'not':
+        if k in ["python", "policy"]:
+            # Treat as a constant and hoist, since it cannot be quantified
+            if fol not in constants:
+                constants.append(fol)
+            return constants
+        elif k == "term":
+            if not v.startswith(var):
+                constants.append(v)
+            return constants
+        elif k == "not":
             return self.find_constants(var, v, constants)
-        elif k in ['in', '=']:
+        elif k in ["in", "="]:
             lhs, rhs = v
             if isinstance(lhs, str) and isinstance(rhs, str):
                 if not lhs.startswith(var) and not rhs.startswith(var):
@@ -235,32 +248,34 @@
             return constants
         elif k in QUANTS:
             is_constant = self.is_constant(var, v[1])
-            if is_constant: constants.append(fol)
+            if is_constant:
+                constants.append(fol)
             return constants
         else:
             raise ConstructNotHandled(k)
 
     """ Hoist constants out of quantifiers. Depth-first. """
+
     def hoist_outer(self, fol):
         try:
             (k, v), = fol.items()
         except AttributeError:
-            k = 'term'
+            k = "term"
             v = fol
 
-        if k in ['python', 'policy']:
-           # Tainted, optimization and distribution not possible
-           return fol
-        elif k == 'term':
-           return fol
-        elif k == 'not':
+        if k in ["python", "policy"]:
+            # Tainted, optimization and distribution not possible
+            return fol
+        elif k == "term":
+            return fol
+        elif k == "not":
             vprime = self.hoist_outer(v)
-            return {'not': vprime}
-        elif k in ['in', '=']:
+            return {"not": vprime}
+        elif k in ["in", "="]:
             lhs, rhs = v
             rlhs = self.hoist_outer(lhs)
             rrhs = self.hoist_outer(rhs)
-            return {k:[rlhs,rrhs]}
+            return {k: [rlhs, rrhs]}
         elif k in BINOPS:
             lhs, rhs = v
             rlhs = self.hoist_outer(lhs)
@@ -271,7 +286,7 @@
             return fol_simplified
         elif k in QUANTS:
             rexpr = self.hoist_outer(v[1])
-            return self.hoist_quant(k, [v[0],rexpr])
+            return self.hoist_quant(k, [v[0], rexpr])
         else:
             raise ConstructNotHandled(k)
 
@@ -282,27 +297,29 @@
         try:
             (k, v), = fol.items()
         except AttributeError:
-            k = 'term'
+            k = "term"
             v = fol
 
-        if k == 'term':
-            if v == c: return value
-            else: return v
-        elif k == 'not':
+        if k == "term":
+            if v == c:
+                return value
+            else:
+                return v
+        elif k == "not":
             new_expr = self.replace_const(v, c, value)
-            if new_expr=='True': 
-                return 'False'
-            elif new_expr=='False': 
-                return 'True'
-            else: 
-                return {'not': new_expr}
-        elif k in ['in', '=']:
+            if new_expr == "True":
+                return "False"
+            elif new_expr == "False":
+                return "True"
+            else:
+                return {"not": new_expr}
+        elif k in ["in", "="]:
             lhs, rhs = v
             rlhs = self.replace_const(lhs, c, value)
             rrhs = self.replace_const(rhs, c, value)
 
-            if rlhs==rrhs:
-                return 'True'
+            if rlhs == rrhs:
+                return "True"
             else:
                 return {k: [rlhs, rrhs]}
         elif k in BINOPS:
@@ -310,12 +327,12 @@
 
             rlhs = self.replace_const(lhs, c, value)
             rrhs = self.replace_const(rhs, c, value)
-        
-            return self.simplify_binop({k:[rlhs,rrhs]})
+
+            return self.simplify_binop({k: [rlhs, rrhs]})
         elif k in QUANTS:
             var, expr = v
             new_expr = self.replace_const(expr, c, value)
-            if new_expr in ['True', 'False']:
+            if new_expr in ["True", "False"]:
                 return new_expr
             else:
                 return {k: [var, new_expr]}
@@ -323,16 +340,16 @@
             raise ConstructNotHandled(k)
 
     def shannon_expand(self, c, fol):
-        lhs = self.replace_const(fol, c, 'True')
-        rhs = self.replace_const(fol, c, 'False')
-        not_c = {'not': c}
-        rlhs = {'&': [c, lhs]}
+        lhs = self.replace_const(fol, c, "True")
+        rhs = self.replace_const(fol, c, "False")
+        not_c = {"not": c}
+        rlhs = {"&": [c, lhs]}
         rlhs = self.simplify_binop(rlhs)
 
-        rrhs = {'&': [not_c, rhs]}
+        rrhs = {"&": [not_c, rhs]}
         rrhs = self.simplify_binop(rrhs)
 
-        combined = {'|': [rlhs, rrhs]}
+        combined = {"|": [rlhs, rrhs]}
         return self.simplify_binop(combined)
 
     def hoist_quant(self, k, expr):
@@ -418,17 +435,24 @@
         if not tag:
             tag = gen_random_string()
 
-        policy_function_name_template = 'policy_%s_' + '%(random_string)s' % {'random_string': tag}
+        policy_function_name_template = "policy_%s_" + "%(random_string)s" % {
+            "random_string": tag
+        }
         policy_function_name = policy_function_name_template % policy_name
         self.verdict_next()
         function_str = """
 def %(fn_name)s(obj, ctx):
     if not %(vvar)s: raise XOSValidationError("%(message)s".format(obj=obj, ctx=ctx))
-        """ % {'fn_name': policy_function_name, 'vvar': self.verdict_variable, 'message': message}
+        """ % {
+            "fn_name": policy_function_name,
+            "vvar": self.verdict_variable,
+            "message": message,
+        }
 
         function_ast = self.str_to_ast(function_str)
-        policy_code = self.gen_test(policy_function_name_template, fol, self.verdict_variable)
-
+        policy_code = self.gen_test(
+            policy_function_name_template, fol, self.verdict_variable
+        )
 
         function_ast.body = [policy_code] + function_ast.body
 
@@ -438,17 +462,24 @@
         if not tag:
             tag = gen_random_string()
 
-        policy_function_name_template = '%s_' + '%(random_string)s' % {'random_string': tag}
+        policy_function_name_template = "%s_" + "%(random_string)s" % {
+            "random_string": tag
+        }
         policy_function_name = policy_function_name_template % policy_name
 
         self.verdict_next()
         function_str = """
 def %(fn_name)s(obj, ctx):
     return %(vvar)s
-        """ % {'fn_name': policy_function_name, 'vvar': self.verdict_variable}
+        """ % {
+            "fn_name": policy_function_name,
+            "vvar": self.verdict_variable,
+        }
 
         function_ast = self.str_to_ast(function_str)
-        policy_code = self.gen_test(policy_function_name_template, fol, self.verdict_variable)
+        policy_code = self.gen_test(
+            policy_function_name_template, fol, self.verdict_variable
+        )
 
         function_ast.body = [policy_code] + function_ast.body
 
@@ -456,11 +487,14 @@
 
     def gen_test(self, fn_template, fol, verdict_var, bindings=None):
         if isinstance(fol, str):
-            return self.str_to_ast('%(verdict_var)s = %(constant)s' % {'verdict_var': verdict_var, 'constant': fol})
+            return self.str_to_ast(
+                "%(verdict_var)s = %(constant)s"
+                % {"verdict_var": verdict_var, "constant": fol}
+            )
 
         (k, v), = fol.items()
 
-        if k == 'policy':
+        if k == "policy":
             policy_name, object_name = v
 
             policy_fn = fn_template % policy_name
@@ -470,39 +504,48 @@
 else:
     # Everybody has access to null objects
     %(verdict_var)s = True
-            """ % {'verdict_var': verdict_var, 'policy_fn': policy_fn, 'object_name': object_name}
+            """ % {
+                "verdict_var": verdict_var,
+                "policy_fn": policy_fn,
+                "object_name": object_name,
+            }
 
             call_ast = self.str_to_ast(call_str)
             return call_ast
-        if k == 'python':
+        if k == "python":
             try:
                 expr_ast = self.str_to_ast(v)
             except SyntaxError:
-                raise PolicyException('Syntax error in %s' % v)
+                raise PolicyException("Syntax error in %s" % v)
 
             if not isinstance(expr_ast, ast.Expr):
-                raise PolicyException(
-                    '%s is not an expression' % expr_ast)
+                raise PolicyException("%s is not an expression" % expr_ast)
 
             assignment_str = """
 %(verdict_var)s = (%(escape_expr)s)
-            """ % {'verdict_var': verdict_var, 'escape_expr': v}
+            """ % {
+                "verdict_var": verdict_var,
+                "escape_expr": v,
+            }
 
             assignment_ast = self.str_to_ast(assignment_str)
             return assignment_ast
-        elif k == 'not':
+        elif k == "not":
             top_vvar = verdict_var
             self.verdict_next()
             sub_vvar = self.verdict_variable
             block = self.gen_test(fn_template, v, sub_vvar)
             assignment_str = """
 %(verdict_var)s = not (%(subvar)s)
-                    """ % {'verdict_var': top_vvar, 'subvar': sub_vvar}
+                    """ % {
+                "verdict_var": top_vvar,
+                "subvar": sub_vvar,
+            }
 
             assignment_ast = self.str_to_ast(assignment_str)
 
             return ast.Module(body=[block, assignment_ast])
-        elif k in ['=','in']:
+        elif k in ["=", "in"]:
             # This is the simplest case, we don't recurse further
             # To use terms that are not simple variables, use
             # the Python escape, e.g. {{ slice.creator is not None }}
@@ -512,7 +555,7 @@
 
             try:
                 for t in lhs, rhs:
-                    py_expr = t['python']
+                    py_expr = t["python"]
 
                     self.verdict_next()
                     vv = self.verdict_variable
@@ -520,15 +563,17 @@
                     try:
                         expr_ast = self.str_to_ast(py_expr)
                     except SyntaxError:
-                        raise PolicyException('Syntax error in %s' % v)
+                        raise PolicyException("Syntax error in %s" % v)
 
                     if not isinstance(expr_ast, ast.Expr):
-                        raise PolicyException(
-                            '%s is not an expression' % expr_ast)
+                        raise PolicyException("%s is not an expression" % expr_ast)
 
                     assignment_str = """
 %(verdict_var)s = (%(escape_expr)s)
-                    """ % {'verdict_var': vv, 'escape_expr': py_expr}
+                    """ % {
+                        "verdict_var": vv,
+                        "escape_expr": py_expr,
+                    }
 
                     if t == lhs:
                         lhs = vv
@@ -540,14 +585,19 @@
             except TypeError:
                 pass
 
-            if k=='=':
-                operator='=='
-            elif k=='in':
-                operator='in'
+            if k == "=":
+                operator = "=="
+            elif k == "in":
+                operator = "in"
 
             comparison_str = """
 %(verdict_var)s = (%(lhs)s %(operator)s %(rhs)s)
-            """ % {'verdict_var': verdict_var, 'lhs': lhs, 'rhs': rhs, 'operator':operator}
+            """ % {
+                "verdict_var": verdict_var,
+                "lhs": lhs,
+                "rhs": rhs,
+                "operator": operator,
+            }
 
             comparison_ast = self.str_to_ast(comparison_str)
             combined_ast = ast.Module(body=assignments + [comparison_ast])
@@ -567,24 +617,30 @@
             lblock = self.gen_test(fn_template, lhs, lvar)
             rblock = self.gen_test(fn_template, rhs, rvar)
 
-            invert = ''
-            if k == '&':
-                binop = 'and'
-            elif k == '|':
-                binop = 'or'
-            elif k == '->':
-                binop = 'or'
-                invert = 'not'
+            invert = ""
+            if k == "&":
+                binop = "and"
+            elif k == "|":
+                binop = "or"
+            elif k == "->":
+                binop = "or"
+                invert = "not"
 
             binop_str = """
 %(verdict_var)s = %(invert)s %(lvar)s %(binop)s %(rvar)s
-            """ % {'verdict_var': top_vvar, 'invert': invert, 'lvar': lvar, 'binop': binop, 'rvar': rvar}
+            """ % {
+                "verdict_var": top_vvar,
+                "invert": invert,
+                "lvar": lvar,
+                "binop": binop,
+                "rvar": rvar,
+            }
 
             binop_ast = self.str_to_ast(binop_str)
 
             combined_ast = ast.Module(body=[lblock, rblock, binop_ast])
             return combined_ast
-        elif k == 'exists':
+        elif k == "exists":
             # If the variable starts with a capital letter,
             # we assume that it is a model. If it starts with
             # a small letter, we assume it is an enumerable
@@ -599,7 +655,11 @@
 
                 python_str = """
 %(verdict_var)s = not not %(model)s.objects.filter(%(query)s)
-                """ % {'verdict_var': verdict_var, 'model': var, 'query': entry}
+                """ % {
+                    "verdict_var": verdict_var,
+                    "model": var,
+                    "query": entry,
+                }
 
                 python_ast = ast.parse(python_str)
             else:
@@ -608,16 +668,20 @@
 
                 python_str = """
 %(verdict_var)s = filter(lambda __elt:%(query)s, %(model)s)
-                """ % {'verdict_var': verdict_var, 'model': var, 'query': entry}
+                """ % {
+                    "verdict_var": verdict_var,
+                    "model": var,
+                    "query": entry,
+                }
 
                 python_ast = ast.parse(python_str)
 
             return python_ast
-        elif k=='forall':
+        elif k == "forall":
             var, expr = v
 
             if var.istitle():
-                f = self.fol_to_python_filter(var, expr, django=True, negate = True)
+                f = self.fol_to_python_filter(var, expr, django=True, negate=True)
                 entry = f.pop()
 
                 self.verdict_next()
@@ -625,71 +689,95 @@
 
                 python_str = """
 %(verdict_var)s = not not %(model)s.objects.filter(%(query)s)
-                """ % {'verdict_var': vvar, 'model': var, 'query': entry}
+                """ % {
+                    "verdict_var": vvar,
+                    "model": var,
+                    "query": entry,
+                }
 
                 python_ast = ast.parse(python_str)
             else:
-                f = self.fol_to_python_filter(var, expr, django=False, negate = True)
+                f = self.fol_to_python_filter(var, expr, django=False, negate=True)
                 entry = f.pop()
 
                 python_str = """
 %(verdict_var)s = next(elt for elt in %(model)s if %(query)s)
-                """ % {'verdict_var': vvar, 'model': var, 'query': entry}
+                """ % {
+                    "verdict_var": vvar,
+                    "model": var,
+                    "query": entry,
+                }
 
                 python_ast = ast.parse(python_str)
 
             negate_str = """
 %(verdict_var)s = not %(vvar)s
-            """ % {'verdict_var': verdict_var, 'vvar': vvar}
+            """ % {
+                "verdict_var": verdict_var,
+                "vvar": vvar,
+            }
 
             negate_ast = ast.parse(negate_str)
 
             return ast.Module(body=[python_ast, negate_ast])
 
+
 def xproto_fol_to_python_test(policy, fol, model, tag=None):
     if isinstance(fol, jinja2.Undefined):
-        raise Exception('Could not find policy:', policy)
+        raise Exception("Could not find policy:", policy)
 
     f2p = FOL2Python()
     fol_reduced = f2p.hoist_outer(fol)
 
-    if fol_reduced in ['True','False'] and fol != fol_reduced:
-        raise TrivialPolicy("Policy %(name)s trivially reduces to %(reduced)s. If this is what you want, replace its contents with %(reduced)s"%{'name':policy, 'reduced':fol_reduced})
+    if fol_reduced in ["True", "False"] and fol != fol_reduced:
+        raise TrivialPolicy(
+            "Policy %(name)s trivially reduces to %(reduced)s. If this is what you want, replace its contents with %(reduced)s" % {
+                "name": policy,
+                "reduced": fol_reduced})
 
-    a = f2p.gen_test_function(fol_reduced, policy, tag='security_check')
+    a = f2p.gen_test_function(fol_reduced, policy, tag="security_check")
 
     return astunparse.unparse(a)
 
+
 def xproto_fol_to_python_validator(policy, fol, model, message, tag=None):
     if isinstance(fol, jinja2.Undefined):
-        raise Exception('Could not find policy:', policy)
+        raise Exception("Could not find policy:", policy)
 
     f2p = FOL2Python()
     fol_reduced = f2p.hoist_outer(fol)
 
-    if fol_reduced in ['True','False'] and fol != fol_reduced:
-        raise TrivialPolicy("Policy %(name)s trivially reduces to %(reduced)s. If this is what you want, replace its contents with %(reduced)s"%{'name':policy, 'reduced':fol_reduced})
+    if fol_reduced in ["True", "False"] and fol != fol_reduced:
+        raise TrivialPolicy(
+            "Policy %(name)s trivially reduces to %(reduced)s. If this is what you want, replace its contents with %(reduced)s" % {
+                "name": policy,
+                "reduced": fol_reduced})
 
-    a = f2p.gen_validation_function(fol_reduced, policy, message, tag='validator')
-    
+    a = f2p.gen_validation_function(fol_reduced, policy, message, tag="validator")
+
     return astunparse.unparse(a)
 
+
 def main():
     while True:
-        inp = ''
+        inp = ""
         while True:
             inp_line = raw_input()
-            if inp_line=='EOF': break
-            else: inp+=inp_line
-            
+            if inp_line == "EOF":
+                break
+            else:
+                inp += inp_line
+
         fol_lexer = lex.lex(module=FOLLexer())
-        fol_parser = yacc.yacc(module=FOLParser(), start='goal', outputdir='/tmp', debug=0)
+        fol_parser = yacc.yacc(
+            module=FOLParser(), start="goal", outputdir="/tmp", debug=0
+        )
 
         val = fol_parser.parse(inp, lexer=fol_lexer)
-        a = xproto_fol_to_python_test('pol', val, 'output', 'Test')
-        #f2p = FOL2Python()
-        #a = f2p.hoist_outer(val)
-        print a
+        a = xproto_fol_to_python_test("pol", val, "output", "Test")
+        # f2p = FOL2Python()
+        # a = f2p.hoist_outer(val)
+        print(a)
 
 
 if __name__ == "__main__":
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/gui.py b/lib/xos-genx/xosgenx/jinja2_extensions/gui.py
index 50bcf0e..245bbda 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/gui.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/gui.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,74 +15,78 @@
 
 from base import xproto_string_type, unquote
 
+
 def xproto_type_to_ui_type(f):
     try:
-        content_type = f['options']['content_type']
+        content_type = f["options"]["content_type"]
         content_type = eval(content_type)
-    except:
+    except BaseException:
         content_type = None
         pass
 
-    if 'choices' in f['options']:
-        return 'select';
-    elif content_type == 'date':
-        return 'date'
-    elif f['type'] == 'bool':
-        return 'boolean'
-    elif f['type'] == 'string':
-        return xproto_string_type(f['options'])
-    elif f['type'] in ['int','uint32','int32'] or 'link' in f:
-        return 'number'
-    elif f['type'] in ['double','float']:
-        return 'string'
+    if "choices" in f["options"]:
+        return "select"
+    elif content_type == "date":
+        return "date"
+    elif f["type"] == "bool":
+        return "boolean"
+    elif f["type"] == "string":
+        return xproto_string_type(f["options"])
+    elif f["type"] in ["int", "uint32", "int32"] or "link" in f:
+        return "number"
+    elif f["type"] in ["double", "float"]:
+        return "string"
+
 
 def xproto_options_choices_to_dict(choices):
     list = []
 
     for c in eval(choices):
-        list.append({'id': c[0], 'label': c[1]})
+        list.append({"id": c[0], "label": c[1]})
     if len(list) > 0:
         return list
     else:
         return None
 
+
 def xproto_validators(f):
     # To be cleaned up when we formalize validation in xproto
     validators = []
 
     # bound-based validators
-    bound_validators = [('max_length','maxlength'), ('min', 'min'), ('max', 'max')]
+    bound_validators = [("max_length", "maxlength"), ("min", "min"), ("max", "max")]
 
     for v0, v1 in bound_validators:
         try:
-            validators.append({'name':v1, 'int_value':int(f['options'][v0])})
+            validators.append({"name": v1, "int_value": int(f["options"][v0])})
         except KeyError:
             pass
 
     # validators based on content_type
-    content_type_validators = ['ip', 'url', 'email']
+    content_type_validators = ["ip", "url", "email"]
 
     for v in content_type_validators:
-        #if f['name']=='ip': pdb.set_trace()
+        # if f['name']=='ip': pdb.set_trace()
         try:
-            val = unquote(f['options']['content_type'])==v
+            val = unquote(f["options"]["content_type"]) == v
             if not val:
                 raise KeyError
 
-            validators.append({'name':v, 'bool_value': True})
+            validators.append({"name": v, "bool_value": True})
         except KeyError:
             pass
 
     # required validator
     try:
-        required = f['options']['blank']=='False' and f['options']['null']=='False'
+        required = f["options"]["blank"] == "False" and f["options"]["null"] == "False"
         if required:
-            validators.append({'name':'required', 'bool_value':required})
+            validators.append({"name": "required", "bool_value": required})
     except KeyError:
         pass
 
     return validators
 
+
 def is_number(s):
     try:
         float(s)
@@ -91,16 +94,17 @@
     except ValueError:
         return False
 
+
 def xproto_default_to_gui(default):
     val = "null"
     if is_number(default):
         val = str(default)
-    elif eval(default) == True:
-        val = 'true'
-    elif eval(default) == False:
-        val = 'false'
-    elif eval(default) == None:
-        val = 'null'
+    elif eval(default) is True:
+        val = "true"
+    elif eval(default) is False:
+        val = "false"
+    elif eval(default) is None:
+        val = "null"
     else:
         val = str(default)
     return val
@@ -111,17 +115,20 @@
     seen = []
     for l in llst:
         try:
-            t = l['link_type']
-        except KeyError, e:
+            t = l["link_type"]
+        except KeyError as e:
             raise e
 
-        if l['peer']['fqn'] not in seen and t != 'manytomany':
-            on_field = 'null'
-            if l['link_type'] == 'manytoone':
-                on_field = l['src_port']
-            elif l['link_type'] == 'onetomany':
-                on_field = l['dst_port']
-            outlist.append('- {model: %s, type: %s, on_field: %s}\n' % (l['peer']['name'], l['link_type'], on_field))
-        seen.append(l['peer'])
+        if l["peer"]["fqn"] not in seen and t != "manytomany":
+            on_field = "null"
+            if l["link_type"] == "manytoone":
+                on_field = l["src_port"]
+            elif l["link_type"] == "onetomany":
+                on_field = l["dst_port"]
+            outlist.append(
+                "- {model: %s, type: %s, on_field: %s}\n"
+                % (l["peer"]["name"], l["link_type"], on_field)
+            )
+        seen.append(l["peer"])
 
     return outlist
diff --git a/lib/xos-genx/xosgenx/jinja2_extensions/tosca.py b/lib/xos-genx/xosgenx/jinja2_extensions/tosca.py
index 996d63d..d8eada7 100644
--- a/lib/xos-genx/xosgenx/jinja2_extensions/tosca.py
+++ b/lib/xos-genx/xosgenx/jinja2_extensions/tosca.py
@@ -14,12 +14,14 @@
 
 from xosgenx.jinja2_extensions import xproto_field_graph_components
 
+
 def xproto_tosca_required(null, blank, default=None):
 
-    if null == 'True' or blank == 'True' or default != 'False':
+    if null == "True" or blank == "True" or default != "False":
         return "false"
     return "true"
 
+
 def xproto_tosca_field_type(type):
     """
     TOSCA requires fields of type 'bool' to be 'boolean'
@@ -33,30 +35,41 @@
     else:
         return type
 
+
 def xproto_fields_to_tosca_keys(fields, m):
     keys = []
 
     # look for one_of keys
-    _one_of = xproto_field_graph_components(fields, m, 'tosca_key_one_of')
+    _one_of = xproto_field_graph_components(fields, m, "tosca_key_one_of")
     one_of = [list(i) for i in _one_of]
 
     # look for explicit keys
     for f in fields:
-        if 'tosca_key' in f['options'] and f['options']['tosca_key'] and 'link' not in f:
-            keys.append(f['name'])
-        if 'tosca_key' in f['options'] and f['options']['tosca_key'] and ('link' in f and f['link']):
-            keys.append("%s_id" % f['name'])
+        if (
+            "tosca_key" in f["options"]
+            and f["options"]["tosca_key"]
+            and "link" not in f
+        ):
+            keys.append(f["name"])
+        if (
+            "tosca_key" in f["options"]
+            and f["options"]["tosca_key"]
+            and ("link" in f and f["link"])
+        ):
+            keys.append("%s_id" % f["name"])
     # if not keys are specified and there is a name field, use that as key.
-    if len(keys) == 0 and 'name' in map(lambda f: f['name'], fields):
-        keys.append('name')
+    if len(keys) == 0 and "name" in map(lambda f: f["name"], fields):
+        keys.append("name")
 
     for of in one_of:
         # check if the field is a link, and in case add _id
         for index, f in enumerate(of):
             try:
-                field = [x for x in fields if x['name'] == f and ('link' in x and x['link'])][0]
+                field = [
+                    x for x in fields if x["name"] == f and ("link" in x and x["link"])
+                ][0]
                 of[index] = "%s_id" % f
-            except IndexError, e:
+            except IndexError as e:
                 # the field is not a link
                 pass
 
diff --git a/lib/xos-genx/xosgenx/proto2xproto.py b/lib/xos-genx/xosgenx/proto2xproto.py
index d2bde67..f1dc959 100644
--- a/lib/xos-genx/xosgenx/proto2xproto.py
+++ b/lib/xos-genx/xosgenx/proto2xproto.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -27,78 +26,90 @@
 import ply.lex as lex
 import ply.yacc as yacc
 
+
 class Stack(list):
-    def push(self,x):
+    def push(self, x):
         self.append(x)
 
+
 def str_to_dict(s):
-    lst = s.rsplit('.',1)
+    lst = s.rsplit(".", 1)
     name = lst[-1]
 
-    if len(lst)==2:
+    if len(lst) == 2:
         package = lst[0]
     else:
-        package = ''
+        package = ""
 
-    return {'name': name, 'package': package, 'fqn': s}
+    return {"name": name, "package": package, "fqn": s}
 
 
 def replace_link(obj):
+    try:
+        link = obj.link
         try:
-            link = obj.link
-            try:
-                through = link['through']
-            except KeyError:
-                through = None
+            through = link["through"]
+        except KeyError:
+            through = None
 
-            try:
-                through_str = through[1:-1]
-            except TypeError:
-                through_str = None
+        try:
+            through_str = through[1:-1]
+        except TypeError:
+            through_str = None
 
-            if through_str:
-                through_dict = str_to_dict(through_str)
-            else:
-                through_dict = {}
+        if through_str:
+            through_dict = str_to_dict(through_str)
+        else:
+            through_dict = {}
 
-            model_dict = str_to_dict(link['model'][1:-1])
+        model_dict = str_to_dict(link["model"][1:-1])
 
-            ls = m.LinkSpec(obj, m.LinkDefinition(link['link'][1:-1],obj.name,model_dict,link['port'][1:-1],through_dict))
-            return ls
-        except:
-            return obj
+        ls = m.LinkSpec(
+            obj,
+            m.LinkDefinition(
+                link["link"][1:-1],
+                obj.name,
+                model_dict,
+                link["port"][1:-1],
+                through_dict,
+            ),
+        )
+        return ls
+    except BaseException:
+        return obj
+
 
 class Proto2XProto(Visitor):
     fol_lexer = lex.lex(module=FOLLexer())
-    fol_parser = yacc.yacc(module=FOLParser(), start='goal', debug=0, outputdir='/tmp')
+    fol_parser = yacc.yacc(module=FOLParser(), start="goal", debug=0, outputdir="/tmp")
 
     def __init__(self):
         super(Proto2XProto, self).__init__()
 
         self.stack = Stack()
         self.count_stack = Stack()
-        self.content=""
-        self.offset=0
-        self.statementsChanged=0
+        self.content = ""
+        self.offset = 0
+        self.statementsChanged = 0
         self.message_options = {}
         self.options = {}
         self.current_message_name = None
 
-        self.xproto_message_options = ['bases']
-        self.xproto_field_options = ['model']
+        self.xproto_message_options = ["bases"]
+        self.xproto_field_options = ["model"]
         self.verbose = 0
         self.first_field = True
         self.first_method = True
-    
+
     def replace_policy(self, obj):
         if isinstance(obj, m.OptionStatement):
             rhs = obj.value.value.pval
             if rhs.startswith('"') and rhs.endswith('"'):
                 rhs = rhs[1:-1]
 
-            if rhs.startswith('policy:'):
-                str = rhs.split(':',1)[1]
-                val = self.fol_parser.parse(str, lexer = self.fol_lexer)
+            if rhs.startswith("policy:"):
+                str = rhs.split(":", 1)[1]
+                val = self.fol_parser.parse(str, lexer=self.fol_lexer)
 
                 return m.PolicyDefinition(obj.name, val)
 
@@ -110,9 +121,9 @@
             for fd in obj.fieldDirective:
                 k = fd.pval.name.value.pval
                 v = fd.pval.value.value.pval
-                opts[k]=v
+                opts[k] = v
 
-            if ('model' in opts and 'link' in opts and 'port' in opts):
+            if "model" in opts and "link" in opts and "port" in opts:
                 obj.link = opts
             pass
         except KeyError:
@@ -121,41 +132,42 @@
     def proto_to_xproto_message(self, obj):
         try:
             try:
-                bases = self.message_options['bases'].split(',')
+                bases = self.message_options["bases"].split(",")
             except KeyError:
                 bases = []
 
-            bases = map(lambda x:str_to_dict(x[1:-1]), bases)
+            bases = map(lambda x: str_to_dict(x[1:-1]), bases)
             obj.bases = bases
         except KeyError:
             raise
 
     def map_field(self, obj, s):
-        if 'model' in s:
-            link = m.LinkDefinition('onetoone','src','name','dst', obj.linespan, obj.lexspan, obj.p)
+        if "model" in s:
+            link = m.LinkDefinition(
+                "onetoone", "src", "name", "dst", obj.linespan, obj.lexspan, obj.p
+            )
             lspec = m.LinkSpec(link, obj)
         else:
             lspec = obj
         return lspec
 
-
     def get_stack(self):
         return self.stack
 
     def visit_PackageStatement(self, obj):
-        '''Ignore'''
+        """Ignore"""
         return True
 
     def visit_ImportStatement(self, obj):
-        '''Ignore'''
+        """Ignore"""
         return True
 
     def visit_OptionStatement(self, obj):
-        if (self.current_message_name):
+        if self.current_message_name:
             k = obj.name.value.pval
             self.message_options[k] = obj.value.value.pval
-            if (k in self.xproto_message_options):
-               obj.mark_for_deletion = True  
+            if k in self.xproto_message_options:
+                obj.mark_for_deletion = True
         else:
             self.options[obj.name.value.pval] = obj.value.value.pval
 
@@ -197,10 +209,10 @@
         self.message_options = {}
 
         return True
-    
+
     def visit_MessageDefinition_post(self, obj):
         self.proto_to_xproto_message(obj)
-        obj.body = filter(lambda x:not hasattr(x, 'mark_for_deletion'), obj.body)
+        obj.body = filter(lambda x: not hasattr(x, "mark_for_deletion"), obj.body)
         obj.body = map(replace_link, obj.body)
 
         self.current_message_name = None
@@ -230,7 +242,7 @@
     def visit_Proto(self, obj):
         self.count_stack.push(len(obj.body))
         return True
-    
+
     def visit_Proto_post(self, obj):
 
         obj.body = [self.replace_policy(o) for o in obj.body]
diff --git a/lib/xos-genx/xosgenx/version.py b/lib/xos-genx/xosgenx/version.py
index a118c43..79c5702 100644
--- a/lib/xos-genx/xosgenx/version.py
+++ b/lib/xos-genx/xosgenx/version.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/lib/xos-genx/xosgenx/xos2jinja.py b/lib/xos-genx/xosgenx/xos2jinja.py
index aca2468..471be93 100644
--- a/lib/xos-genx/xosgenx/xos2jinja.py
+++ b/lib/xos-genx/xosgenx/xos2jinja.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import print_function
 import plyxproto.model as m
 from plyxproto.helpers import Visitor
 import argparse
@@ -25,15 +25,20 @@
 import copy
 import pdb
 
+
 class MissingPolicyException(Exception):
     pass
 
+
 def find_missing_policy_calls(name, policies, policy):
     if isinstance(policy, dict):
         (k, lst), = policy.items()
-        if k=='policy':
+        if k == "policy":
             policy_name = lst[0]
-            if policy_name not in policies: raise MissingPolicyException("Policy %s invoked missing policy %s"%(name, policy_name))
+            if policy_name not in policies:
+                raise MissingPolicyException(
+                    "Policy %s invoked missing policy %s" % (name, policy_name)
+                )
         else:
             for p in lst:
                 find_missing_policy_calls(name, policies, p)
@@ -41,26 +46,27 @@
         for p in lst:
             find_missing_policy_calls(name, policies, p)
 
+
 def dotname_to_fqn(dotname):
     b_names = [part.pval for part in dotname]
-    package = '.'.join(b_names[:-1])
+    package = ".".join(b_names[:-1])
     name = b_names[-1]
     if package:
-        fqn = package + '.' + name
+        fqn = package + "." + name
     else:
         fqn = name
-    return {'name': name, 'fqn': fqn, 'package': package}
+    return {"name": name, "fqn": fqn, "package": package}
 
 
 def dotname_to_name(dotname):
     b_names = [part.pval for part in dotname]
-    return '.'.join(b_names)
+    return ".".join(b_names)
 
 
 def count_messages(body):
     count = 0
     for e in body:
-        if (type(e) == m.MessageDefinition):
+        if isinstance(e, m.MessageDefinition):
             count += 1
     return count
 
@@ -68,7 +74,7 @@
 def count_fields(body):
     count = 0
     for e in body:
-        if (type(e) in [m.LinkDefinition, m.FieldDefinition, m.LinkSpec]):
+        if type(e) in [m.LinkDefinition, m.FieldDefinition, m.LinkSpec]:
             count += 1
     return count
 
@@ -90,8 +96,9 @@
         self.append(x)
 
 
-''' XOS2Jinja overrides the underlying visitor pattern to transform the tree
-    in addition to traversing it '''
+""" XOS2Jinja overrides the underlying visitor pattern to transform the tree
+    in addition to traversing it """
+
 
 class XOS2Jinja(Visitor):
     def __init__(self, args):
@@ -114,7 +121,7 @@
 
     def visit_PolicyDefinition(self, obj):
         if self.package:
-            pname = '.'.join([self.package, obj.name.value.pval])
+            pname = ".".join([self.package, obj.name.value.pval])
         else:
             pname = obj.name.value.pval
 
@@ -126,17 +133,17 @@
     def visit_PackageStatement(self, obj):
         dotlist = obj.name.value
         dotlist2 = [f.pval for f in dotlist]
-        dotstr = '.'.join(dotlist2)
+        dotstr = ".".join(dotlist2)
         self.package = dotstr
         return True
 
     def visit_ImportStatement(self, obj):
-        '''Ignore'''
+        """Ignore"""
         return True
 
     def visit_OptionStatement(self, obj):
-        if not hasattr(obj, 'mark_for_deletion'):
-            if (self.current_message_name):
+        if not hasattr(obj, "mark_for_deletion"):
+            if self.current_message_name:
                 self.message_options[obj.name.value.pval] = obj.value.value.pval
             else:
                 self.options[obj.name.value.pval] = obj.value.value.pval
@@ -159,7 +166,7 @@
         except AttributeError:
             name = obj.name.value
 
-        if type(obj.value) == list:
+        if isinstance(obj.value, list):
             value = dotname_to_name(obj.value)
         else:
             value = name_to_value(obj)
@@ -168,52 +175,52 @@
         return True
 
     def visit_FieldType(self, obj):
-        '''Field type, if type is name, then it may need refactoring consistent with refactoring rules according to the table'''
+        """Field type, if type is name, then it may need refactoring consistent with refactoring rules according to the table"""
         return True
 
     def visit_LinkDefinition(self, obj):
         s = {}
 
         try:
-            s['link_type'] = obj.link_type.pval
+            s["link_type"] = obj.link_type.pval
         except AttributeError:
-            s['link_type'] = obj.link_type
+            s["link_type"] = obj.link_type
 
-        s['src_port'] = obj.src_port.value.pval
-        s['name'] = obj.src_port.value.pval
+        s["src_port"] = obj.src_port.value.pval
+        s["name"] = obj.src_port.value.pval
         try:
-            s['policy'] = obj.policy.pval
+            s["policy"] = obj.policy.pval
         except AttributeError:
-            s['policy'] = None
+            s["policy"] = None
 
         try:
-            s['dst_port'] = obj.dst_port.value.pval
+            s["dst_port"] = obj.dst_port.value.pval
         except AttributeError:
-            s['dst_port'] = obj.dst_port
+            s["dst_port"] = obj.dst_port
 
-        if type(obj.through) == list:
-            s['through'] = dotname_to_fqn(obj.through)
+        if isinstance(obj.through, list):
+            s["through"] = dotname_to_fqn(obj.through)
         else:
             try:
-                s['through'] = obj.through.pval
+                s["through"] = obj.through.pval
             except AttributeError:
-                s['through'] = obj.through
+                s["through"] = obj.through
 
-        if type(obj.name) == list:
-            s['peer'] = dotname_to_fqn(obj.name)
+        if isinstance(obj.name, list):
+            s["peer"] = dotname_to_fqn(obj.name)
         else:
             try:
-                s['peer'] = obj.name.pval
+                s["peer"] = obj.name.pval
             except AttributeError:
-                s['peer'] = obj.name
+                s["peer"] = obj.name
 
         try:
-            s['reverse_id'] = obj.reverse_id.pval
+            s["reverse_id"] = obj.reverse_id.pval
         except AttributeError:
-            s['reverse_id'] = obj.reverse_id
+            s["reverse_id"] = obj.reverse_id
 
-        s['_type'] = 'link'
-        s['options'] = {'modifier': 'optional'}
+        s["_type"] = "link"
+        s["options"] = {"modifier": "optional"}
 
         self.stack.push(s)
         return True
@@ -226,21 +233,21 @@
         s = {}
 
         if isinstance(obj.ftype, m.Name):
-            s['type'] = obj.ftype.value
+            s["type"] = obj.ftype.value
         else:
-            s['type'] = obj.ftype.name.pval
+            s["type"] = obj.ftype.name.pval
 
-        s['name'] = obj.name.value.pval
+        s["name"] = obj.name.value.pval
 
         try:
-            s['policy'] = obj.policy.pval
+            s["policy"] = obj.policy.pval
         except AttributeError:
-            s['policy'] = None
+            s["policy"] = None
 
-        s['modifier'] = obj.field_modifier.pval
-        s['id'] = obj.fieldId.pval
+        s["modifier"] = obj.field_modifier.pval
+        s["id"] = obj.fieldId.pval
 
-        opts = {'modifier': s['modifier']}
+        opts = {"modifier": s["modifier"]}
         n = self.count_stack.pop()
         for i in range(0, n):
             k, v = self.stack.pop()
@@ -253,28 +260,28 @@
 
             opts[k] = v
 
-        s['options'] = opts
+        s["options"] = opts
         try:
-            last_link = self.stack[-1]['_type']
-            if (last_link == 'link'):
-                s['link'] = True
-        except:
+            last_link = self.stack[-1]["_type"]
+            if last_link == "link":
+                s["link"] = True
+        except BaseException:
             pass
-        s['_type'] = 'field'
+        s["_type"] = "field"
 
         self.stack.push(s)
         return True
 
     def visit_EnumFieldDefinition(self, obj):
         if self.verbose > 4:
-            print "\tEnumField: name=%s, %s" % (obj.name, obj)
+            print("\tEnumField: name=%s, %s" % (obj.name, obj))
 
         return True
 
     def visit_EnumDefinition(self, obj):
-        '''New enum definition, refactor name'''
+        """New enum definition, refactor name"""
         if self.verbose > 3:
-            print "Enum, [%s] body=%s\n\n" % (obj.name, obj.body)
+            print("Enum, [%s] body=%s\n\n" % (obj.name, obj.body))
 
         return True
 
@@ -297,28 +304,39 @@
         last_field = {}
         for i in range(0, stack_num):
             f = self.stack.pop()
-            if (f['_type'] == 'link'):
-                f['options'] = {i: d[i] for d in [f['options'], last_field['options']] for i in d}
-                assert (last_field == fields[0])
-                fields[0].setdefault('options', {})['link_type'] = f['link_type']
+            if f["_type"] == "link":
+                f["options"] = {
+                    i: d[i] for d in [f["options"], last_field["options"]] for i in d
+                }
+                assert last_field == fields[0]
+                fields[0].setdefault("options", {})["link_type"] = f["link_type"]
                 links.insert(0, f)
             else:
                 fields.insert(0, f)
                 last_field = f
 
         if self.package:
-            model_name = '.'.join([self.package, obj.name.value.pval])
+            model_name = ".".join([self.package, obj.name.value.pval])
         else:
             model_name = obj.name.value.pval
 
-        model_def = {'name':obj.name.value.pval,'fields':fields,'links':links, 'bases':obj.bases, 'options':self.message_options, 'package':self.package, 'fqn': model_name, 'rlinks': []}
+        model_def = {
+            "name": obj.name.value.pval,
+            "fields": fields,
+            "links": links,
+            "bases": obj.bases,
+            "options": self.message_options,
+            "package": self.package,
+            "fqn": model_name,
+            "rlinks": [],
+        }
         try:
-            model_def['policy'] = obj.policy.pval
+            model_def["policy"] = obj.policy.pval
         except AttributeError:
-            model_def['policy'] = None
+            model_def["policy"] = None
 
         self.stack.push(model_def)
-        
+
         self.models[model_name] = model_def
 
         # Set message options
@@ -382,40 +400,53 @@
         rev_links = {}
 
         link_opposite = {
-            'manytomany': 'manytomany',
-            'manytoone': 'onetomany',
-            'onetoone': 'onetoone',
-            'onetomany': 'manytoone'
+            "manytomany": "manytomany",
+            "manytoone": "onetomany",
+            "onetoone": "onetoone",
+            "onetomany": "manytoone",
         }
 
         for m in messages:
-            for l in m['links']:
+            for l in m["links"]:
                 rlink = copy.deepcopy(l)
 
-                rlink['_type'] = 'rlink'  # An implicit link, not declared in the model
-                rlink['src_port'] = l['dst_port']
-                rlink['dst_port'] = l['src_port']
-                rlink['peer'] = {'name': m['name'], 'package': m['package'], 'fqn': m['fqn']}
-                rlink['link_type'] = link_opposite[l['link_type']]
-                rlink["reverse_id"] = l['reverse_id']
+                rlink["_type"] = "rlink"  # An implicit link, not declared in the model
+                rlink["src_port"] = l["dst_port"]
+                rlink["dst_port"] = l["src_port"]
+                rlink["peer"] = {
+                    "name": m["name"],
+                    "package": m["package"],
+                    "fqn": m["fqn"],
+                }
+                rlink["link_type"] = link_opposite[l["link_type"]]
+                rlink["reverse_id"] = l["reverse_id"]
 
-                if (not l['reverse_id']) and (self.args.verbosity >= 1):
-                    print >> sys.stderr, "WARNING: Field %s in model %s has no reverse_id" % (l["src_port"], m["name"])
+                if (not l["reverse_id"]) and (self.args.verbosity >= 1):
+                    print(
+                        "WARNING: Field %s in model %s has no reverse_id"
+                        % (l["src_port"], m["name"]),
+                        file=sys.stderr,
+                    )
 
-                if l["reverse_id"] and ((int(l["reverse_id"]) < 1000) or (int(l["reverse_id"]) >= 1900)):
-                    raise Exception("reverse id for field %s in model %s should be between 1000 and 1899" % (l["src_port"], m["name"]))
+                if l["reverse_id"] and (
+                    (int(l["reverse_id"]) < 1000) or (int(l["reverse_id"]) >= 1900)
+                ):
+                    raise Exception(
+                        "reverse id for field %s in model %s should be between 1000 and 1899"
+                        % (l["src_port"], m["name"])
+                    )
 
                 try:
                     try:
-                        rev_links[l['peer']['fqn']].append(rlink)
+                        rev_links[l["peer"]["fqn"]].append(rlink)
                     except TypeError:
                         pass
                 except KeyError:
-                    rev_links[l['peer']['fqn']] = [rlink]
+                    rev_links[l["peer"]["fqn"]] = [rlink]
 
         for m in messages:
             try:
-                m['rlinks'] = rev_links[m['name']]
-                message_dict[m['name']]['rlinks'] = m['rlinks']
+                m["rlinks"] = rev_links[m["name"]]
+                message_dict[m["name"]]["rlinks"] = m["rlinks"]
             except KeyError:
                 pass
diff --git a/lib/xos-genx/xosgenx/xosgen.py b/lib/xos-genx/xosgenx/xosgen.py
index 27a7fb4..1318242 100755
--- a/lib/xos-genx/xosgenx/xosgen.py
+++ b/lib/xos-genx/xosgenx/xosgen.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,49 +15,116 @@
 
 #!/usr/bin/python
 
+from __future__ import print_function
 import argparse
 from generator import *
 from version import __version__
 
-parse = argparse.ArgumentParser(description='XOS Generative Toolchain')
-parse.add_argument('--rev', dest='rev', action='store_true',
-                   default=XOSProcessorArgs.default_rev, help='Convert proto to xproto')
-parse.add_argument('--output', dest='output', action='store',
-                   default=XOSProcessorArgs.default_output, help='Destination dir')
-parse.add_argument('--attic', dest='attic', action='store',
-                   default=XOSProcessorArgs.default_attic, help='The location at which static files are stored')
-parse.add_argument('--kvpairs', dest='kv', action='store',
-                   default=XOSProcessorArgs.default_kvpairs, help='Key value pairs to make available to the target')
-parse.add_argument('--write-to-file', dest='write_to_file', choices = ['single', 'model', 'target'], action='store',
-                   default=XOSProcessorArgs.default_write_to_file,
-                   help='Single output file (single) or output file per model (model) or let target decide (target)')
-parse.add_argument('--version', action='version', version=__version__)
-parse.add_argument("-v", "--verbosity", action="count",
-                   default=XOSProcessorArgs.default_verbosity, help="increase output verbosity")
-parse.add_argument("--include-models", dest="include_models", action="append",
-                   default=XOSProcessorArgs.default_include_models, help="list of models to include")
-parse.add_argument("--include-apps", dest="include_apps", action="append",
-                   default=XOSProcessorArgs.default_include_apps, help="list of models to include")
+parse = argparse.ArgumentParser(description="XOS Generative Toolchain")
+parse.add_argument(
+    "--rev",
+    dest="rev",
+    action="store_true",
+    default=XOSProcessorArgs.default_rev,
+    help="Convert proto to xproto",
+)
+parse.add_argument(
+    "--output",
+    dest="output",
+    action="store",
+    default=XOSProcessorArgs.default_output,
+    help="Destination dir",
+)
+parse.add_argument(
+    "--attic",
+    dest="attic",
+    action="store",
+    default=XOSProcessorArgs.default_attic,
+    help="The location at which static files are stored",
+)
+parse.add_argument(
+    "--kvpairs",
+    dest="kv",
+    action="store",
+    default=XOSProcessorArgs.default_kvpairs,
+    help="Key value pairs to make available to the target",
+)
+parse.add_argument(
+    "--write-to-file",
+    dest="write_to_file",
+    choices=["single", "model", "target"],
+    action="store",
+    default=XOSProcessorArgs.default_write_to_file,
+    help="Single output file (single) or output file per model (model) or let target decide (target)",
+)
+parse.add_argument("--version", action="version", version=__version__)
+parse.add_argument(
+    "-v",
+    "--verbosity",
+    action="count",
+    default=XOSProcessorArgs.default_verbosity,
+    help="increase output verbosity",
+)
+parse.add_argument(
+    "--include-models",
+    dest="include_models",
+    action="append",
+    default=XOSProcessorArgs.default_include_models,
+    help="list of models to include",
+)
+parse.add_argument(
+    "--include-apps",
+    dest="include_apps",
+    action="append",
+    default=XOSProcessorArgs.default_include_apps,
+    help="list of models to include",
+)
 
 group = parse.add_mutually_exclusive_group()
-group.add_argument('--dest-file', dest='dest_file', action='store',
-                   default=XOSProcessorArgs.default_dest_file, help='Output file name (if write-to-file is set to single)')
-group.add_argument('--dest-extension', dest='dest_extension', action='store',
-                   default=XOSProcessorArgs.default_dest_extension, help='Output file extension (if write-to-file is set to single)')
+group.add_argument(
+    "--dest-file",
+    dest="dest_file",
+    action="store",
+    default=XOSProcessorArgs.default_dest_file,
+    help="Output file name (if write-to-file is set to single)",
+)
+group.add_argument(
+    "--dest-extension",
+    dest="dest_extension",
+    action="store",
+    default=XOSProcessorArgs.default_dest_extension,
+    help="Output file extension (if write-to-file is set to single)",
+)
 
 group = parse.add_mutually_exclusive_group(required=True)
-group.add_argument('--target', dest='target', action='store',
-                   default=XOSProcessorArgs.default_target, help='Output format, corresponding to <output>.yaml file')
-group.add_argument('--checkers', dest='checkers', action='store',
-                   default=XOSProcessorArgs.default_checkers, help='Comma-separated list of static checkers')
+group.add_argument(
+    "--target",
+    dest="target",
+    action="store",
+    default=XOSProcessorArgs.default_target,
+    help="Output format, corresponding to <output>.yaml file",
+)
+group.add_argument(
+    "--checkers",
+    dest="checkers",
+    action="store",
+    default=XOSProcessorArgs.default_checkers,
+    help="Comma-separated list of static checkers",
+)
 
-parse.add_argument('files', metavar='<input file>', nargs='+', action='store', help='xproto files to compile')
+parse.add_argument(
+    "files",
+    metavar="<input file>",
+    nargs="+",
+    action="store",
+    help="xproto files to compile",
+)
 
 CHECK = 1
 GEN = 2
 
-class XosGen:
 
+class XosGen:
     @staticmethod
     def init(args=None):
         if not args:
@@ -68,77 +134,94 @@
 
         if args.target:
             op = GEN
-            subdir = '/targets/'
+            subdir = "/targets/"
         elif args.checkers:
             op = CHECK
-            subdir = '/checkers/'
+            subdir = "/checkers/"
         else:
             parse.error("At least one of --target and --checkers is required")
 
-        operators = args.checkers.split(',') if hasattr(args, 'checkers') and args.checkers else [args.target]
+        operators = (
+            args.checkers.split(",")
+            if hasattr(args, "checkers") and args.checkers
+            else [args.target]
+        )
 
         for i in xrange(len(operators)):
-            if not '/' in operators[i]:
+            if "/" not in operators[i]:
                 # if the target is not a path, it refer to a library included one
-                operators[i] = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + subdir + operators[i])
+                operators[i] = os.path.abspath(
+                    os.path.dirname(os.path.realpath(__file__)) + subdir + operators[i]
+                )
 
             if not os.path.isabs(operators[i]):
-                operators[i] = os.path.abspath(os.getcwd() + '/' + operators[i])
+                operators[i] = os.path.abspath(os.getcwd() + "/" + operators[i])
 
         if op == GEN:
             # convert output to absolute path
             if args.output is not None and not os.path.isabs(args.output):
-                args.output = os.path.abspath(os.getcwd() + '/' + args.output)
+                args.output = os.path.abspath(os.getcwd() + "/" + args.output)
 
             operator = operators[0]
-            
+
             # check if there's a line that starts with +++ in the target
             # if so, then the output file names are left to the target to decide
             # also, if dest-file or dest-extension are supplied, then an error is generated.
-            plusplusplus = reduce(lambda acc, line: True if line.startswith('+++') else acc, open(operator).read().splitlines(), False)
+            plusplusplus = reduce(
+                lambda acc, line: True if line.startswith("+++") else acc,
+                open(operator).read().splitlines(),
+                False,
+            )
 
-            if plusplusplus and args.write_to_file != 'target':
-                parse.error('%s chooses the names of the files that it generates, you must set --write-to-file to "target"' % operator)
+            if plusplusplus and args.write_to_file != "target":
+                parse.error(
+                    '%s chooses the names of the files that it generates, you must set --write-to-file to "target"'
+                    % operator
+                )
 
-            if args.write_to_file != 'single' and (args.dest_file):
-                parse.error('--dest-file requires --write-to-file to be set to "single"')
+            if args.write_to_file != "single" and (args.dest_file):
+                parse.error(
+                    '--dest-file requires --write-to-file to be set to "single"'
+                )
 
-            if args.write_to_file != 'model' and (args.dest_extension):
-                parse.error('--dest-extension requires --write-to-file to be set to "model"')
-            
+            if args.write_to_file != "model" and (args.dest_extension):
+                parse.error(
+                    '--dest-extension requires --write-to-file to be set to "model"'
+                )
+
         else:
             if args.write_to_file or args.dest_extension:
-                parse.error('Checkers cannot write to files')
+                parse.error("Checkers cannot write to files")
 
         inputs = []
 
         for fname in args.files:
             if not os.path.isabs(fname):
-                inputs.append(os.path.abspath(os.getcwd() + '/' + fname))
+                inputs.append(os.path.abspath(os.getcwd() + "/" + fname))
             else:
                 inputs.append(fname)
 
         args.files = inputs
 
-        if op==GEN:
+        if op == GEN:
             generated = XOSProcessor.process(args, operators[0])
             if not args.output and not args.write_to_file:
-                print generated
-        elif op==CHECK:
+                print(generated)
+        elif op == CHECK:
             for o in operators:
                 verdict_str = XOSProcessor.process(args, o)
-                vlst = verdict_str.split('\n')
+                vlst = verdict_str.split("\n")
 
                 try:
                     verdict = next(v for v in vlst if v.strip())
-                    status_code, status_string = verdict.split(' ', 1)
+                    status_code, status_string = verdict.split(" ", 1)
                     status_code = int(status_code)
-                except:
-                    print "Checker %s returned mangled output" % o
+                except BaseException:
+                    print("Checker %s returned mangled output" % o)
                     exit(1)
 
                 if status_code != 200:
-                    print '%s: %s - %s' % (o, status_code, status_string)
+                    print("%s: %s - %s" % (o, status_code, status_string))
                     exit(1)
                 else:
-                    print '%s: OK'%o
+                    print("%s: OK" % o)
diff --git a/lib/xos-kafka/setup.py b/lib/xos-kafka/setup.py
index 4486f19..3d05854 100644
--- a/lib/xos-kafka/setup.py
+++ b/lib/xos-kafka/setup.py
@@ -19,27 +19,25 @@
 
 
 def readme():
-    with open('README.rst') as f:
+    with open("README.rst") as f:
         return f.read()
 
 
 setup_with_auto_version(
-    name='xoskafka',
+    name="xoskafka",
     version=__version__,
-    description='Wrapper around kafka for XOS',
+    description="Wrapper around kafka for XOS",
     long_description=readme(),
-    classifiers=[
-        'License :: OSI Approved :: Apache Software License',
-    ],
-    author='Zack Williams',
-    author_email='zdw@opennetworking.org',
-    packages=['xoskafka'],
-    license='Apache v2',
+    classifiers=["License :: OSI Approved :: Apache Software License"],
+    author="Zack Williams",
+    author_email="zdw@opennetworking.org",
+    packages=["xoskafka"],
+    license="Apache v2",
     install_requires=[
-        'confluent-kafka>=0.11.5',
-        'xosconfig>=2.1.0',
-        'multistructlog>=1.5',
-        ],
+        "confluent-kafka>=0.11.5",
+        "xosconfig>=2.1.0",
+        "multistructlog>=1.5",
+    ],
     include_package_data=True,
     zip_safe=False,
-    )
+)
diff --git a/lib/xos-kafka/xoskafka/__init__.py b/lib/xos-kafka/xoskafka/__init__.py
index 69f5a32..293a26e 100644
--- a/lib/xos-kafka/xoskafka/__init__.py
+++ b/lib/xos-kafka/xoskafka/__init__.py
@@ -13,3 +13,5 @@
 # limitations under the License.
 
 from .xoskafkaproducer import XOSKafkaProducer
+
+__all__ = ["XOSKafkaProducer"]
diff --git a/lib/xos-kafka/xoskafka/xoskafkaproducer.py b/lib/xos-kafka/xoskafka/xoskafkaproducer.py
index b4134d5..4611547 100644
--- a/lib/xos-kafka/xoskafka/xoskafkaproducer.py
+++ b/lib/xos-kafka/xoskafka/xoskafkaproducer.py
@@ -18,7 +18,8 @@
 
 from xosconfig import Config
 from multistructlog import create_logger
-log = create_logger(Config().get('logging'))
+
+log = create_logger(Config().get("logging"))
 
 kafka_producer = None
 
@@ -34,23 +35,24 @@
         global kafka_producer
 
         if kafka_producer:
-            raise Exception('XOSKafkaProducer already initialized')
+            raise Exception("XOSKafkaProducer already initialized")
 
         else:
-            log.info('Connecting to Kafka with bootstrap servers: %s' %
-                     Config.get('kafka_bootstrap_servers'))
+            log.info(
+                "Connecting to Kafka with bootstrap servers: %s"
+                % Config.get("kafka_bootstrap_servers")
+            )
 
             try:
                 producer_config = {
-                    'bootstrap.servers':
-                        ','.join(Config.get('kafka_bootstrap_servers')),
+                    "bootstrap.servers": ",".join(Config.get("kafka_bootstrap_servers"))
                 }
 
                 kafka_producer = confluent_kafka.Producer(**producer_config)
 
-                log.info('Connected to Kafka: %s' % kafka_producer)
+                log.info("Connected to Kafka: %s" % kafka_producer)
 
-            except confluent_kafka.KafkaError, e:
+            except confluent_kafka.KafkaError as e:
                 log.exception("Kafka Error: %s" % e)
 
     @classmethod
@@ -58,25 +60,22 @@
 
         try:
             kafka_producer.produce(
-                topic,
-                value,
-                key,
-                callback=cls._kafka_delivery_callback
-                )
+                topic, value, key, callback=cls._kafka_delivery_callback
+            )
 
             # see https://github.com/confluentinc/confluent-kafka-python/issues/16
             kafka_producer.poll(0)
 
-        except confluent_kafka.KafkaError, err:
+        except confluent_kafka.KafkaError as err:
             log.exception("Kafka Error", err)
 
     def __del__(self):
-       if kafka_producer is not None:
+        if kafka_producer is not None:
             kafka_producer.flush()
 
     @staticmethod
     def _kafka_delivery_callback(err, msg):
         if err:
-            log.error('Message failed delivery: %s' % err)
+            log.error("Message failed delivery: %s" % err)
         else:
-            log.trace('Message delivered', message=msg)
+            log.trace("Message delivered", message=msg)
diff --git a/lib/xos-util/setup.py b/lib/xos-util/setup.py
index b60435d..a3707ef 100644
--- a/lib/xos-util/setup.py
+++ b/lib/xos-util/setup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python 
+#!/usr/bin/env python
 
 # Copyright 2017-present Open Networking Foundation
 #
@@ -15,18 +15,15 @@
 # limitations under the License.
 
 
-import os
-
-from setuptools import setup
-
 from xosutil.autoversion_setup import setup_with_auto_version
 from xosutil.version import __version__
 
-setup_with_auto_version(name='XosUtil',
-      version=__version__,
-      description='XOS Utility Library',
-      author='Scott Baker',
-      author_email='scottb@opennetworking.org',
-      packages=['xosutil'],
-      include_package_data=True
-     )
+setup_with_auto_version(
+    name="XosUtil",
+    version=__version__,
+    description="XOS Utility Library",
+    author="Scott Baker",
+    author_email="scottb@opennetworking.org",
+    packages=["xosutil"],
+    include_package_data=True,
+)
diff --git a/lib/xos-util/tests/test_util.py b/lib/xos-util/tests/test_util.py
index ff1ce58..2b5be07 100644
--- a/lib/xos-util/tests/test_util.py
+++ b/lib/xos-util/tests/test_util.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,6 +19,7 @@
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
 
+
 class XOSUtilTest(unittest.TestCase):
     """
     Testing the XOS Util Module
@@ -40,9 +40,15 @@
         test_save_fn = os.path.join(test_path, "test_version.py")
         if os.path.exists(test_save_fn):
             os.remove(test_save_fn)
-        self.assertEqual(version, autodiscover_version.autodiscover_version_of_caller(save_to="test_version.py"))
+        self.assertEqual(
+            version,
+            autodiscover_version.autodiscover_version_of_caller(
+                save_to="test_version.py"
+            ),
+        )
         self.assertTrue(os.path.exists(test_save_fn))
         self.assertTrue(version in open(test_save_fn).read())
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/lib/xos-util/xosutil/__init__.py b/lib/xos-util/xosutil/__init__.py
index 42722a8..b0fb0b2 100644
--- a/lib/xos-util/xosutil/__init__.py
+++ b/lib/xos-util/xosutil/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/lib/xos-util/xosutil/autodiscover_version.py b/lib/xos-util/xosutil/autodiscover_version.py
index 2e606e1..8f4ea47 100644
--- a/lib/xos-util/xosutil/autodiscover_version.py
+++ b/lib/xos-util/xosutil/autodiscover_version.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,6 +23,7 @@
 import inspect
 import os
 
+
 def autodiscover_version(caller_filename=None, save_to=None, max_parent_depth=None):
     """ walk back along the path to the current module, searching for a VERSION file """
     if not caller_filename:
@@ -42,8 +42,8 @@
             return version
 
         # limit_parent_depth can be used to limit how far back we search the tree for a VERSION file.
-        if (max_parent_depth is not None):
-            if (max_parent_depth <= 0):
+        if max_parent_depth is not None:
+            if max_parent_depth <= 0:
                 return None
             max_parent_depth -= 1
 
@@ -51,13 +51,16 @@
         if not remainder:
             return None
 
+
 def autodiscover_version_of_caller(*args, **kwargs):
     frame = inspect.stack()[1]
     module = inspect.getmodule(frame[0])
     return autodiscover_version(module.__file__, *args, **kwargs)
 
+
 def autodiscover_version_of_main(*args, **kwargs):
     import __main__
+
     if hasattr(__main__, "__file__"):
         return autodiscover_version(__main__.__file__, *args, **kwargs)
     else:
diff --git a/lib/xos-util/xosutil/autoversion_setup.py b/lib/xos-util/xosutil/autoversion_setup.py
index 5a7ea44..e027d5c 100644
--- a/lib/xos-util/xosutil/autoversion_setup.py
+++ b/lib/xos-util/xosutil/autoversion_setup.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -33,42 +32,47 @@
 import inspect
 from autodiscover_version import autodiscover_version
 
+
 class SdistCommand(sdist):
     def copy_file(self, infile, outfile, *args, **kwargs):
         if kwargs.get("dry_run"):
             return (outfile, 1)
-        if (os.path.split(outfile)[1] == "version.py"):
-            open(outfile, "w").write("# do not edit. Autogenerated file.\n" \
-                                     "__version__ = '%s'\n" % self.distribution.metadata.version)
+        if os.path.split(outfile)[1] == "version.py":
+            open(outfile, "w").write(
+                "# do not edit. Autogenerated file.\n"
+                "__version__ = '%s'\n" % self.distribution.metadata.version
+            )
             return (outfile, 1)
         else:
             return sdist.copy_file(self, infile, outfile, *args, **kwargs)
 
+
 class BuildPyCommand(build_py):
     def copy_file(self, infile, outfile, *args, **kwargs):
         if kwargs.get("dry_run"):
             return (outfile, 1)
-        if (os.path.split(outfile)[1] == "version.py"):
-            open(outfile, "w").write("# do not edit. Autogenerated file.\n" \
-                                     "__version__ = '%s'\n" % self.distribution.metadata.version)
+        if os.path.split(outfile)[1] == "version.py":
+            open(outfile, "w").write(
+                "# do not edit. Autogenerated file.\n"
+                "__version__ = '%s'\n" % self.distribution.metadata.version
+            )
             return (outfile, 1)
         else:
             return build_py.copy_file(self, infile, outfile, *args, **kwargs)
 
+
 def setup_with_auto_version(*args, **kwargs):
     # Learn the module that called this function, so we can search for any VERSION files in it.
     frame = inspect.stack()[1]
     caller_module = inspect.getmodule(frame[0])
 
     # Search for a VERSION file and extract the version number from it.
-    version = autodiscover_version(caller_filename = caller_module.__file__)
+    version = autodiscover_version(caller_filename=caller_module.__file__)
     if version:
         kwargs["version"] = version
 
     cmdclass = kwargs.get("cmdclass", {}).copy()
-    cmdclass.update( {"sdist": SdistCommand,
-                "build_py": BuildPyCommand} )
+    cmdclass.update({"sdist": SdistCommand, "build_py": BuildPyCommand})
     kwargs["cmdclass"] = cmdclass
 
     return setup(*args, **kwargs)
-
diff --git a/lib/xos-util/xosutil/version.py b/lib/xos-util/xosutil/version.py
index 57833b8..8456fc5 100644
--- a/lib/xos-util/xosutil/version.py
+++ b/lib/xos-util/xosutil/version.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # This file is autogenerated. Do not edit.
-__version__ = 'unknown'
+__version__ = "unknown"
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..ea9ee50
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,21 @@
+; Copyright 2019-present Open Networking Foundation
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+; http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+
+; using tox.ini only to store config information for other tools
+
+[flake8]
+; F821, ignoring undefined names would be valuable, but our testing dynamically loads them
+; W503, allow breaks before binary operators (see: https://github.com/PyCQA/pycodestyle/issues/498)
+ignore = W503
+max-line-length = 120
diff --git a/xos/__init__.py b/xos/__init__.py
index 42722a8..b0fb0b2 100644
--- a/xos/__init__.py
+++ b/xos/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/xos/api/__init__.py b/xos/api/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/api/__init__.py
+++ b/xos/api/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/api/import_methods.py b/xos/api/import_methods.py
index 3e6ea62..e610a31 100644
--- a/xos/api/import_methods.py
+++ b/xos/api/import_methods.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,24 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
 from django.views.generic import View
-from django.conf.urls import patterns, url, include
-from rest_framework.routers import DefaultRouter
+from django.conf.urls import url
 from xosapi_helpers import XOSIndexViewSet
-import os, sys
+import os
+import sys
 import inspect
-import importlib
-
-try:
-    from rest_framework.serializers import DictField
-except:
-    raise Exception("Failed check for django-rest-framework >= 3.3.3")
 
 urlpatterns = []
 
+
 def import_module_from_filename(dirname, fn):
-    print "importing", dirname, fn
+    print("importing", dirname, fn)
     sys_path_save = sys.path
     try:
         # __import__() and importlib.import_module() both import modules from
@@ -44,41 +38,53 @@
 
     return module
 
+
 def import_module_by_dotted_name(name):
-    print "import", name
+    print("import", name)
     try:
         module = __import__(name)
-    except:
+    except BaseException:
         # django will eat the exception, and then fail later with
         #  'conflicting models in application'
         # when it tries to import the module a second time.
-        print "exception in import_model_by_dotted_name"
+        print("exception in import_model_by_dotted_name")
         import traceback
+
         traceback.print_exc()
         raise
     for part in name.split(".")[1:]:
         module = getattr(module, part)
     return module
 
+
 def import_api_methods(dirname=None, api_path="api", api_module="api"):
     has_index_view = False
-    subdirs=[]
-    urlpatterns=[]
+    subdirs = []
+    urlpatterns = []
 
     if not dirname:
         dirname = os.path.dirname(os.path.abspath(__file__))
 
     view_urls = []
     for fn in os.listdir(dirname):
-        pathname = os.path.join(dirname,fn)
-        if os.path.isfile(pathname) and fn.endswith(".py") and (fn!="__init__.py") and (fn!="import_methods.py"):
-            #module = import_module_from_filename(dirname, fn)
+        pathname = os.path.join(dirname, fn)
+        if (
+            os.path.isfile(pathname)
+            and fn.endswith(".py")
+            and (fn != "__init__.py")
+            and (fn != "import_methods.py")
+        ):
+            # module = import_module_from_filename(dirname, fn)
             module = import_module_by_dotted_name(api_module + "." + fn[:-3])
             for classname in dir(module):
-#                print "  ",classname
+                #                print "  ",classname
                 c = getattr(module, classname, None)
 
-                if inspect.isclass(c) and issubclass(c, View) and (classname not in globals()):
+                if (
+                    inspect.isclass(c)
+                    and issubclass(c, View)
+                    and (classname not in globals())
+                ):
                     globals()[classname] = c
 
                     method_kind = getattr(c, "method_kind", None)
@@ -89,30 +95,59 @@
                         else:
                             method_name = api_path
                             has_index_view = True
-                        view_urls.append( (method_kind, method_name, classname, c) )
+                        view_urls.append((method_kind, method_name, classname, c))
 
         elif os.path.isdir(pathname):
-            urlpatterns.extend(import_api_methods(pathname, os.path.join(api_path, fn), api_module+"." + fn))
+            urlpatterns.extend(
+                import_api_methods(
+                    pathname, os.path.join(api_path, fn), api_module + "." + fn
+                )
+            )
             subdirs.append(fn)
 
     for view_url in view_urls:
         if view_url[0] == "list":
-           urlpatterns.append(url(r'^' + view_url[1] + '/$',  view_url[3].as_view(), name=view_url[1]+'list'))
+            urlpatterns.append(
+                url(
+                    r"^" + view_url[1] + "/$",
+                    view_url[3].as_view(),
+                    name=view_url[1] + "list",
+                )
+            )
         elif view_url[0] == "detail":
-           urlpatterns.append(url(r'^' + view_url[1] + '/(?P<pk>[a-zA-Z0-9\-]+)/$',  view_url[3].as_view(), name=view_url[1]+'detail'))
+            urlpatterns.append(
+                url(
+                    r"^" + view_url[1] + r"/(?P<pk>[a-zA-Z0-9\-]+)/$",
+                    view_url[3].as_view(),
+                    name=view_url[1] + "detail",
+                )
+            )
         elif view_url[0] == "viewset":
-           viewset = view_url[3]
-           urlpatterns.extend(viewset.get_urlpatterns(api_path="^"+api_path+"/"))
+            viewset = view_url[3]
+            urlpatterns.extend(viewset.get_urlpatterns(api_path="^" + api_path + "/"))
 
     # Only add an index_view if 1) the is not already an index view, and
     # 2) we have found some methods in this directory.
     if (not has_index_view) and (urlpatterns):
         # The browseable API uses the classname as the breadcrumb and page
         # title, so try to create index views with descriptive classnames
-        viewset = type("IndexOf"+api_path.split("/")[-1].title(), (XOSIndexViewSet,), {})
-        urlpatterns.append(url('^' + api_path + '/$', viewset.as_view({'get': 'list'}, view_urls=view_urls, subdirs=subdirs, api_path=api_path), name=api_path+"_index"))
+        viewset = type(
+            "IndexOf" + api_path.split("/")[-1].title(), (XOSIndexViewSet,), {}
+        )
+        urlpatterns.append(
+            url(
+                "^" + api_path + "/$",
+                viewset.as_view(
+                    {"get": "list"},
+                    view_urls=view_urls,
+                    subdirs=subdirs,
+                    api_path=api_path,
+                ),
+                name=api_path + "_index",
+            )
+        )
 
     return urlpatterns
 
-urlpatterns = import_api_methods()
 
+urlpatterns = import_api_methods()
diff --git a/xos/api/service/__init__.py b/xos/api/service/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/api/service/__init__.py
+++ b/xos/api/service/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/api/tenant/__init__.py b/xos/api/tenant/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/api/tenant/__init__.py
+++ b/xos/api/tenant/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/api/utility/__init__.py b/xos/api/utility/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/api/utility/__init__.py
+++ b/xos/api/utility/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/api/utility/toscaapi.py b/xos/api/utility/toscaapi.py
index 3c18595..7e66103 100644
--- a/xos/api/utility/toscaapi.py
+++ b/xos/api/utility/toscaapi.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,28 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
 import os
 import sys
 import traceback
-from django.http import HttpResponse
-from rest_framework.decorators import api_view
-from rest_framework.response import Response
-from rest_framework.reverse import reverse
-from rest_framework import serializers
-from rest_framework import generics
-from rest_framework import status
-from core.models import *
-from xos.apibase import XOSListCreateAPIView, XOSRetrieveUpdateDestroyAPIView, XOSPermissionDenied
-from api.xosapi_helpers import PlusModelSerializer, XOSViewSet, ReadOnlyField
 
 # The Tosca engine expects to be run from /opt/xos/tosca/ or equivalent. It
 # needs some sys.path fixing up.
 import inspect
+
 currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 toscadir = os.path.join(currentdir, "../../tosca")
 
+
 class ToscaViewSet(XOSViewSet):
     base_name = "tosca"
     method_name = "tosca"
@@ -44,12 +33,11 @@
     def get_urlpatterns(self, api_path="^"):
         patterns = []
 
-        patterns.append( self.list_url("run/$", {"post": "post_run"}, "tosca_run") )
+        patterns.append(self.list_url("run/$", {"post": "post_run"}, "tosca_run"))
 
         return patterns
 
     def post_run(self, request):
-        result = []
 
         recipe = request.data.get("recipe", None)
 
@@ -57,19 +45,12 @@
         try:
             sys.path.append(toscadir)
             from tosca.engine import XOSTosca
+
             xt = XOSTosca(recipe, parent_dir=toscadir, log_to_console=False)
             xt.execute(request.user)
-        except:
-            return Response( {"error_text": traceback.format_exc()}, status=500 )
+        except BaseException:
+            return Response({"error_text": traceback.format_exc()}, status=500)
         finally:
             sys.path = sys_path_save
 
-
-        return Response( {"log_msgs": xt.log_msgs} )
-
-
-
-
-
-
-
+        return Response({"log_msgs": xt.log_msgs})
diff --git a/xos/api/xosapi_helpers.py b/xos/api/xosapi_helpers.py
index 626a495..b01d637 100644
--- a/xos/api/xosapi_helpers.py
+++ b/xos/api/xosapi_helpers.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,24 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-from rest_framework import generics
-from rest_framework import serializers
+from rest_framework import serializers, viewsets
 from rest_framework.response import Response
-from rest_framework import status
-from xos.apibase import XOSRetrieveUpdateDestroyAPIView, XOSListCreateAPIView
-from rest_framework import viewsets
-from django.conf.urls import patterns, url
-from xos.exceptions import *
-from rest_framework.reverse import reverse
+from django.conf.urls import url
+from xos.exceptions import XOSPermissionDenied
 from django.core.urlresolvers import get_script_prefix, resolve, Resolver404
 
 # rest_framework 3.x
 ReadOnlyField = serializers.ReadOnlyField
 
-ICON_URLS = {"success": "/static/admin/img/icon_success.gif",
-            "clock": "/static/admin/img/icon_clock.gif",
-            "error": "/static/admin/img/icon_error.gif"}
+ICON_URLS = {
+    "success": "/static/admin/img/icon_success.gif",
+    "clock": "/static/admin/img/icon_clock.gif",
+    "error": "/static/admin/img/icon_error.gif",
+}
+
 
 class PlusObjectMixin:
     def getBackendIcon(self):
@@ -47,12 +43,14 @@
         else:
             return '<img src="%s">' % icon_url
 
+
 """ PlusSerializerMixin
 
     Implements Serializer fields that are common to all OpenCloud objects. For
     example, stuff related to backend fields.
 """
 
+
 class PlusModelSerializer(serializers.ModelSerializer):
     backendIcon = serializers.SerializerMethodField("getBackendIcon")
     backendHtml = serializers.SerializerMethodField("getBackendHtml")
@@ -73,20 +71,24 @@
         property_fields = getattr(self, "property_fields", [])
         create_fields = {}
         for k in validated_data:
-            if not k in property_fields:
+            if k not in property_fields:
                 create_fields[k] = validated_data[k]
         instance = self.Meta.model(**create_fields)
 
-        if instance and hasattr(instance,"can_update") and self.context.get('request',None):
-            user = self.context['request'].user
-            if user.__class__.__name__=="AnonymousUser":
+        if (
+            instance
+            and hasattr(instance, "can_update")
+            and self.context.get("request", None)
+        ):
+            user = self.context["request"].user
+            if user.__class__.__name__ == "AnonymousUser":
                 raise XOSPermissionDenied()
 
         for k in validated_data:
             if k in property_fields:
                 setattr(instance, k, validated_data[k])
 
-        instance.caller = self.context['request'].user
+        instance.caller = self.context["request"].user
         instance.save()
         return instance
 
@@ -95,18 +97,19 @@
         for k in validated_data.keys():
             v = validated_data[k]
             if k in nested_fields:
-                d = getattr(instance,k)
+                d = getattr(instance, k)
                 d.update(v)
-                setattr(instance,k,d)
+                setattr(instance, k, d)
             else:
                 setattr(instance, k, v)
-        instance.caller = self.context['request'].user
+        instance.caller = self.context["request"].user
         instance.save()
         return instance
 
+
 class XOSViewSet(viewsets.ModelViewSet):
-    api_path=""
-    read_only=False
+    api_path = ""
+    read_only = False
 
     @classmethod
     def get_api_method_path(self):
@@ -117,15 +120,19 @@
 
     @classmethod
     def detail_url(self, pattern, viewdict, name):
-        return url(self.get_api_method_path() + r'(?P<pk>[a-zA-Z0-9\-_]+)/' + pattern,
-                   self.as_view(viewdict),
-                   name=self.base_name+"_"+name)
+        return url(
+            self.get_api_method_path() + r"(?P<pk>[a-zA-Z0-9\-_]+)/" + pattern,
+            self.as_view(viewdict),
+            name=self.base_name + "_" + name,
+        )
 
     @classmethod
     def list_url(self, pattern, viewdict, name):
-        return url(self.get_api_method_path() + pattern,
-                   self.as_view(viewdict),
-                   name=self.base_name+"_"+name)
+        return url(
+            self.get_api_method_path() + pattern,
+            self.as_view(viewdict),
+            name=self.base_name + "_" + name,
+        )
 
     @classmethod
     def get_urlpatterns(self, api_path="^"):
@@ -134,16 +141,52 @@
         patterns = []
 
         if self.read_only:
-            patterns.append(url(self.get_api_method_path() + '$', self.as_view({'get': 'list'}), name=self.base_name+'_list'))
-            patterns.append(url(self.get_api_method_path() + '(?P<pk>[a-zA-Z0-9\-_]+)/$', self.as_view({'get': 'retrieve'}), name=self.base_name+'_detail'))
+            patterns.append(
+                url(
+                    self.get_api_method_path() + "$",
+                    self.as_view({"get": "list"}),
+                    name=self.base_name + "_list",
+                )
+            )
+            patterns.append(
+                url(
+                    self.get_api_method_path() + r"(?P<pk>[a-zA-Z0-9\-_]+)/$",
+                    self.as_view({"get": "retrieve"}),
+                    name=self.base_name + "_detail",
+                )
+            )
         else:
-            patterns.append(url(self.get_api_method_path() + '$', self.as_view({'get': 'list', 'post': 'create'}), name=self.base_name+'_list'))
-            patterns.append(url(self.get_api_method_path() + '(?P<pk>[a-zA-Z0-9\-_]+)/$', self.as_view({'get': 'retrieve', 'put': 'update', 'post': 'update', 'delete': 'destroy', 'patch': 'partial_update'}), name=self.base_name+'_detail'))
+            patterns.append(
+                url(
+                    self.get_api_method_path() + "$",
+                    self.as_view({"get": "list", "post": "create"}),
+                    name=self.base_name + "_list",
+                )
+            )
+            patterns.append(
+                url(
+                    self.get_api_method_path() + r"(?P<pk>[a-zA-Z0-9\-_]+)/$",
+                    self.as_view(
+                        {
+                            "get": "retrieve",
+                            "put": "update",
+                            "post": "update",
+                            "delete": "destroy",
+                            "patch": "partial_update",
+                        }
+                    ),
+                    name=self.base_name + "_detail",
+                )
+            )
 
         return patterns
 
     def get_serializer_class(self):
-        if hasattr(self, "custom_serializers") and hasattr(self, "action") and (self.action in self.custom_serializers):
+        if (
+            hasattr(self, "custom_serializers")
+            and hasattr(self, "action")
+            and (self.action in self.custom_serializers)
+        ):
             return self.custom_serializers[self.action]
         else:
             return super(XOSViewSet, self).get_serializer_class()
@@ -151,17 +194,22 @@
     def get_object(self):
         obj = super(XOSViewSet, self).get_object()
 
-        if self.action=="update" or self.action=="destroy" or self.action.startswith("set_"):
-            if obj and hasattr(obj,"can_update"):
+        if (
+            self.action == "update"
+            or self.action == "destroy"
+            or self.action.startswith("set_")
+        ):
+            if obj and hasattr(obj, "can_update"):
                 user = self.request.user
-                if user.__class__.__name__=="AnonymousUser":
+                if user.__class__.__name__ == "AnonymousUser":
                     raise XOSPermissionDenied()
 
         return obj
 
+
 class XOSIndexViewSet(viewsets.ViewSet):
-    view_urls=[]
-    subdirs=[]
+    view_urls = []
+    subdirs = []
     api_path = None
 
     def __init__(self, view_urls, subdirs, api_path):
@@ -174,12 +222,25 @@
         endpoints = {}
 
         # If it is the root, add core
-        if(self.api_path == "api"):
-            endpoints['core'] = "http://" + request.get_host() + get_script_prefix() + self.api_path + "/core"
+        if self.api_path == "api":
+            endpoints["core"] = (
+                "http://"
+                + request.get_host()
+                + get_script_prefix()
+                + self.api_path
+                + "/core"
+            )
 
         for view_url in self.view_urls:
             method_name = view_url[1].split("/")[-1]
-            method_url = "http://" + request.get_host() + get_script_prefix() + self.api_path + "/" + method_name
+            method_url = (
+                "http://"
+                + request.get_host()
+                + get_script_prefix()
+                + self.api_path
+                + "/"
+                + method_name
+            )
             endpoints[method_name] = method_url
 
         for subdir in self.subdirs:
@@ -195,4 +256,3 @@
             endpoints[method_name] = method_url
 
         return Response(endpoints)
-
diff --git a/xos/core/__init__.py b/xos/core/__init__.py
index dad322a..1d63f91 100644
--- a/xos/core/__init__.py
+++ b/xos/core/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/xos/core/middleware.py b/xos/core/middleware.py
index f8c6613..24dc13a 100644
--- a/xos/core/middleware.py
+++ b/xos/core/middleware.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,11 +17,15 @@
 
 _active = local()
 
+
 def get_request():
     if not hasattr(_active, "request"):
-        raise Exception("Please add 'core.middleware.GlobalRequestMiddleware' to <XOS_DIR>/xos.settings.py:MIDDLEWARE_CLASSES")
+        raise Exception(
+            "Please add 'core.middleware.GlobalRequestMiddleware' to <XOS_DIR>/xos.settings.py:MIDDLEWARE_CLASSES"
+        )
     return _active.request
 
+
 class GlobalRequestMiddleware(object):
     def process_view(self, request, view_func, view_args, view_kwargs):
         _active.request = request
diff --git a/xos/core/migrations/0002_initial_data.py b/xos/core/migrations/0002_initial_data.py
index 77758b1..22227f8 100644
--- a/xos/core/migrations/0002_initial_data.py
+++ b/xos/core/migrations/0002_initial_data.py
@@ -12,19 +12,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import os
 import yaml
 from django.db import models, migrations
 from django.contrib.auth.hashers import make_password
 
-FIXTURES = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/initial_data.yaml')
+FIXTURES = os.path.abspath(
+    os.path.dirname(os.path.realpath(__file__)) + "/initial_data.yaml"
+)
+
 
 def load_data_from_yaml():
-    file = open(FIXTURES, 'r').read()
+    file = open(FIXTURES, "r").read()
     try:
         data = yaml.load(file)
         return data
-    except Exception, e:
+    except Exception as e:
         raise Exception("Cannot load inital data file: %s" % e.message)
 
 
@@ -36,38 +40,34 @@
     for entry in data:
 
         # retrieve the class for that model
-        [app, model_name] = entry['model'].split('.')
+        [app, model_name] = entry["model"].split(".")
         model_class = apps.get_model(app, model_name)
 
         # create a new instance for that model
-        i = model_class(**entry['fields'])
+        i = model_class(**entry["fields"])
 
         # if model is user hash the password
         if model_name == "User":
-            i.password = make_password(entry['fields']['password'])
+            i.password = make_password(entry["fields"]["password"])
 
         # check relations
-        if 'relations' in entry:
-            for (r_name, r) in entry['relations'].items():
+        if "relations" in entry:
+            for (r_name, r) in entry["relations"].items():
                 # retrieve the related model
-                [r_app, r_model_name] = r['model'].split('.')
+                [r_app, r_model_name] = r["model"].split(".")
                 related_model_class = apps.get_model(r_app, r_model_name)
-                r_model = related_model_class.objects.get(**r['fields'])
+                r_model = related_model_class.objects.get(**r["fields"])
 
                 # assign relation
                 setattr(i, r_name, r_model)
 
         # save the instance
         i.save()
-        print "Created %s: %s" % (model_name, entry['fields'])
+        print("Created %s: %s" % (model_name, entry["fields"]))
 
 
 class Migration(migrations.Migration):
 
-    dependencies = [
-        ('core', '0001_initial'),
-    ]
+    dependencies = [("core", "0001_initial")]
 
-    operations = [
-        migrations.RunPython(persist_data)
-    ]
+    operations = [migrations.RunPython(persist_data)]
diff --git a/xos/core/models/addresspool.py b/xos/core/models/addresspool.py
index dd8df6b..d89820b 100644
--- a/xos/core/models/addresspool.py
+++ b/xos/core/models/addresspool.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,6 +18,7 @@
 from xos.exceptions import *
 from addresspool_decl import *
 
+
 class AddressPool(AddressPool_decl):
     class Meta:
         proxy = True
@@ -30,7 +30,7 @@
 
         dest = []
 
-        netmask = (~(pow(2, 32 - bits) - 1) & 0xFFFFFFFF)
+        netmask = ~(pow(2, 32 - bits) - 1) & 0xFFFFFFFF
 
         count = pow(2, 32 - bits)
         for i in range(2, count - 1):
@@ -75,7 +75,7 @@
                     # were still in use.
                     continue
 
-                inuse_ips.insert(0,addr)
+                inuse_ips.insert(0, addr)
 
                 ap.inuse = " ".join(inuse_ips)
                 ap.addresses = " ".join(avail_ips)
@@ -91,7 +91,7 @@
             addresses = ap.addresses or ""
             parts = addresses.split()
             if addr not in parts:
-                parts.insert(0,addr)
+                parts.insert(0, addr)
                 ap.addresses = " ".join(parts)
 
             inuse = ap.inuse or ""
@@ -101,5 +101,3 @@
                 ap.inuse = " ".join(parts)
 
             ap.save()
-
-
diff --git a/xos/core/models/computeserviceinstance.py b/xos/core/models/computeserviceinstance.py
index 8e265a4..319dc81 100644
--- a/xos/core/models/computeserviceinstance.py
+++ b/xos/core/models/computeserviceinstance.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from computeserviceinstance_decl import *
 
+
 class ComputeServiceInstance(ComputeServiceInstance_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/contenttype.py b/xos/core/models/contenttype.py
index 4cb4d8b..23499a2 100644
--- a/xos/core/models/contenttype.py
+++ b/xos/core/models/contenttype.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/xos/core/models/controller.py b/xos/core/models/controller.py
index e0d20e7..90af736 100644
--- a/xos/core/models/controller.py
+++ b/xos/core/models/controller.py
@@ -15,6 +15,7 @@
 from xos.exceptions import *
 from controller_decl import *
 
+
 class Controller(Controller_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/controllerimages.py b/xos/core/models/controllerimages.py
index fdec3d4..4ae0ce8 100644
--- a/xos/core/models/controllerimages.py
+++ b/xos/core/models/controllerimages.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from controllerimages_decl import *
 
+
 class ControllerImages(ControllerImages_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/controllernetwork.py b/xos/core/models/controllernetwork.py
index 6259bdb..ac40575 100644
--- a/xos/core/models/controllernetwork.py
+++ b/xos/core/models/controllernetwork.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,15 +15,16 @@
 from xos.exceptions import *
 from controllernetwork_decl import *
 
+
 class ControllerNetwork(ControllerNetwork_decl):
     class Meta:
         proxy = True
 
     def tologdict(self):
-        d=super(ControllerNetwork,self).tologdict()
+        d = super(ControllerNetwork, self).tologdict()
         try:
-            d['network_name']=self.network.name
-            d['controller_name']=self.controller.name
-        except:
+            d["network_name"] = self.network.name
+            d["controller_name"] = self.controller.name
+        except BaseException:
             pass
         return d
diff --git a/xos/core/models/controllerrole.py b/xos/core/models/controllerrole.py
index 91746a1..f01d958 100644
--- a/xos/core/models/controllerrole.py
+++ b/xos/core/models/controllerrole.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from controllerrole_decl import *
 
+
 class ControllerRole(ControllerRole_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/controllersite.py b/xos/core/models/controllersite.py
index 5186c05..178c014 100644
--- a/xos/core/models/controllersite.py
+++ b/xos/core/models/controllersite.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from controllersite_decl import *
 
+
 class ControllerSite(ControllerSite_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/controllersiteprivilege.py b/xos/core/models/controllersiteprivilege.py
index 915aa37..e4283d5 100644
--- a/xos/core/models/controllersiteprivilege.py
+++ b/xos/core/models/controllersiteprivilege.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from controllersiteprivilege_decl import *
 
+
 class ControllerSitePrivilege(ControllerSitePrivilege_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/controllerslice.py b/xos/core/models/controllerslice.py
index 8b468af..0e48dc9 100644
--- a/xos/core/models/controllerslice.py
+++ b/xos/core/models/controllerslice.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,15 +15,16 @@
 from xos.exceptions import *
 from controllerslice_decl import *
 
+
 class ControllerSlice(ControllerSlice_decl):
     class Meta:
         proxy = True
 
     def tologdict(self):
-        d=super(ControllerSlice,self).tologdict()
+        d = super(ControllerSlice, self).tologdict()
         try:
-            d['slice_name']=self.slice.name
-            d['controller_name']=self.controller.name
-        except:
+            d["slice_name"] = self.slice.name
+            d["controller_name"] = self.controller.name
+        except BaseException:
             pass
         return d
diff --git a/xos/core/models/controllersliceprivilege.py b/xos/core/models/controllersliceprivilege.py
index e9963ff..9fa9ca5 100644
--- a/xos/core/models/controllersliceprivilege.py
+++ b/xos/core/models/controllersliceprivilege.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from controllersliceprivilege_decl import *
 
+
 class ControllerSlicePrivilege(ControllerSlicePrivilege_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/controlleruser.py b/xos/core/models/controlleruser.py
index cdd152d..c64cd26 100644
--- a/xos/core/models/controlleruser.py
+++ b/xos/core/models/controlleruser.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from controlleruser_decl import *
 
+
 class ControllerUser(ControllerUser_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/deployment.py b/xos/core/models/deployment.py
index 0098891..84b2ba6 100644
--- a/xos/core/models/deployment.py
+++ b/xos/core/models/deployment.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from deployment_decl import *
 
+
 class Deployment(Deployment_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/flavor.py b/xos/core/models/flavor.py
index e2156b2..c7ad57d 100644
--- a/xos/core/models/flavor.py
+++ b/xos/core/models/flavor.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from flavor_decl import *
 
+
 class Flavor(Flavor_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/image.py b/xos/core/models/image.py
index 7aa139a..95367fe 100644
--- a/xos/core/models/image.py
+++ b/xos/core/models/image.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from image_decl import *
 
+
 class Image(Image_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/imagedeployments.py b/xos/core/models/imagedeployments.py
index 3db6a47..5c62569 100644
--- a/xos/core/models/imagedeployments.py
+++ b/xos/core/models/imagedeployments.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from imagedeployments_decl import *
 
+
 class ImageDeployments(ImageDeployments_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/instance.py b/xos/core/models/instance.py
index df33590..a742d94 100644
--- a/xos/core/models/instance.py
+++ b/xos/core/models/instance.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,23 +15,24 @@
 from xos.exceptions import *
 from instance_decl import *
 
+
 class Instance(Instance_decl):
     class Meta:
         proxy = True
 
     def tologdict(self):
-        d=super(Instance,self).tologdict()
+        d = super(Instance, self).tologdict()
         try:
-            d['slice_name']=self.slice.name
-            d['controller_name']=self.get_controller().name
-        except:
+            d["slice_name"] = self.slice.name
+            d["controller_name"] = self.get_controller().name
+        except BaseException:
             pass
         return d
 
     def save(self, *args, **kwargs):
         if not self.name:
             self.name = self.slice.name
-        if not self.creator and hasattr(self, 'caller'):
+        if not self.creator and hasattr(self, "caller"):
             self.creator = self.caller
 
         super(Instance, self).save(*args, **kwargs)
diff --git a/xos/core/models/interfacetype.py b/xos/core/models/interfacetype.py
index 264a981..a88afe5 100644
--- a/xos/core/models/interfacetype.py
+++ b/xos/core/models/interfacetype.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from interfacetype_decl import *
 
+
 class InterfaceType(InterfaceType_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/network.py b/xos/core/models/network.py
index 466b30f..c19e558 100644
--- a/xos/core/models/network.py
+++ b/xos/core/models/network.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from network_decl import *
 
+
 class Network(Network_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/networkparameter.py b/xos/core/models/networkparameter.py
index b8d4b8c..7ecc297 100644
--- a/xos/core/models/networkparameter.py
+++ b/xos/core/models/networkparameter.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from networkparameter_decl import *
 
+
 class NetworkParameter(NetworkParameter_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/networkparametertype.py b/xos/core/models/networkparametertype.py
index f33aa28..5895711 100644
--- a/xos/core/models/networkparametertype.py
+++ b/xos/core/models/networkparametertype.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from networkparametertype_decl import *
 
+
 class NetworkParameterType(NetworkParameterType_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/networkslice.py b/xos/core/models/networkslice.py
index 9a1b119..1f2060f 100644
--- a/xos/core/models/networkslice.py
+++ b/xos/core/models/networkslice.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from networkslice_decl import *
 
+
 class NetworkSlice(NetworkSlice_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/networktemplate.py b/xos/core/models/networktemplate.py
index ee8f5bd..5104c5f 100644
--- a/xos/core/models/networktemplate.py
+++ b/xos/core/models/networktemplate.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from networktemplate_decl import *
 
+
 class NetworkTemplate(NetworkTemplate_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/node.py b/xos/core/models/node.py
index aea3bac..c741491 100644
--- a/xos/core/models/node.py
+++ b/xos/core/models/node.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from node_decl import *
 
+
 class Node(Node_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/nodelabel.py b/xos/core/models/nodelabel.py
index 68e5f5c..d591447 100644
--- a/xos/core/models/nodelabel.py
+++ b/xos/core/models/nodelabel.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from nodelabel_decl import *
 
+
 class NodeLabel(NodeLabel_decl):
     class Meta:
         proxy = True
@@ -25,11 +25,11 @@
             until the ORM is extended with support for ManyToMany relations.
         """
 
-        if self.name and '###' in self.name:
+        if self.name and "###" in self.name:
             from core.models import Node
 
-            self.name, node_id_str = self.name.split('###')
-            node_ids = map(int, node_id_str.split(','))
+            self.name, node_id_str = self.name.split("###")
+            node_ids = map(int, node_id_str.split(","))
 
             for node_id in node_ids:
                 node = Node.get(node_id)
diff --git a/xos/core/models/port.py b/xos/core/models/port.py
index 9495498..5db5db1 100644
--- a/xos/core/models/port.py
+++ b/xos/core/models/port.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,20 +15,25 @@
 from xos.exceptions import *
 from port_decl import *
 
+
 class Port(Port_decl):
     class Meta:
         proxy = True
 
     def save(self, *args, **kwargs):
         if self.instance:
-            if (self.instance.slice not in self.network.permitted_slices.all()) and \
-                (self.instance.slice != self.network.owner) and \
-                (not self.network.permit_all_slices):
+            if (
+                (self.instance.slice not in self.network.permitted_slices.all())
+                and (self.instance.slice != self.network.owner)
+                and (not self.network.permit_all_slices)
+            ):
                 raise XOSValidationError("Slice is not allowed to connect to network")
 
         if self.instance and self.service_instance:
-            raise XOSValidationError("Only one of (instance, service_instance) may be set,"
-                                      "port=%s, network=%s, instance=%s, service_instance=%s" %
-                                     (self, self.network, self.instance, self.service_instance))
+            raise XOSValidationError(
+                "Only one of (instance, service_instance) may be set,"
+                "port=%s, network=%s, instance=%s, service_instance=%s"
+                % (self, self.network, self.instance, self.service_instance)
+            )
 
         super(Port, self).save(*args, **kwargs)
diff --git a/xos/core/models/principal.py b/xos/core/models/principal.py
index 65b9786..0e17a01 100644
--- a/xos/core/models/principal.py
+++ b/xos/core/models/principal.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from principal_decl import *
 
+
 class Principal(Principal_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/privilege.py b/xos/core/models/privilege.py
index 1485ec8..5bb0cc6 100644
--- a/xos/core/models/privilege.py
+++ b/xos/core/models/privilege.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from privilege_decl import *
 
+
 class Privilege(Privilege_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/role.py b/xos/core/models/role.py
index 447df8e..800060e 100644
--- a/xos/core/models/role.py
+++ b/xos/core/models/role.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from role_decl import *
 
+
 class Role(Role_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/service.py b/xos/core/models/service.py
index 75be0f4..0ecf0f3 100644
--- a/xos/core/models/service.py
+++ b/xos/core/models/service.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from service_decl import *
 
+
 class Service(Service_decl):
     class Meta:
         proxy = True
@@ -23,12 +23,14 @@
     KIND = "generic"
 
     def get_composable_networks(self):
-        SUPPORTED_VTN_SERVCOMP_KINDS = ['VSG','PRIVATE']
+        SUPPORTED_VTN_SERVCOMP_KINDS = ["VSG", "PRIVATE"]
 
         nets = []
         for slice in self.slices.all():
             for net in slice.networks.all():
-                if (net.template.vtn_kind not in SUPPORTED_VTN_SERVCOMP_KINDS) or (net.owner != slice):
+                if (net.template.vtn_kind not in SUPPORTED_VTN_SERVCOMP_KINDS) or (
+                    net.owner != slice
+                ):
                     continue
 
                 if not net.controllernetworks.exists():
@@ -51,6 +53,3 @@
         for dep in service_deps:
             svcs.append(dep.subscriber_service)
         return svcs
-
-
-
diff --git a/xos/core/models/serviceattribute.py b/xos/core/models/serviceattribute.py
index 19b4550..6f7bdc8 100644
--- a/xos/core/models/serviceattribute.py
+++ b/xos/core/models/serviceattribute.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from serviceattribute_decl import *
 
+
 class ServiceAttribute(ServiceAttribute_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/servicedependency.py b/xos/core/models/servicedependency.py
index ae661b3..3986f2f 100644
--- a/xos/core/models/servicedependency.py
+++ b/xos/core/models/servicedependency.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from servicedependency_decl import *
 
+
 class ServiceDependency(ServiceDependency_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/servicegraphconstraint.py b/xos/core/models/servicegraphconstraint.py
index cc703a8..82c9959 100644
--- a/xos/core/models/servicegraphconstraint.py
+++ b/xos/core/models/servicegraphconstraint.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from servicegraphconstraint_decl import *
 
+
 class ServiceGraphConstraint(ServiceGraphConstraint_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/serviceinstance.py b/xos/core/models/serviceinstance.py
index 577b3d9..10abd3f 100644
--- a/xos/core/models/serviceinstance.py
+++ b/xos/core/models/serviceinstance.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,6 +16,7 @@
 from xos.exceptions import XOSValidationError, XOSMissingField, XOSDuplicateKey
 from serviceinstance_decl import *
 
+
 class ServiceInstance(ServiceInstance_decl):
     class Meta:
         proxy = True
@@ -27,23 +27,30 @@
     # TODO: Used by CordSubscriberRoot. Verify whether the usage is necessary.
     def validate_unique_service_specific_id(self, none_okay=False):
         if not none_okay and (self.service_specific_id is None):
-            raise XOSMissingField("subscriber_specific_id is None, and it's a required field", fields={
-                                  "service_specific_id": "cannot be none"})
+            raise XOSMissingField(
+                "subscriber_specific_id is None, and it's a required field",
+                fields={"service_specific_id": "cannot be none"},
+            )
 
         if self.service_specific_id:
             conflicts = self.__class__.objects.filter(
-                service_specific_id=self.service_specific_id)
+                service_specific_id=self.service_specific_id
+            )
             if self.pk:
                 conflicts = conflicts.exclude(pk=self.pk)
             if conflicts:
-                raise XOSDuplicateKey("service_specific_id %s already exists" % self.service_specific_id, fields={
-                                      "service_specific_id": "duplicate key"})
+                raise XOSDuplicateKey(
+                    "service_specific_id %s already exists" % self.service_specific_id,
+                    fields={"service_specific_id": "duplicate key"},
+                )
 
     def set_owner(self):
         if hasattr(self, "OWNER_CLASS_NAME"):
             owner_class = self.get_model_class_by_name(self.OWNER_CLASS_NAME)
             if not owner_class:
-                raise XOSValidationError("Cannot find owner class %s" % self.OWNER_CLASS_NAME)
+                raise XOSValidationError(
+                    "Cannot find owner class %s" % self.OWNER_CLASS_NAME
+                )
 
             need_set_owner = True
             if self.owner_id:
@@ -56,7 +63,9 @@
             if need_set_owner:
                 owners = owner_class.objects.all()
                 if not owners:
-                    raise XOSValidationError("Cannot find eligible owner of class %s" % self.OWNER_CLASS_NAME)
+                    raise XOSValidationError(
+                        "Cannot find eligible owner of class %s" % self.OWNER_CLASS_NAME
+                    )
 
                 self.owner = owners[0]
         else:
@@ -66,9 +75,14 @@
 
             # TODO: Delete this after all services have been migrated away from using field defaults
 
-            if (not self.owner_id) and (self._meta.get_field("owner").default) and \
-                    (self._meta.get_field("owner").default!=NOT_PROVIDED):
-                self.owner = Service.objects.get(id = self._meta.get_field("owner").default)
+            if (
+                (not self.owner_id)
+                and (self._meta.get_field("owner").default)
+                and (self._meta.get_field("owner").default != NOT_PROVIDED)
+            ):
+                self.owner = Service.objects.get(
+                    id=self._meta.get_field("owner").default
+                )
 
     def save(self, *args, **kwargs):
         # NOTE(CORD-3128): Only set the owner if not in deleted state.
@@ -78,9 +92,12 @@
         # If the model has a Creator and it's not specified, then attempt to default to the Caller. Caller is
         # automatically filled in my the API layer. This code was typically used by ServiceInstances that lead to
         # instance creation.
-        if (hasattr(self, "creator")) and (not self.creator) and (hasattr(self, "caller")) and (self.caller):
+        if (
+            (hasattr(self, "creator"))
+            and (not self.creator)
+            and (hasattr(self, "caller"))
+            and (self.caller)
+        ):
             self.creator = self.caller
 
         super(ServiceInstance, self).save(*args, **kwargs)
-
-
diff --git a/xos/core/models/serviceinstanceattribute.py b/xos/core/models/serviceinstanceattribute.py
index 8dcaa81..1b6c1c5 100644
--- a/xos/core/models/serviceinstanceattribute.py
+++ b/xos/core/models/serviceinstanceattribute.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from serviceinstanceattribute_decl import *
 
+
 class ServiceInstanceAttribute(ServiceInstanceAttribute_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/serviceinstancelink.py b/xos/core/models/serviceinstancelink.py
index c2ce0c1..59cb8b6 100644
--- a/xos/core/models/serviceinstancelink.py
+++ b/xos/core/models/serviceinstancelink.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,36 +15,54 @@
 from xos.exceptions import *
 from serviceinstancelink_decl import *
 
+
 class ServiceInstanceLink(ServiceInstanceLink_decl):
     class Meta:
         proxy = True
 
     def save(self, *args, **kwargs):
-        subCount = sum([1 for e in [self.subscriber_service, self.subscriber_service_instance, self.subscriber_network] if e is not None])
-        if (subCount > 1):
+        subCount = sum(
+            [
+                1
+                for e in [
+                    self.subscriber_service,
+                    self.subscriber_service_instance,
+                    self.subscriber_network,
+                ]
+                if e is not None
+            ]
+        )
+        if subCount > 1:
             raise XOSConflictingField(
-                "Only one of subscriber_service, subscriber_service_instance, subscriber_network should be set")
+                "Only one of subscriber_service, subscriber_service_instance, subscriber_network should be set"
+            )
 
         try:
             existing_instance = ServiceInstanceLink.objects.get(
                 provider_service_instance=self.provider_service_instance,
                 subscriber_service_instance=self.subscriber_service_instance,
                 subscriber_service=self.subscriber_service,
-                subscriber_network=self.subscriber_network
+                subscriber_network=self.subscriber_network,
             )
 
-            if (not self.pk and existing_instance) or (self.pk and self.pk != existing_instance.pk):
+            if (not self.pk and existing_instance) or (
+                self.pk and self.pk != existing_instance.pk
+            ):
                 raise XOSValidationError(
                     "A ServiceInstanceLink with attributes 'provider_service_instance=%s, subscriber_service_instance=%s, subscriber_service=%s, subscriber_network=%s' already exists"
-                    % (self.provider_service_instance, self.subscriber_service_instance, self.subscriber_service,
-                       self.subscriber_network))
+                    % (
+                        self.provider_service_instance,
+                        self.subscriber_service_instance,
+                        self.subscriber_service,
+                        self.subscriber_network,
+                    )
+                )
         except self.DoesNotExist:
             # NOTE this is correct, no duplicated links
             pass
 
         super(ServiceInstanceLink, self).save(*args, **kwargs)
 
-
     def delete(self, *args, **kwargs):
         provider_service_instance = self.provider_service_instance
         super(ServiceInstanceLink, self).delete(*args, **kwargs)
@@ -54,4 +71,7 @@
         # model policy for core objects, so handle it during the save method.
         if provider_service_instance and (not provider_service_instance.deleted):
             provider_service_instance.link_deleted_count += 1
-            provider_service_instance.save(always_update_timestamp=True, update_fields=["updated", "link_deleted_count"])
+            provider_service_instance.save(
+                always_update_timestamp=True,
+                update_fields=["updated", "link_deleted_count"],
+            )
diff --git a/xos/core/models/serviceinterface.py b/xos/core/models/serviceinterface.py
index c6a7c5d..11d8fa9 100644
--- a/xos/core/models/serviceinterface.py
+++ b/xos/core/models/serviceinterface.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from serviceinterface_decl import *
 
+
 class ServiceInterface(ServiceInterface_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/servicemonitoringagentinfo.py b/xos/core/models/servicemonitoringagentinfo.py
index 7b7befc..28ea64b 100644
--- a/xos/core/models/servicemonitoringagentinfo.py
+++ b/xos/core/models/servicemonitoringagentinfo.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from servicemonitoringagentinfo_decl import *
 
+
 class ServiceMonitoringAgentInfo(ServiceMonitoringAgentInfo_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/serviceport.py b/xos/core/models/serviceport.py
index 21b78bc..00b6fac 100644
--- a/xos/core/models/serviceport.py
+++ b/xos/core/models/serviceport.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from serviceport_decl import *
 
+
 class ServicePort(ServicePort_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/site.py b/xos/core/models/site.py
index 1fabf93..a5499a3 100644
--- a/xos/core/models/site.py
+++ b/xos/core/models/site.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from site_decl import *
 
+
 class Site(Site_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/sitedeployment.py b/xos/core/models/sitedeployment.py
index 9c79264..dbac66d 100644
--- a/xos/core/models/sitedeployment.py
+++ b/xos/core/models/sitedeployment.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from sitedeployment_decl import *
 
+
 class SiteDeployment(SiteDeployment_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/siteprivilege.py b/xos/core/models/siteprivilege.py
index 4c13592..ab450b9 100644
--- a/xos/core/models/siteprivilege.py
+++ b/xos/core/models/siteprivilege.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from siteprivilege_decl import *
 
+
 class SitePrivilege(SitePrivilege_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/siterole.py b/xos/core/models/siterole.py
index 0c89ef3..e38c0a4 100644
--- a/xos/core/models/siterole.py
+++ b/xos/core/models/siterole.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from siterole_decl import *
 
+
 class SiteRole(SiteRole_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/slice.py b/xos/core/models/slice.py
index 975ec5a..aff0de3 100644
--- a/xos/core/models/slice.py
+++ b/xos/core/models/slice.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,33 +15,44 @@
 from xos.exceptions import *
 from slice_decl import *
 
+
 class Slice(Slice_decl):
     class Meta:
         proxy = True
 
-    NETWORK_CHOICES = ((None, 'Default'), ('host', 'Host'), ('bridged', 'Bridged'), ('noauto', 'No Automatic Networks'))
+    NETWORK_CHOICES = (
+        (None, "Default"),
+        ("host", "Host"),
+        ("bridged", "Bridged"),
+        ("noauto", "No Automatic Networks"),
+    )
 
     def save(self, *args, **kwargs):
         # set creator on first save
-        if not self.creator and hasattr(self, 'caller'):
+        if not self.creator and hasattr(self, "caller"):
             self.creator = self.caller
 
         # TODO: Verify this logic is still in use
         # only admins change a slice's creator
-        if 'creator' in self.changed_fields and \
-            (not hasattr(self, 'caller') or not self.caller.is_admin):
+        if "creator" in self.changed_fields and (
+            not hasattr(self, "caller") or not self.caller.is_admin
+        ):
 
-            if (self._initial["creator"]==None) and (self.creator==getattr(self,"caller",None)):
+            if (self._initial["creator"] is None) and (
+                self.creator == getattr(self, "caller", None)
+            ):
                 # it's okay if the creator is being set by the caller to
                 # himeself on a new slice object.
                 pass
             else:
-                raise PermissionDenied("Insufficient privileges to change slice creator",
-                                       {'creator': "Insufficient privileges to change slice creator"})
+                raise PermissionDenied(
+                    "Insufficient privileges to change slice creator",
+                    {"creator": "Insufficient privileges to change slice creator"},
+                )
 
-        if self.network=="Private Only":
+        if self.network == "Private Only":
             # "Private Only" was the default from the old Tenant View
-            self.network=None
+            self.network = None
         self.enforce_choices(self.network, self.NETWORK_CHOICES)
 
-        super(Slice, self).save(*args,  **kwargs)
+        super(Slice, self).save(*args, **kwargs)
diff --git a/xos/core/models/sliceprivilege.py b/xos/core/models/sliceprivilege.py
index 53eab21..26832f7 100644
--- a/xos/core/models/sliceprivilege.py
+++ b/xos/core/models/sliceprivilege.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from sliceprivilege_decl import *
 
+
 class SlicePrivilege(SlicePrivilege_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/slicerole.py b/xos/core/models/slicerole.py
index 373fb8c..957c2e1 100644
--- a/xos/core/models/slicerole.py
+++ b/xos/core/models/slicerole.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from slicerole_decl import *
 
+
 class SliceRole(SliceRole_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/tag.py b/xos/core/models/tag.py
index ae3a8f1..41f3f7f 100644
--- a/xos/core/models/tag.py
+++ b/xos/core/models/tag.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from tag_decl import *
 
+
 class Tag(Tag_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/tenantwithcontainer.py b/xos/core/models/tenantwithcontainer.py
index 63abcb2..d645a63 100644
--- a/xos/core/models/tenantwithcontainer.py
+++ b/xos/core/models/tenantwithcontainer.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from tenantwithcontainer_decl import *
 
+
 class TenantWithContainer(TenantWithContainer_decl):
     class Meta:
         proxy = True
@@ -28,7 +28,7 @@
 
     # vSG service relies on instance_id attribute
     def get_attribute(self, name, default=None):
-        if name=="instance_id":
+        if name == "instance_id":
             if self.instance:
                 return self.instance.id
             else:
@@ -48,6 +48,7 @@
     @property
     def image(self):
         from core.models import Image
+
         # Implement the logic here to pick the image that should be used when
         # instantiating the VM that will hold the container.
 
@@ -67,4 +68,3 @@
             self.creator = self.caller
 
         super(TenantWithContainer, self).save(*args, **kwargs)
-
diff --git a/xos/core/models/trustdomain.py b/xos/core/models/trustdomain.py
index 23a1884..43da6f8 100644
--- a/xos/core/models/trustdomain.py
+++ b/xos/core/models/trustdomain.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from trustdomain_decl import *
 
+
 class TrustDomain(TrustDomain_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/user.py b/xos/core/models/user.py
index 7447b49..a829619 100644
--- a/xos/core/models/user.py
+++ b/xos/core/models/user.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -44,20 +43,19 @@
 
 
 class UserManager(BaseUserManager):
-
     def create_user(self, email, firstname, lastname, password=None):
         """
         Creates and saves a User with the given email, date of
         birth and password.
         """
         if not email:
-            raise ValueError('Users must have an email address')
+            raise ValueError("Users must have an email address")
 
         user = self.model(
             email=UserManager.normalize_email(email),
             firstname=firstname,
             lastname=lastname,
-            password=password
+            password=password,
         )
         # user.set_password(password)
         user.is_admin = True
@@ -69,11 +67,9 @@
         Creates and saves a superuser with the given email, date of
         birth and password.
         """
-        user = self.create_user(email,
-                                password=password,
-                                firstname=firstname,
-                                lastname=lastname
-                                )
+        user = self.create_user(
+            email, password=password, firstname=firstname, lastname=lastname
+        )
         user.is_admin = True
         user.save(using=self._db)
         return user
@@ -91,7 +87,6 @@
 
 
 class DeletedUserManager(UserManager):
-
     def get_queryset(self):
         parent = super(UserManager, self)
         if hasattr(parent, "get_queryset"):
@@ -115,25 +110,24 @@
         app_label = "core"
 
     email = models.EmailField(
-        verbose_name='email address',
-        max_length=255,
-        unique=True,
-        db_index=True,
+        verbose_name="email address", max_length=255, unique=True, db_index=True
     )
 
     username = StrippedCharField(max_length=255, default="Something")
 
-    firstname = StrippedCharField(
-        help_text="person's given name", max_length=200)
+    firstname = StrippedCharField(help_text="person's given name", max_length=200)
     lastname = StrippedCharField(help_text="person's surname", max_length=200)
 
-    phone = StrippedCharField(null=True, blank=True,
-                              help_text="phone number contact", max_length=100)
+    phone = StrippedCharField(
+        null=True, blank=True, help_text="phone number contact", max_length=100
+    )
     user_url = models.URLField(null=True, blank=True)
-    site = models.ForeignKey('Site', related_name='users',
-                             help_text="Site this user will be homed too")
+    site = models.ForeignKey(
+        "Site", related_name="users", help_text="Site this user will be homed too"
+    )
     public_key = models.TextField(
-        null=True, blank=True, max_length=1024, help_text="Public key string")
+        null=True, blank=True, max_length=1024, help_text="Public key string"
+    )
 
     is_active = models.BooleanField(default=True)
     is_admin = models.BooleanField(default=False)
@@ -143,54 +137,102 @@
     is_appuser = models.BooleanField(default=False)
 
     login_page = StrippedCharField(
-        help_text="send this user to a specific page on login", max_length=200, null=True, blank=True)
+        help_text="send this user to a specific page on login",
+        max_length=200,
+        null=True,
+        blank=True,
+    )
 
-    created = models.DateTimeField(help_text="Time this model was created", auto_now_add=True, null=False, blank=False)
-    updated = models.DateTimeField(help_text="Time this model was changed by a non-synchronizer", default=now, null=False,
-                            blank=False)
-    enacted = models.DateTimeField(default=None, help_text="When synced, set to the timestamp of the data that was synced",
-                            null=True, blank=True)
-    policed = models.DateTimeField(default=None, help_text="When policed, set to the timestamp of the data that was policed",
-                            null=True, blank=True)
-    backend_status = StrippedCharField(max_length=1024,
-                                       default="Provisioning in progress")
-    backend_code = models.IntegerField( default = 0, null = False )
+    created = models.DateTimeField(
+        help_text="Time this model was created",
+        auto_now_add=True,
+        null=False,
+        blank=False,
+    )
+    updated = models.DateTimeField(
+        help_text="Time this model was changed by a non-synchronizer",
+        default=now,
+        null=False,
+        blank=False,
+    )
+    enacted = models.DateTimeField(
+        default=None,
+        help_text="When synced, set to the timestamp of the data that was synced",
+        null=True,
+        blank=True,
+    )
+    policed = models.DateTimeField(
+        default=None,
+        help_text="When policed, set to the timestamp of the data that was policed",
+        null=True,
+        blank=True,
+    )
+    backend_status = StrippedCharField(
+        max_length=1024, default="Provisioning in progress"
+    )
+    backend_code = models.IntegerField(default=0, null=False)
     backend_need_delete = models.BooleanField(default=False)
     backend_need_reap = models.BooleanField(default=False)
     deleted = models.BooleanField(default=False)
     write_protect = models.BooleanField(default=False)
     lazy_blocked = models.BooleanField(default=False)
-    no_sync = models.BooleanField(default=False)     # prevent object sync
-    no_policy = models.BooleanField(default=False)   # prevent model_policy run
+    no_sync = models.BooleanField(default=False)  # prevent object sync
+    no_policy = models.BooleanField(default=False)  # prevent model_policy run
 
-    timezone = models.CharField(max_length=100, null=True, blank=True, default=settings.TIME_ZONE)
+    timezone = models.CharField(
+        max_length=100, null=True, blank=True, default=settings.TIME_ZONE
+    )
 
-    leaf_model_name = models.CharField( help_text = "The most specialized model in this chain of inheritance, often defined by a service developer", max_length = 1024, null = False )
+    leaf_model_name = models.CharField(
+        help_text="The most specialized model in this chain of inheritance, often defined by a service developer",
+        max_length=1024,
+        null=False,
+    )
 
-    policy_status = models.CharField( default = "0 - Policy in process", max_length = 1024, null = True )
-    policy_code = models.IntegerField( default = 0, null = True )
+    policy_status = models.CharField(
+        default="0 - Policy in process", max_length=1024, null=True
+    )
+    policy_code = models.IntegerField(default=0, null=True)
 
     backend_need_delete_policy = models.BooleanField(
-        help_text="True if delete model_policy must be run before object can be reaped", default=False, null=False,
-        blank=True)
-    xos_managed = models.BooleanField(help_text="True if xos is responsible for creating/deleting this object", default=True,
-                               null=False, blank=True)
-    backend_handle = models.CharField(help_text="Handle used by the backend to track this object", max_length=1024, null=True,
-                               blank=True)
-    changed_by_step = models.DateTimeField(default=None, help_text="Time this model was changed by a sync step", null=True,
-                                    blank=True)
-    changed_by_policy = models.DateTimeField(default=None, help_text="Time this model was changed by a model policy",
-                                      null=True, blank=True)
+        help_text="True if delete model_policy must be run before object can be reaped",
+        default=False,
+        null=False,
+        blank=True,
+    )
+    xos_managed = models.BooleanField(
+        help_text="True if xos is responsible for creating/deleting this object",
+        default=True,
+        null=False,
+        blank=True,
+    )
+    backend_handle = models.CharField(
+        help_text="Handle used by the backend to track this object",
+        max_length=1024,
+        null=True,
+        blank=True,
+    )
+    changed_by_step = models.DateTimeField(
+        default=None,
+        help_text="Time this model was changed by a sync step",
+        null=True,
+        blank=True,
+    )
+    changed_by_policy = models.DateTimeField(
+        default=None,
+        help_text="Time this model was changed by a model policy",
+        null=True,
+        blank=True,
+    )
 
     objects = UserManager()
     deleted_objects = DeletedUserManager()
 
-    USERNAME_FIELD = 'email'
-    REQUIRED_FIELDS = ['firstname', 'lastname']
+    USERNAME_FIELD = "email"
+    REQUIRED_FIELDS = ["firstname", "lastname"]
 
     PI_FORBIDDEN_FIELDS = ["is_admin", "site", "is_staff"]
-    USER_FORBIDDEN_FIELDS = ["is_admin", "is_active",
-                             "site", "is_staff", "is_readonly"]
+    USER_FORBIDDEN_FIELDS = ["is_admin", "is_active", "site", "is_staff", "is_readonly"]
 
     def __init__(self, *args, **kwargs):
         super(User, self).__init__(*args, **kwargs)
@@ -210,7 +252,7 @@
 
     @property
     def keyname(self):
-        return self.email[:self.email.find('@')]
+        return self.email[: self.email.find("@")]
 
     def __unicode__(self):
         return self.email
@@ -230,27 +272,31 @@
 
     def delete(self, *args, **kwds):
         # so we have something to give the observer
-        purge = kwds.get('purge',False)
+        purge = kwds.get("purge", False)
         if purge:
-            del kwds['purge']
-        silent = kwds.get('silent',False)
+            del kwds["purge"]
+        silent = kwds.get("silent", False)
         if silent:
-            del kwds['silent']
+            del kwds["silent"]
         try:
             purge = purge or observer_disabled
         except NameError:
             pass
 
-        if (purge):
+        if purge:
             super(User, self).delete(*args, **kwds)
         else:
-            if (not self.write_protect ):
+            if not self.write_protect:
                 self.deleted = True
-                self.enacted=None
-                self.policed=None
-                self.save(update_fields=['enacted','deleted','policed'], silent=silent)
+                self.enacted = None
+                self.policed = None
+                self.save(
+                    update_fields=["enacted", "deleted", "policed"], silent=silent
+                )
 
-                collector = XOSCollector(using=router.db_for_write(self.__class__, instance=self))
+                collector = XOSCollector(
+                    using=router.db_for_write(self.__class__, instance=self)
+                )
                 collector.collect([self])
                 with transaction.atomic():
                     for (k, models) in collector.data.items():
@@ -259,16 +305,25 @@
                                 # in case it's already been deleted, don't delete again
                                 continue
                             model.deleted = True
-                            model.enacted=None
-                            model.policed=None
-                            model.save(update_fields=['enacted','deleted','policed'], silent=silent)
+                            model.enacted = None
+                            model.policed = None
+                            model.save(
+                                update_fields=["enacted", "deleted", "policed"],
+                                silent=silent,
+                            )
 
     def has_important_changes(self):
         """ Determine whether the model has changes that should be reflected in one of the changed_by_* timestamps.
             Ignores various feedback and bookkeeping state set by synchronizers.
         """
         for field_name in self.changed_fields:
-            if field_name in ["policed", "updated", "enacted", "changed_by_step", "changed_by_policy"]:
+            if field_name in [
+                "policed",
+                "updated",
+                "enacted",
+                "changed_by_step",
+                "changed_by_policy",
+            ]:
                 continue
             if field_name.startswith("backend_"):
                 continue
@@ -290,7 +345,7 @@
         # let the user specify silence as either a kwarg or an instance varible
         silent = self.silent
         if "silent" in kwargs:
-            silent=silent or kwargs.pop("silent")
+            silent = silent or kwargs.pop("silent")
 
         caller_kind = "unknown"
 
@@ -307,7 +362,9 @@
         # want to cause an update. For model_policies or sync_steps it should no longer be required.
         always_update_timestamp = False
         if "always_update_timestamp" in kwargs:
-            always_update_timestamp = always_update_timestamp or kwargs.pop("always_update_timestamp")
+            always_update_timestamp = always_update_timestamp or kwargs.pop(
+                "always_update_timestamp"
+            )
 
         is_sync_save = False
         if "is_sync_save" in kwargs:
@@ -317,13 +374,18 @@
         if "is_policy_save" in kwargs:
             is_policy_save = kwargs.pop("is_policy_save")
 
-        if (caller_kind!="synchronizer") or always_update_timestamp:
+        if (caller_kind != "synchronizer") or always_update_timestamp:
             self.updated = timezone.now()
         else:
             # We're not auto-setting timestamp, but let's check to make sure that the caller hasn't tried to set our
             # timestamp backward...
-            if (self.updated != self._initial["updated"]) and ((not update_fields) or ("updated" in update_fields)):
-                log.info("Synchronizer tried to change `updated` timestamp on model %s from %s to %s. Ignored." % (self, self._initial["updated"], self.updated))
+            if (self.updated != self._initial["updated"]) and (
+                (not update_fields) or ("updated" in update_fields)
+            ):
+                log.info(
+                    "Synchronizer tried to change `updated` timestamp on model %s from %s to %s. Ignored."
+                    % (self, self._initial["updated"], self.updated)
+                )
                 self.updated = self._initial["updated"]
 
         if is_sync_save and self.has_important_changes():
@@ -351,13 +413,22 @@
     def send_temporary_password(self):
         password = User.objects.make_random_password()
         self.set_password(password)
-        subject, from_email, to = 'OpenCloud Account Credentials', 'support@opencloud.us', str(
-            self.email)
-        text_content = 'This is an important message.'
+        subject, from_email, to = (
+            "OpenCloud Account Credentials",
+            "support@opencloud.us",
+            str(self.email),
+        )
+        text_content = "This is an important message."
         userUrl = "http://%s/" % get_request().get_host()
-        html_content = """<p>Your account has been created on OpenCloud. Please log in <a href=""" + userUrl + """>here</a> to activate your account<br><br>Username: """ + \
-            self.email + """<br>Temporary Password: """ + password + \
-            """<br>Please change your password once you successully login into the site.</p>"""
+        html_content = (
+            """<p>Your account has been created on OpenCloud. Please log in <a href="""
+            + userUrl
+            + """>here</a> to activate your account<br><br>Username: """
+            + self.email
+            + """<br>Temporary Password: """
+            + password
+            + """<br>Please change your password once you successfully login into the site.</p>"""
+        )
         msg = EmailMultiAlternatives(subject, text_content, from_email, [to])
         msg.attach_alternative(html_content, "text/html")
         msg.send()
@@ -386,8 +457,8 @@
         cls = ct.model_class()
         return cls.objects.get(id=object_id)
 
-    ''' This function is hardcoded here because we do not yet
-    generate the User class'''
+    """ This function is hardcoded here because we do not yet
+    generate the User class"""
+
     def can_access(self, ctx):
         return security.user_policy_security_check(self, ctx), "user_policy"
-
diff --git a/xos/core/models/xosbase.py b/xos/core/models/xosbase.py
index 94bbb68..d45c06a 100644
--- a/xos/core/models/xosbase.py
+++ b/xos/core/models/xosbase.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from xos.exceptions import*
+from xos.exceptions import *
 from xosbase_decl import *
 
+
 class XOSBase(XOSBase_decl):
     objects = XOSBaseManager()
     deleted_objects = XOSBaseDeletionManager()
@@ -28,7 +28,7 @@
 
     def __init__(self, *args, **kwargs):
         super(XOSBase, self).__init__(*args, **kwargs)
-        self._initial = self._dict # for PlModelMixIn
+        self._initial = self._dict  # for PlModelMixIn
         self.silent = False
 
     def get_controller(self):
@@ -36,29 +36,33 @@
 
     def delete(self, *args, **kwds):
         # so we have something to give the observer
-        purge = kwds.get('purge',False)
+        purge = kwds.get("purge", False)
         if purge:
-            del kwds['purge']
-        silent = kwds.get('silent',False)
+            del kwds["purge"]
+        silent = kwds.get("silent", False)
         if silent:
-            del kwds['silent']
+            del kwds["silent"]
         try:
             purge = purge or observer_disabled
         except NameError:
             pass
 
-        if (purge):
+        if purge:
             pk = self.pk
             super(XOSBase, self).delete(*args, **kwds)
             self.push_messagebus_event(deleted=True, pk=pk)
         else:
-            if (not self.write_protect ):
+            if not self.write_protect:
                 self.deleted = True
-                self.enacted=None
-                self.policed=None
-                self.save(update_fields=['enacted','deleted','policed'], silent=silent)
+                self.enacted = None
+                self.policed = None
+                self.save(
+                    update_fields=["enacted", "deleted", "policed"], silent=silent
+                )
 
-                collector = XOSCollector(using=router.db_for_write(self.__class__, instance=self))
+                collector = XOSCollector(
+                    using=router.db_for_write(self.__class__, instance=self)
+                )
                 collector.collect([self])
                 with transaction.atomic():
                     for (k, models) in collector.data.items():
@@ -71,9 +75,12 @@
                                 # in case it's already been deleted, don't delete again
                                 continue
                             model.deleted = True
-                            model.enacted=None
-                            model.policed=None
-                            model.save(update_fields=['enacted','deleted','policed'], silent=silent)
+                            model.enacted = None
+                            model.policed = None
+                            model.save(
+                                update_fields=["enacted", "deleted", "policed"],
+                                silent=silent,
+                            )
 
     def verify_live_keys(self, update_fields):
         """ Check the fields to be updated, if they contain foreign keys, that the foreign keys only point
@@ -92,7 +99,7 @@
         for field in self._meta.fields:
             try:
                 f = getattr(self, field.name)
-            except Exception, e:
+            except Exception as e:
                 # Exception django.db.models.fields.related.RelatedObjectDoesNotExist
                 # is thrown by django when you're creating an object that has a base and the base doesn't exist yet
                 continue
@@ -102,7 +109,7 @@
                 continue
 
             ftype = field.get_internal_type()
-            if (ftype != "ForeignKey"):
+            if ftype != "ForeignKey":
                 # If field isn't a foreign key, we don't care
                 continue
 
@@ -111,14 +118,22 @@
                 continue
 
             if getattr(f, "deleted", False):
-                raise Exception("Attempt to save object with deleted foreign key reference")
+                raise Exception(
+                    "Attempt to save object with deleted foreign key reference"
+                )
 
     def has_important_changes(self):
         """ Determine whether the model has changes that should be reflected in one of the changed_by_* timestamps.
             Ignores various feedback and bookkeeping state set by synchronizers.
         """
         for field_name in self.changed_fields:
-            if field_name in ["policed", "updated", "enacted", "changed_by_step", "changed_by_policy"]:
+            if field_name in [
+                "policed",
+                "updated",
+                "enacted",
+                "changed_by_step",
+                "changed_by_policy",
+            ]:
                 continue
             if field_name.startswith("backend_"):
                 continue
@@ -132,7 +147,7 @@
         # let the user specify silence as either a kwarg or an instance varible
         silent = self.silent
         if "silent" in kwargs:
-            silent=silent or kwargs.pop("silent")
+            silent = silent or kwargs.pop("silent")
 
         caller_kind = "unknown"
 
@@ -149,7 +164,9 @@
         # want to cause an update. For model_policies or sync_steps it should no longer be required.
         always_update_timestamp = False
         if "always_update_timestamp" in kwargs:
-            always_update_timestamp = always_update_timestamp or kwargs.pop("always_update_timestamp")
+            always_update_timestamp = always_update_timestamp or kwargs.pop(
+                "always_update_timestamp"
+            )
 
         is_sync_save = False
         if "is_sync_save" in kwargs:
@@ -166,12 +183,29 @@
         if "allow_modify_feedback" in kwargs:
             allow_modify_feedback = kwargs.pop("allow_modify_feedback")
 
-        if hasattr(self, "feedback_state_fields") and not allow_modify_feedback and not self.is_new:
-            feedback_changed = [field for field in self.changed_fields if field in self.feedback_state_fields]
+        if (
+            hasattr(self, "feedback_state_fields")
+            and not allow_modify_feedback
+            and not self.is_new
+        ):
+            feedback_changed = [
+                field
+                for field in self.changed_fields
+                if field in self.feedback_state_fields
+            ]
 
             if len(feedback_changed) > 0 and caller_kind != "synchronizer":
-                log.error('A non Synchronizer is trying to update fields marked as feedback_state', model=self._dict, feedback_state_fields=self.feedback_state_fields, caller_kind=caller_kind, feedback_changed=feedback_changed)
-                raise XOSPermissionDenied('A non Synchronizer is trying to update fields marked as feedback_state: %s' % feedback_changed)
+                log.error(
+                    "A non Synchronizer is trying to update fields marked as feedback_state",
+                    model=self._dict,
+                    feedback_state_fields=self.feedback_state_fields,
+                    caller_kind=caller_kind,
+                    feedback_changed=feedback_changed,
+                )
+                raise XOSPermissionDenied(
+                    "A non Synchronizer is trying to update fields marked as feedback_state: %s"
+                    % feedback_changed
+                )
 
         if (caller_kind != "synchronizer") or always_update_timestamp:
             # Non-synchronizers update the `updated` timestamp
@@ -179,8 +213,13 @@
         else:
             # We're not auto-setting timestamp, but let's check to make sure that the caller hasn't tried to set our
             # timestamp backward...
-            if (self.updated != self._initial["updated"]) and ((not update_fields) or ("updated" in update_fields)):
-                log.info("Synchronizer tried to change `updated` timestamp on model %s from %s to %s. Ignored." % (self, self._initial["updated"], self.updated))
+            if (self.updated != self._initial["updated"]) and (
+                (not update_fields) or ("updated" in update_fields)
+            ):
+                log.info(
+                    "Synchronizer tried to change `updated` timestamp on model %s from %s to %s. Ignored."
+                    % (self, self._initial["updated"], self.updated)
+                )
                 self.updated = self._initial["updated"]
 
         if is_sync_save and self.has_important_changes():
@@ -194,7 +233,7 @@
                 update_fields.append("changed_by_policy")
 
         with transaction.atomic():
-            self.verify_live_keys(update_fields = update_fields)
+            self.verify_live_keys(update_fields=update_fields)
             super(XOSBase, self).save(*args, **kwargs)
 
         self.push_messagebus_event()
@@ -203,8 +242,8 @@
 
     def tologdict(self):
         try:
-            d = {'model_name':self.__class__.__name__, 'pk': self.pk}
-        except:
+            d = {"model_name": self.__class__.__name__, "pk": self.pk}
+        except BaseException:
             d = {}
 
         return d
@@ -212,14 +251,14 @@
     # for the old django admin UI
     def __unicode__(self):
         if hasattr(self, "name") and self.name:
-            return u'%s' % self.name
+            return u"%s" % self.name
         elif hasattr(self, "id") and self.id:
             if hasattr(self, "leaf_model_name") and self.leaf_model_name:
-                return u'%s-%s' % (self.leaf_model_name, self.id)
+                return u"%s-%s" % (self.leaf_model_name, self.id)
             else:
-                return u'%s-%s' % (self.__class__.__name__, self.id)
+                return u"%s-%s" % (self.__class__.__name__, self.id)
         else:
-            return u'%s-unsaved' % self.__class__.__name__
+            return u"%s-unsaved" % self.__class__.__name__
 
     def get_content_type_key(self):
         ct = ContentType.objects.get_for_model(self.__class__)
@@ -235,4 +274,3 @@
         ct = XOSBase.get_content_type_from_key(content_type)
         cls = ct.model_class()
         return cls.objects.get(id=object_id)
-
diff --git a/xos/core/models/xosbase_header.py b/xos/core/models/xosbase_header.py
index 0a108ba..f27fc05 100644
--- a/xos/core/models/xosbase_header.py
+++ b/xos/core/models/xosbase_header.py
@@ -39,15 +39,16 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 XOS_GLOBAL_DEFAULT_SECURITY_POLICY = True
 
+
 def json_handler(obj):
     if isinstance(obj, pytz.tzfile.DstTzInfo):
         # json can't serialize DstTzInfo
         return str(obj)
-    elif hasattr(obj, 'timetuple'):
+    elif hasattr(obj, "timetuple"):
         return calendar.timegm(obj.timetuple())
     elif isinstance(obj, QuerySet):
         # django 1.11.0 - model_to_dict() turns reverse foreign relations into querysets
@@ -58,8 +59,10 @@
     else:
         return obj
 
+
 class StrippedCharField(models.CharField):
     """ CharField that strips trailing and leading spaces."""
+
     def clean(self, value, *args, **kwds):
         if value is not None:
             value = value.strip()
@@ -70,7 +73,7 @@
 # the core model is abstract.
 class XOSBaseDeletionManager(models.Manager):
     def get_queryset(self):
-        parent=super(XOSBaseDeletionManager, self)
+        parent = super(XOSBaseDeletionManager, self)
         if hasattr(parent, "get_queryset"):
             return parent.get_queryset().filter(deleted=True)
         else:
@@ -80,11 +83,12 @@
     def get_query_set(self):
         return self.get_queryset()
 
+
 # This manager will be inherited by all subclasses because
 # the core model is abstract.
 class XOSBaseManager(models.Manager):
     def get_queryset(self):
-        parent=super(XOSBaseManager, self)
+        parent = super(XOSBaseManager, self)
         if hasattr(parent, "get_queryset"):
             return parent.get_queryset().filter(deleted=False)
         else:
@@ -94,6 +98,7 @@
     def get_query_set(self):
         return self.get_queryset()
 
+
 class PlModelMixIn(object):
     # Provides useful methods for computing which objects in a model have
     # changed. Make sure to do self._initial = self._dict in the __init__
@@ -106,20 +111,23 @@
 
     @property
     def _dict(self):
-        return model_to_dict(self, fields=[field.name for field in
-                             self._meta.fields])
+        return model_to_dict(self, fields=[field.name for field in self._meta.fields])
 
-    def fields_differ(self,f1,f2):
-        if isinstance(f1,datetime.datetime) and isinstance(f2,datetime.datetime) and (timezone.is_aware(f1) != timezone.is_aware(f2)):
+    def fields_differ(self, f1, f2):
+        if (
+            isinstance(f1, datetime.datetime)
+            and isinstance(f2, datetime.datetime)
+            and (timezone.is_aware(f1) != timezone.is_aware(f2))
+        ):
             return True
         else:
-            return (f1 != f2)
+            return f1 != f2
 
     @property
     def diff(self):
         d1 = self._initial
         d2 = self._dict
-        diffs = [(k, (v, d2[k])) for k, v in d1.items() if self.fields_differ(v,d2[k])]
+        diffs = [(k, (v, d2[k])) for k, v in d1.items() if self.fields_differ(v, d2[k])]
         return dict(diffs)
 
     @property
@@ -157,19 +165,19 @@
         if not leaf_model_name:
             return self
 
-        if (leaf_model_name == self.__class__.__name__):
+        if leaf_model_name == self.__class__.__name__:
             return self
 
         leaf_model_class = self.get_model_class_by_name(self.leaf_model_name)
 
-        assert (self.id)
+        assert self.id
 
         if self.deleted:
             return leaf_model_class.deleted_objects.get(id=self.id)
         else:
             return leaf_model_class.objects.get(id=self.id)
 
-    #classmethod
+    # classmethod
     def getValidators(cls):
         """ primarily for REST API, return a dictionary of field names mapped
             to lists of the type of validations that need to be applied to
@@ -178,9 +186,9 @@
         validators = {}
         for field in cls._meta.fields:
             l = []
-            if field.blank==False:
+            if not field.blank:
                 l.append("notBlank")
-            if field.__class__.__name__=="URLField":
+            if field.__class__.__name__ == "URLField":
                 l.append("url")
             validators[field.name] = l
         return validators
@@ -194,9 +202,9 @@
     def set_backend_register(self, k, v):
         br = {}
         try:
-            br=json.loads(self.backend_register)
+            br = json.loads(self.backend_register)
         except AttributeError:
-            br={}
+            br = {}
 
         br[k] = v
         self.backend_register = json.dumps(br)
@@ -208,24 +216,24 @@
             return (None, None, None, None)
 
         try:
-            exponent = scratchpad['exponent']
+            exponent = scratchpad["exponent"]
         except KeyError:
             exponent = None
 
         try:
-            last_success_time = scratchpad['last_success']
+            last_success_time = scratchpad["last_success"]
             dt = datetime.datetime.fromtimestamp(last_success_time)
             last_success = dt.strftime("%Y-%m-%d %H:%M")
         except KeyError:
             last_success = None
 
         try:
-            failures = scratchpad['failures']
+            failures = scratchpad["failures"]
         except KeyError:
-            failures=None
+            failures = None
 
         try:
-            last_failure_time = scratchpad['last_failure']
+            last_failure_time = scratchpad["last_failure"]
             dt = datetime.datetime.fromtimestamp(last_failure_time)
             last_failure = dt.strftime("%Y-%m-%d %H:%M")
         except KeyError:
@@ -234,25 +242,42 @@
         return (exponent, last_success, last_failure, failures)
 
     def get_backend_icon(self):
-        is_perfect = (self.backend_status is not None) and self.backend_status.startswith("1 -")
-        is_good = (self.backend_status is not None) and (self.backend_status.startswith("0 -") or self.backend_status.startswith("1 -"))
-        is_provisioning = self.backend_status is None or self.backend_status == "Provisioning in progress" or self.backend_status==""
+        is_perfect = (
+            self.backend_status is not None
+        ) and self.backend_status.startswith("1 -")
+        is_good = (self.backend_status is not None) and (
+            self.backend_status.startswith("0 -")
+            or self.backend_status.startswith("1 -")
+        )
+        is_provisioning = (
+            self.backend_status is None
+            or self.backend_status == "Provisioning in progress"
+            or self.backend_status == ""
+        )
 
         # returns (icon_name, tooltip)
-        if (self.enacted is not None) and (self.enacted >= self.updated and is_good) or is_perfect:
+        if (
+            (self.enacted is not None)
+            and (self.enacted >= self.updated and is_good)
+            or is_perfect
+        ):
             return ("success", "successfully enacted")
         else:
             if is_good or is_provisioning:
-                return ("clock", "Pending sync, last_status = " + html_escape(self.backend_status, quote=True))
+                return (
+                    "clock",
+                    "Pending sync, last_status = "
+                    + html_escape(self.backend_status, quote=True),
+                )
             else:
                 return ("error", html_escape(self.backend_status, quote=True))
 
     def enforce_choices(self, field, choices):
         choices = [x[0] for x in choices]
         for choice in choices:
-            if field==choice:
+            if field == choice:
                 return
-            if (choice==None) and (field==""):
+            if (choice is None) and (field == ""):
                 # allow "" and None to be equivalent
                 return
         raise Exception("Field value %s is not in %s" % (field, str(choices)))
@@ -271,7 +296,7 @@
 
         fields = model_to_dict(self)
         for k in fields.keys():
-            if field_types.get(k,None) == "ForeignKey":
+            if field_types.get(k, None) == "ForeignKey":
                 new_key_name = "%s_id" % k
                 if (k in fields) and (new_key_name not in fields):
                     fields[new_key_name] = fields[k]
@@ -289,20 +314,16 @@
         bases = inspect.getmro(self.__class__)
         class_names = ",".join([x.__name__ for x in bases])
 
-        model['class_names'] = class_names
+        model["class_names"] = class_names
 
         if not pk:
             pk = self.pk
 
-        json_dict = {
-            'pk': pk,
-            'changed_fields': self.changed_fields,
-            'object': model
-        }
+        json_dict = {"pk": pk, "changed_fields": self.changed_fields, "object": model}
 
         if deleted:
-            json_dict['deleted'] = True
-            json_dict['object']['id'] = pk
+            json_dict["deleted"] = True
+            json_dict["object"]["id"] = pk
 
         topic = "xos.gui_events"
         key = self.__class__.__name__
@@ -332,8 +353,7 @@
 
     def get_initial_attribute(self, name, default=None):
         if self._initial["service_specific_attribute"]:
-            attributes = json.loads(
-                self._initial["service_specific_attribute"])
+            attributes = json.loads(self._initial["service_specific_attribute"])
         else:
             attributes = {}
         return attributes.get(name, default)
@@ -352,21 +372,31 @@
     @classmethod
     def setup_simple_attributes(cls):
         for (attrname, default) in cls.simple_attributes:
-            setattr(cls, attrname, property(
-                lambda self, attrname=attrname, default=default: self.get_attribute(attrname, default),
-                lambda self, value, attrname=attrname: self.set_attribute(
-                    attrname, value),
-                None,
-                attrname))
+            setattr(
+                cls,
+                attrname,
+                property(
+                    lambda self, attrname=attrname, default=default: self.get_attribute(
+                        attrname, default
+                    ),
+                    lambda self, value, attrname=attrname: self.set_attribute(
+                        attrname, value
+                    ),
+                    None,
+                    attrname,
+                ),
+            )
+
 
 # For cascading deletes, we need a Collector that doesn't do fastdelete,
 # so we get a full list of models.
 class XOSCollector(Collector):
-  def can_fast_delete(self, *args, **kwargs):
-    return False
+    def can_fast_delete(self, *args, **kwargs):
+        return False
+
 
 class ModelLink:
-    def __init__(self,dest,via,into=None):
-        self.dest=dest
-        self.via=via
-        self.into=into
+    def __init__(self, dest, via, into=None):
+        self.dest = dest
+        self.via = via
+        self.into = into
diff --git a/xos/core/models/xoscore.py b/xos/core/models/xoscore.py
index 1243a56..5c4af86 100644
--- a/xos/core/models/xoscore.py
+++ b/xos/core/models/xoscore.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from xoscore_decl import *
 
+
 class XOSCore(XOSCore_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/models/xosguiextension.py b/xos/core/models/xosguiextension.py
index 835a430..c32255e 100644
--- a/xos/core/models/xosguiextension.py
+++ b/xos/core/models/xosguiextension.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 from xos.exceptions import *
 from xosguiextension_decl import *
 
+
 class XOSGuiExtension(XOSGuiExtension_decl):
     class Meta:
         proxy = True
diff --git a/xos/core/serializers.py b/xos/core/serializers.py
index ff7e7b2..b5143f2d 100644
--- a/xos/core/serializers.py
+++ b/xos/core/serializers.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,263 +22,279 @@
 
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    sites = serializers.HyperlinkedRelatedField(view_name='site-detail')
+    sites = serializers.HyperlinkedRelatedField(view_name="site-detail")
+
     class Meta:
         model = Deployment
-        fields = ('id',
-                  'url',
-                  'name',
-                  'sites'
-                 )
+        fields = ("id", "url", "name", "sites")
+
 
 class ImageSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
+
     class Meta:
         model = Image
-        fields = ('id',
-                  'url',
-                  'image_id',
-                  'name',
-                  'disk_format',
-                  'container_format')
+        fields = ("id", "url", "image_id", "name", "disk_format", "container_format")
+
 
 class NodeSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
+
     class Meta:
         model = Node
-        fields = ('id',
-                 'url',
-                 'name')
+        fields = ("id", "url", "name")
+
 
 class ProjectSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
+
     class Meta:
         model = Project
-        fields = ('id',
-                 'url',
-                 'name')
+        fields = ("id", "url", "name")
+
 
 class ReservationSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
+
     class Meta:
         model = Reservation
-        fields = ('id',
-                 'url',
-                 'startTime',
-                 'slice',
-                 'duration',
-                 'endTime',
-                 )
+        fields = ("id", "url", "startTime", "slice", "duration", "endTime")
+
 
 class RoleSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
+
     class Meta:
         model = Role
-        fields = ('id', 
-                 'url',
-                 'role',
-                 'role_type')
+        fields = ("id", "url", "role", "role_type")
 
 
 class ServiceClassSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
+
     class Meta:
         model = ServiceClass
-        fields = ('id',
-                 'url',
-                 'name',
-                 'description',
-                 'commitment',
-                 'membershipFee',
-                 'membershipFeeMonths',
-                 'upgradeRequiresApproval',
-                 'upgradeFrom',
-                 )
+        fields = (
+            "id",
+            "url",
+            "name",
+            "description",
+            "commitment",
+            "membershipFee",
+            "membershipFeeMonths",
+            "upgradeRequiresApproval",
+            "upgradeFrom",
+        )
+
 
 class ServiceResourceSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    serviceClass = serializers.HyperlinkedRelatedField(view_name='serviceclass-detail')
+    serviceClass = serializers.HyperlinkedRelatedField(view_name="serviceclass-detail")
+
     class Meta:
         model = ServiceResource
-        fields = ('id',
-                 'url',
-                 'name',
-                 'serviceClass',
-                 'maxUnitsDeployment',
-                 'maxUnitsNode',
-                 'maxDuration',
-                 'bucketInRate',
-                 'bucketMaxSize',
-                 'cost',
-                 'calendarReservable',
-                 )
+        fields = (
+            "id",
+            "url",
+            "name",
+            "serviceClass",
+            "maxUnitsDeployment",
+            "maxUnitsNode",
+            "maxDuration",
+            "bucketInRate",
+            "bucketMaxSize",
+            "cost",
+            "calendarReservable",
+        )
+
 
 class SliceSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    site = serializers.HyperlinkedRelatedField(view_name='site-detail')
-    instances = serializers.HyperlinkedRelatedField(view_name='instance-detail')
+    site = serializers.HyperlinkedRelatedField(view_name="site-detail")
+    instances = serializers.HyperlinkedRelatedField(view_name="instance-detail")
+
     class Meta:
         model = Slice
-        fields = ('id',
-                  'url',
-                  'tenant_id',
-                  'enabled',
-                  'name',
-                  'url',
-                  'omf_friendly',
-                  'description',
-                  'slice_url',
-                  'network_id',
-                  'router_id',
-                  'subnet_id',
-                  'imagePreference',
-		  'network',
-		  'mountDataSets',
-                  'site',
-                  'instances',
-                  'updated',
-                  'created')
+        fields = (
+            "id",
+            "url",
+            "tenant_id",
+            "enabled",
+            "name",
+            "url",
+            "omf_friendly",
+            "description",
+            "slice_url",
+            "network_id",
+            "router_id",
+            "subnet_id",
+            "imagePreference",
+            "network",
+            "mountDataSets",
+            "site",
+            "instances",
+            "updated",
+            "created",
+        )
+
 
 class SlicePrivilegeSerializer(serializers.HyperlinkedModelSerializer):
     id = serializers.Field()
-    slice = serializers.HyperlinkedRelatedField(view_name='slice-detail')
-    user = serializers.HyperlinkedRelatedField(view_name='user-detail')
-    role = serializers.HyperlinkedRelatedField(view_name='role-detail')
+    slice = serializers.HyperlinkedRelatedField(view_name="slice-detail")
+    user = serializers.HyperlinkedRelatedField(view_name="user-detail")
+    role = serializers.HyperlinkedRelatedField(view_name="role-detail")
+
     class Meta:
         model = SlicePrivilege
-        fields = ('id',
-                  'url',
-                  'user',
-                  'slice',
-                  'role')
+        fields = ("id", "url", "user", "slice", "role")
+
 
 class SiteSerializer(serializers.HyperlinkedModelSerializer):
 
-    #Experimenting with whether to use ids, hyperlinks, or nested includes
-    #slices = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
-    #slices = serializers.RelatedField(many=True, read_only=True)
-    #slices = SliceSerializer(many=True)
+    # Experimenting with whether to use ids, hyperlinks, or nested includes
+    # slices = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
+    # slices = serializers.RelatedField(many=True, read_only=True)
+    # slices = SliceSerializer(many=True)
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    slices = serializers.HyperlinkedRelatedField(many=True, read_only=True,view_name='slice-detail')
+    slices = serializers.HyperlinkedRelatedField(
+        many=True, read_only=True, view_name="slice-detail"
+    )
 
     class Meta:
         model = Site
-        fields = ('id',
-                  'url',
-                  'name',
-                  'slices',
-                  'site_url',
-                  'enabled',
-                  'longitude',
-                  'latitude',
-                  'login_base',
-                  'tenant_id',
-                  'is_public',
-                  'abbreviated_name',
-                  'updated',
-                  'created')
+        fields = (
+            "id",
+            "url",
+            "name",
+            "slices",
+            "site_url",
+            "enabled",
+            "longitude",
+            "latitude",
+            "login_base",
+            "tenant_id",
+            "is_public",
+            "abbreviated_name",
+            "updated",
+            "created",
+        )
+
 
 class SitePrivilegeSerializer(serializers.HyperlinkedModelSerializer):
     id = serializers.Field()
-    site = serializers.HyperlinkedRelatedField(view_name='site-detail')
-    user = serializers.HyperlinkedRelatedField(view_name='user-detail')
-    role = serializers.HyperlinkedRelatedField(view_name='role-detail')
+    site = serializers.HyperlinkedRelatedField(view_name="site-detail")
+    user = serializers.HyperlinkedRelatedField(view_name="user-detail")
+    role = serializers.HyperlinkedRelatedField(view_name="role-detail")
+
     class Meta:
         model = SitePrivilege
-        fields = ('id',
-                  'url',
-                  'user',
-                  'site',
-                  'role')
+        fields = ("id", "url", "user", "site", "role")
+
 
 class InstanceSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    image = serializers.HyperlinkedRelatedField(view_name='image-detail')
-    slice = serializers.HyperlinkedRelatedField(view_name='slice-detail')
-    deploymentNetwork = serializers.HyperlinkedRelatedField(view_name='deployment-detail')
-    node = serializers.HyperlinkedRelatedField(view_name='node-detail')
-    
-    #slice = serializers.PrimaryKeyRelatedField(read_only=True)
+    image = serializers.HyperlinkedRelatedField(view_name="image-detail")
+    slice = serializers.HyperlinkedRelatedField(view_name="slice-detail")
+    deploymentNetwork = serializers.HyperlinkedRelatedField(
+        view_name="deployment-detail"
+    )
+    node = serializers.HyperlinkedRelatedField(view_name="node-detail")
+
+    # slice = serializers.PrimaryKeyRelatedField(read_only=True)
 
     class Meta:
         model = Instance
-        fields = ('id',
-                  'url',
-                  'instance_id',
-                  'name',
-                  'instance_name',
-                  'ip',
-                  'image',
-                  'slice',
-                  'deploymentNetwork',
-                  'node')
+        fields = (
+            "id",
+            "url",
+            "instance_id",
+            "name",
+            "instance_name",
+            "ip",
+            "image",
+            "slice",
+            "deploymentNetwork",
+            "node",
+        )
+
 
 class UserSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    site = serializers.HyperlinkedRelatedField(view_name='site-detail')
-    slice_privileges = serializers.HyperlinkedRelatedField(view_name='sliceprivilege-detail')
-    site_privileges = serializers.HyperlinkedRelatedField(view_name='siteprivilege-detail')
+    site = serializers.HyperlinkedRelatedField(view_name="site-detail")
+    slice_privileges = serializers.HyperlinkedRelatedField(
+        view_name="sliceprivilege-detail"
+    )
+    site_privileges = serializers.HyperlinkedRelatedField(
+        view_name="siteprivilege-detail"
+    )
+
     class Meta:
         model = User
-        fields = ('id',
-                  'url',
-                  'kuser_id', 
-                  'firstname', 
-                  'lastname',
-                  'email', 
-                  'password',
-                  'phone',
-                  'public_key', 
-                  'user_url',
-                  'is_admin',
-                  'slice_privileges',
-                  'site_privileges')
-                    
+        fields = (
+            "id",
+            "url",
+            "kuser_id",
+            "firstname",
+            "lastname",
+            "email",
+            "password",
+            "phone",
+            "public_key",
+            "user_url",
+            "is_admin",
+            "slice_privileges",
+            "site_privileges",
+        )
+
+
 class TagSerializer(serializers.HyperlinkedModelSerializer):
     # HyperlinkedModelSerializer doesn't include the id by default
     id = serializers.Field()
-    project = serializers.HyperlinkedRelatedField(view_name='project-detail')
-    #content_type = serializers.PrimaryKeyRelatedField(read_only=True)
-    content_type = serializers.RelatedField(source = "content_type")
-    content_object = serializers.RelatedField(source='content_object')
+    project = serializers.HyperlinkedRelatedField(view_name="project-detail")
+    # content_type = serializers.PrimaryKeyRelatedField(read_only=True)
+    content_type = serializers.RelatedField(source="content_type")
+    content_object = serializers.RelatedField(source="content_object")
+
     class Meta:
         model = Tag
-        fields = ('id', 
-                  'url',
-                  'project',
-                  'value',
-                  'content_type',
-                  'object_id',
-                  'content_object',
-                  'name')
+        fields = (
+            "id",
+            "url",
+            "project",
+            "value",
+            "content_type",
+            "object_id",
+            "content_object",
+            "name",
+        )
 
-serializerLookUp = { 
-                 Deployment: DeploymentSerializer,
-                 Image: ImageSerializer,
-                 Node: NodeSerializer,
-                 Project: ProjectSerializer,
-                 Reservation: ReservationSerializer,
-                 Role: RoleSerializer,
-                 ServiceClass: ServiceClassSerializer,
-                 ServiceResource: ServiceResourceSerializer,
-                 Site: SiteSerializer,
-                 SitePrivilege: SitePrivilegeSerializer,
-                 Slice: SliceSerializer,
-                 SlicePrivilege: SlicePrivilegeSerializer,
-                 Instance: InstanceSerializer,
-                 Tag: TagSerializer,
-                 User: UserSerializer,
-                 None: None,
-                }
 
+serializerLookUp = {
+    Deployment: DeploymentSerializer,
+    Image: ImageSerializer,
+    Node: NodeSerializer,
+    Project: ProjectSerializer,
+    Reservation: ReservationSerializer,
+    Role: RoleSerializer,
+    ServiceClass: ServiceClassSerializer,
+    ServiceResource: ServiceResourceSerializer,
+    Site: SiteSerializer,
+    SitePrivilege: SitePrivilegeSerializer,
+    Slice: SliceSerializer,
+    SlicePrivilege: SlicePrivilegeSerializer,
+    Instance: InstanceSerializer,
+    Tag: TagSerializer,
+    User: UserSerializer,
+    None: None,
+}
diff --git a/xos/coreapi/apihelper.py b/xos/coreapi/apihelper.py
index 2f21dbc..6e0109c 100644
--- a/xos/coreapi/apihelper.py
+++ b/xos/coreapi/apihelper.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from apistats import REQUEST_COUNT
 import base64
 import datetime
 import inspect
@@ -26,16 +26,22 @@
 
 from django.contrib.auth import authenticate as django_authenticate
 from django.db.models import F, Q
-from core.models import *
-from xos.exceptions import *
+from core.models import Site, User, XOSBase
+from xos.exceptions import (
+    XOSNotAuthenticated,
+    XOSPermissionDenied,
+    XOSNotFound,
+    XOSValidationError,
+)
 
 from importlib import import_module
 from django.conf import settings
 
 from xosconfig import Config
 from multistructlog import create_logger
-log = create_logger(Config().get('logging'))
-from apistats import REQUEST_COUNT
+
+log = create_logger(Config().get("logging"))
+
 
 class XOSDefaultSecurityContext(object):
     grant_access = True
@@ -44,21 +50,24 @@
 
 
 xos_anonymous_site = Site(
-    name='XOS Anonymous Site',
+    name="XOS Anonymous Site",
     enabled=True,
     hosts_nodes=False,
     hosts_users=True,
-    login_base='xos',
-    abbreviated_name='xos-anonymous')
+    login_base="xos",
+    abbreviated_name="xos-anonymous",
+)
 
 xos_anonymous_user = User(
-    username='XOS Anonymous User',
-    email='xos@example.com',
+    username="XOS Anonymous User",
+    email="xos@example.com",
     is_admin=False,
-    site=xos_anonymous_site)
+    site=xos_anonymous_site,
+)
 
 SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
 
+
 def translate_exceptions(model, method):
     """ this decorator translates XOS exceptions to grpc status codes """
 
@@ -69,8 +78,9 @@
             except Exception as e:
 
                 import traceback
+
                 tb = traceback.format_exc()
-                print tb
+                print(tb)
                 # TODO can we propagate it over the APIs?
 
                 if "context" in kwargs:
@@ -78,25 +88,35 @@
                 else:
                     context = args[2]
 
-                if hasattr(e, 'json_detail'):
+                if hasattr(e, "json_detail"):
                     context.set_details(e.json_detail)
-                elif hasattr(e, 'detail'):
+                elif hasattr(e, "detail"):
                     context.set_details(e.detail)
 
-                if (isinstance(e, XOSPermissionDenied)):
-                    REQUEST_COUNT.labels('xos-core', model, method, grpc.StatusCode.PERMISSION_DENIED).inc()
+                if isinstance(e, XOSPermissionDenied):
+                    REQUEST_COUNT.labels(
+                        "xos-core", model, method, grpc.StatusCode.PERMISSION_DENIED
+                    ).inc()
                     context.set_code(grpc.StatusCode.PERMISSION_DENIED)
-                elif (isinstance(e, XOSValidationError)):
-                    REQUEST_COUNT.labels('xos-core', model, method, grpc.StatusCode.INVALID_ARGUMENT).inc()
+                elif isinstance(e, XOSValidationError):
+                    REQUEST_COUNT.labels(
+                        "xos-core", model, method, grpc.StatusCode.INVALID_ARGUMENT
+                    ).inc()
                     context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
-                elif (isinstance(e, XOSNotAuthenticated)):
-                    REQUEST_COUNT.labels('xos-core', model, method, grpc.StatusCode.UNAUTHENTICATED).inc()
+                elif isinstance(e, XOSNotAuthenticated):
+                    REQUEST_COUNT.labels(
+                        "xos-core", model, method, grpc.StatusCode.UNAUTHENTICATED
+                    ).inc()
                     context.set_code(grpc.StatusCode.UNAUTHENTICATED)
-                elif (isinstance(e, XOSNotFound)):
-                    REQUEST_COUNT.labels('xos-core', model, method, grpc.StatusCode.NOT_FOUND).inc()
+                elif isinstance(e, XOSNotFound):
+                    REQUEST_COUNT.labels(
+                        "xos-core", model, method, grpc.StatusCode.NOT_FOUND
+                    ).inc()
                     context.set_code(grpc.StatusCode.NOT_FOUND)
                 raise
+
         return wrapper
+
     return decorator
 
 
@@ -106,17 +126,19 @@
 
 def benchmark(function):
     """ this decorator will report gRPC benchmark statistics every 10 seconds """
+
     def wrapper(*args, **kwargs):
         global bench_tStart
         global bench_ops
         result = function(*args, **kwargs)
         bench_ops = bench_ops + 1
         elap = time.time() - bench_tStart
-        if (elap >= 10):
-            print "performance %d" % (bench_ops / elap)
+        if elap >= 10:
+            print("performance %d" % (bench_ops / elap))
             bench_ops = 0
             bench_tStart = time.time()
         return result
+
     return wrapper
 
 
@@ -127,7 +149,7 @@
 
     def __init__(self):
         self.cached_creds = {}
-        self.timeout = 10          # keep cache entries around for 10s
+        self.timeout = 10  # keep cache entries around for 10s
         # lock to keep multiple callers from trimming at the same time
         self.lock = threading.Lock()
 
@@ -150,7 +172,8 @@
             # user)
             self.cached_creds[key] = {
                 "timeout": time.time() + self.timeout,
-                "user_id": user.id}
+                "user_id": user.id,
+            }
 
         return user
 
@@ -196,8 +219,7 @@
             return 0
         else:
             utc = pytz.utc
-            return (x - datetime.datetime(1970, 1,
-                                          1, tzinfo=utc)).total_seconds()
+            return (x - datetime.datetime(1970, 1, 1, tzinfo=utc)).total_seconds()
             # return time.mktime(x.timetuple())
 
     def convertForeignKey(self, x):
@@ -213,34 +235,35 @@
                 continue
 
             ftype = field.get_internal_type()
-            if (ftype == "CharField") or (
-                    ftype == "TextField") or (ftype == "SlugField"):
+            if (
+                (ftype == "CharField")
+                or (ftype == "TextField")
+                or (ftype == "SlugField")
+            ):
                 setattr(p_obj, field.name, str(getattr(obj, field.name)))
-            elif (ftype == "BooleanField"):
+            elif ftype == "BooleanField":
                 setattr(p_obj, field.name, getattr(obj, field.name))
-            elif (ftype == "AutoField"):
+            elif ftype == "AutoField":
                 setattr(p_obj, field.name, int(getattr(obj, field.name)))
-            elif (ftype == "IntegerField") or (ftype == "PositiveIntegerField") or (ftype == "BigIntegerField"):
+            elif (
+                (ftype == "IntegerField")
+                or (ftype == "PositiveIntegerField")
+                or (ftype == "BigIntegerField")
+            ):
                 setattr(p_obj, field.name, int(getattr(obj, field.name)))
-            elif (ftype == "ForeignKey"):
+            elif ftype == "ForeignKey":
                 setattr(
                     p_obj,
                     field.name + "_id",
-                    self.convertForeignKey(
-                        getattr(
-                            obj,
-                            field.name)))
-            elif (ftype == "DateTimeField"):
+                    self.convertForeignKey(getattr(obj, field.name)),
+                )
+            elif ftype == "DateTimeField":
                 setattr(
-                    p_obj,
-                    field.name,
-                    self.convertDateTime(
-                        getattr(
-                            obj,
-                            field.name)))
-            elif (ftype == "FloatField"):
+                    p_obj, field.name, self.convertDateTime(getattr(obj, field.name))
+                )
+            elif ftype == "FloatField":
                 setattr(p_obj, field.name, float(getattr(obj, field.name)))
-            elif (ftype == "GenericIPAddressField"):
+            elif ftype == "GenericIPAddressField":
                 setattr(p_obj, field.name, str(getattr(obj, field.name)))
 
         # Introspecting the django object for related objects is problematic due to _decl-style attics. The descendant
@@ -260,7 +283,10 @@
             related_name = field_name[:-4]
             if not hasattr(obj, related_name):
                 # if field doesn't exist in the django object, then ignore it
-                log.warning("Protobuf field %s doesn't have a corresponding django field" % field_name)
+                log.warning(
+                    "Protobuf field %s doesn't have a corresponding django field"
+                    % field_name
+                )
                 continue
 
             try:
@@ -287,10 +313,7 @@
         # counts.
 
         bases = inspect.getmro(obj.__class__)
-        bases = [
-            x for x in bases if issubclass(
-                x, XOSBase) or issubclass(
-                x, User)]
+        bases = [x for x in bases if issubclass(x, XOSBase) or issubclass(x, User)]
         p_obj.class_names = ",".join([x.__name__ for x in bases])
 
         p_obj.self_content_type_id = obj.get_content_type_key()
@@ -310,31 +333,38 @@
         for (fieldDesc, val) in message.ListFields():
             name = fieldDesc.name
             if name in fmap:
-                if (name == "id"):
+                if name == "id":
                     # don't let anyone set the id
                     continue
                 ftype = fmap[name].get_internal_type()
-                if (ftype == "CharField") or (
-                        ftype == "TextField") or (ftype == "SlugField"):
+                if (
+                    (ftype == "CharField")
+                    or (ftype == "TextField")
+                    or (ftype == "SlugField")
+                ):
                     args[name] = val
-                elif (ftype == "BooleanField"):
+                elif ftype == "BooleanField":
                     args[name] = val
-                elif (ftype == "AutoField"):
+                elif ftype == "AutoField":
                     args[name] = val
-                elif (ftype == "IntegerField") or (ftype == "PositiveIntegerField") or (ftype == "BigIntegerField"):
+                elif (
+                    (ftype == "IntegerField")
+                    or (ftype == "PositiveIntegerField")
+                    or (ftype == "BigIntegerField")
+                ):
                     args[name] = val
-                elif (ftype == "ForeignKey"):
+                elif ftype == "ForeignKey":
                     if val == 0:  # assume object id 0 means None
                         args[name] = None
                     else:
                         # field name already has "_id" at the end
                         args[name] = val
-                elif (ftype == "DateTimeField"):
+                elif ftype == "DateTimeField":
                     utc = pytz.utc
                     args[name] = datetime.datetime.fromtimestamp(val, tz=utc)
-                elif (ftype == "FloatField"):
+                elif ftype == "FloatField":
                     args[name] = val
-                elif (ftype == "GenericIPAddressField"):
+                elif ftype == "GenericIPAddressField":
                     args[name] = val
                 fset[name] = True
 
@@ -344,15 +374,17 @@
         # fix for possible django bug?
         # Unless we refresh the object, django will ignore every other m2m save
 
-        #djangoClass = djangoClass.__class__.objects.get(id=djangoClass.id)
+        # djangoClass = djangoClass.__class__.objects.get(id=djangoClass.id)
         djangoClass.refresh_from_db()
 
-        fmap={}
+        fmap = {}
         for m2m in djangoClass._meta.many_to_many:
             related_name = m2m.name
             if not related_name:
                 continue
-            if "+" in related_name:   # duplicated logic from related_objects; not sure if necessary
+            if (
+                "+" in related_name
+            ):  # duplicated logic from related_objects; not sure if necessary
                 continue
 
             fmap[m2m.name + "_ids"] = m2m
@@ -360,11 +392,11 @@
         fields_changed = []
         for (fieldDesc, val) in message.ListFields():
             if fieldDesc.name in fmap:
-                m2m = getattr(djangoClass,fmap[fieldDesc.name].name)
+                m2m = getattr(djangoClass, fmap[fieldDesc.name].name)
 
                 # remove items that are in the django object, but not in the proto object
                 for item in list(m2m.all()):
-                    if (not item.id in val):
+                    if item.id not in val:
                         m2m.remove(item.id)
                         fields_changed.append(fieldDesc.name)
 
@@ -382,7 +414,7 @@
         # to set.
 
         for name in update_fields:
-            if (name in fmap) and (not name in fields_changed):
+            if (name in fmap) and (name not in fields_changed):
                 m2m = getattr(djangoClass, fmap[name].name)
                 m2m.clear()
                 fields_changed.append(name)
@@ -414,8 +446,8 @@
             if not obj:
                 obj = djangoClass.objects.get(id=id)
             return obj
-        except djangoClass.DoesNotExist, e:
-            raise XOSNotFound(fields={'id': id, 'message': e.message})
+        except djangoClass.DoesNotExist as e:
+            raise XOSNotFound(fields={"id": id, "message": e.message})
 
     def xos_security_gate(self, obj, user, **access_types):
         sec_ctx = XOSDefaultSecurityContext()
@@ -427,8 +459,6 @@
         for k, v in access_types.items():
             setattr(sec_ctx, k, v)
 
-        obj_ctx = obj
-
         verdict, policy_name = obj.can_access(ctx=sec_ctx)
 
         # FIXME: This is the central point of enforcement for security policies
@@ -438,16 +468,19 @@
         if not verdict:
             #    logging.critical( ... )
             if obj.id:
-                object_descriptor = 'object %d' % obj.id
+                object_descriptor = "object %d" % obj.id
             else:
-                object_descriptor = 'new object'
+                object_descriptor = "new object"
 
             raise XOSPermissionDenied(
-                "User %(user_email)s cannot access %(django_class_name)s %(descriptor)s due to policy %(policy_name)s" % {
-                    'user_email': user.email,
-                    'django_class_name': obj.__class__.__name__,
-                    'policy_name': policy_name,
-                    'descriptor': object_descriptor})
+                "User %(user_email)s cannot access %(django_class_name)s %(descriptor)s due to policy %(policy_name)s"
+                % {
+                    "user_email": user.email,
+                    "django_class_name": obj.__class__.__name__,
+                    "policy_name": policy_name,
+                    "descriptor": object_descriptor,
+                }
+            )
 
     def xos_security_check(self, obj, user, **access_types):
         sec_ctx = XOSDefaultSecurityContext()
@@ -457,8 +490,6 @@
         for k, v in access_types.items():
             setattr(sec_ctx, k, v)
 
-        obj_ctx = obj
-
         verdict, _ = obj.can_access(ctx=sec_ctx)
         return verdict
 
@@ -483,7 +514,7 @@
 
             response = self.objToProto(new_obj)
             return response
-        except:
+        except BaseException:
             log.exception("Exception in apihelper.create")
             raise
 
@@ -498,7 +529,7 @@
             for (k, v) in args.iteritems():
                 setattr(obj, k, v)
 
-            m2m_field_names = [x.name+"_ids" for x in djangoClass._meta.many_to_many]
+            m2m_field_names = [x.name + "_ids" for x in djangoClass._meta.many_to_many]
 
             update_fields = []
             m2m_update_fields = []
@@ -528,7 +559,7 @@
 
             response = self.objToProto(obj)
             return response
-        except:
+        except BaseException:
             log.exception("Exception in apihelper.update")
             raise
 
@@ -540,7 +571,7 @@
 
             obj.delete()
             return Empty()
-        except:
+        except BaseException:
             log.exception("Exception in apihelper.delete")
             raise
 
@@ -577,15 +608,17 @@
         try:
             queryset = djangoClass.objects.all()
             filtered_queryset = (
-                elt for elt in queryset if self.xos_security_check(
-                    elt, user, read_access=True))
+                elt
+                for elt in queryset
+                if self.xos_security_check(elt, user, read_access=True)
+            )
 
             # FIXME: Implement auditing here
             # logging.info("User requested x objects, y objects were filtered out by policy z")
 
             response = self.querysetToProto(djangoClass, filtered_queryset)
             return response
-        except:
+        except BaseException:
             log.exception("Exception in apihelper.list")
             raise
 
@@ -607,8 +640,15 @@
                 query = self.build_filter(request, None)
                 queryset = djangoClass.objects.filter(query)
             elif request.kind == request.SYNCHRONIZER_DIRTY_OBJECTS:
-                query = (Q(enacted=None) | Q(enacted__lt=F('updated')) | Q(enacted__lt=F('changed_by_policy'))) \
-                        & Q(lazy_blocked=False) & Q(no_sync=False)
+                query = (
+                    (
+                        Q(enacted=None)
+                        | Q(enacted__lt=F("updated"))
+                        | Q(enacted__lt=F("changed_by_policy"))
+                    )
+                    & Q(lazy_blocked=False)
+                    & Q(no_sync=False)
+                )
                 query = self.build_filter(request, query)
                 queryset = djangoClass.objects.filter(query)
             elif request.kind == request.SYNCHRONIZER_DELETED_OBJECTS:
@@ -618,50 +658,55 @@
                 else:
                     queryset = djangoClass.deleted_objects.all()
             elif request.kind == request.SYNCHRONIZER_DIRTY_POLICIES:
-                query = (Q(policed=None) | Q(policed__lt=F('updated')) | Q(policed__lt=F('changed_by_step'))) \
-                        & Q(no_policy=False)
+                query = (
+                    Q(policed=None)
+                    | Q(policed__lt=F("updated"))
+                    | Q(policed__lt=F("changed_by_step"))
+                ) & Q(no_policy=False)
                 query = self.build_filter(request, query)
                 queryset = djangoClass.objects.filter(query)
             elif request.kind == request.SYNCHRONIZER_DELETED_POLICIES:
-                query = Q(policed__lt=F('updated')) | Q(policed=None)
+                query = Q(policed__lt=F("updated")) | Q(policed=None)
                 query = self.build_filter(request, query)
                 queryset = djangoClass.deleted_objects.filter(query)
             elif request.kind == request.ALL:
                 queryset = djangoClass.objects.all()
 
             filtered_queryset = (
-                elt for elt in queryset if self.xos_security_check(
-                    elt, user, read_access=True))
+                elt
+                for elt in queryset
+                if self.xos_security_check(elt, user, read_access=True)
+            )
 
             # FIXME: Implement auditing here
             # logging.info("User requested x objects, y objects were filtered out by policy z")
 
             response = self.querysetToProto(djangoClass, filtered_queryset)
             return response
-        except:
+        except BaseException:
             log.exception("Exception in apihelper.filter")
             raise
 
     def authenticate(self, context, required=True):
         for (k, v) in context.invocation_metadata():
-            if (k.lower() == "authorization"):
+            if k.lower() == "authorization":
                 (method, auth) = v.split(" ", 1)
-                if (method.lower() == "basic"):
+                if method.lower() == "basic":
                     auth = base64.b64decode(auth)
                     (username, password) = auth.split(":")
                     user = cached_authenticator.authenticate(
-                        username=username, password=password)
+                        username=username, password=password
+                    )
                     if not user:
                         raise XOSPermissionDenied(
-                            "failed to authenticate %s:%s" %
-                            (username, password))
+                            "failed to authenticate %s:%s" % (username, password)
+                        )
                     return user
-            elif (k.lower() == "x-xossession"):
+            elif k.lower() == "x-xossession":
                 s = SessionStore(session_key=v)
                 id = s.get("_auth_user_id", None)
                 if not id:
-                    raise XOSPermissionDenied(
-                        "failed to authenticate token %s" % v)
+                    raise XOSPermissionDenied("failed to authenticate token %s" % v)
                 user = User.objects.get(id=id)
                 log.info("authenticated sessionid %s as %s" % (v, user))
                 return user
diff --git a/xos/coreapi/apistats.py b/xos/coreapi/apistats.py
index a209e78..2f76605 100644
--- a/xos/coreapi/apistats.py
+++ b/xos/coreapi/apistats.py
@@ -16,33 +16,35 @@
 import time
 
 REQUEST_COUNT = Counter(
-    'grpc_request_count', 'GRPC Request Count',
-    ['app_name', 'model_name', 'endpoint', 'status']
+    "grpc_request_count",
+    "GRPC Request Count",
+    ["app_name", "model_name", "endpoint", "status"],
 )
 
 # TODO (teone) add caller as label for the counter (eg: GUI, TOSCA, SYNCHRONIZER)
 # TODO (teone) add user informations as label for the counter
 
 REQUEST_LATENCY = Histogram(
-    'grpc_request_latency_seconds', 'GRPC Request latency',
-    ['app_name', 'model_name', 'endpoint']
+    "grpc_request_latency_seconds",
+    "GRPC Request latency",
+    ["app_name", "model_name", "endpoint"],
 )
 
+
 def track_request_time(model, method):
     """
     This decorator register the request time of a request
     """
 
     def decorator(function):
-
         def wrapper(*args, **kwargs):
 
             start_time = time.time()
             res = function(*args, **kwargs)
             resp_time = time.time() - start_time
-            REQUEST_LATENCY.labels('xos-core', model, method).observe(resp_time)
+            REQUEST_LATENCY.labels("xos-core", model, method).observe(resp_time)
             return res
 
         return wrapper
 
-    return decorator
\ No newline at end of file
+    return decorator
diff --git a/xos/coreapi/app_list_builder.py b/xos/coreapi/app_list_builder.py
index afa2ebf..4aad92e 100644
--- a/xos/coreapi/app_list_builder.py
+++ b/xos/coreapi/app_list_builder.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,10 +14,12 @@
 
 import os
 
+
 def makedirs_if_noexist(pathname):
     if not os.path.exists(pathname):
         os.makedirs(pathname)
 
+
 class AppListBuilder(object):
     def __init__(self):
         self.app_metadata_dir = "/opt/xos/xos"
@@ -42,7 +43,10 @@
         # Generate the app list
         app_list_fn = os.path.join(self.app_metadata_dir, "xosbuilder_app_list")
         makedirs_if_noexist(os.path.dirname(app_list_fn))
-        file(app_list_fn, "w").write("\n".join(["services.%s" % x for x in app_names]) + "\n")
+        file(app_list_fn, "w").write(
+            "\n".join(["services.%s" % x for x in app_names]) + "\n"
+        )
+
 
 if __name__ == "__main__":
     AppListBuilder().generate_app_lists()
diff --git a/xos/coreapi/core_main.py b/xos/coreapi/core_main.py
index 948ebd1..5392534 100644
--- a/xos/coreapi/core_main.py
+++ b/xos/coreapi/core_main.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,29 +12,39 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 import argparse
-import os
-import sys
-import time
+import prometheus_client
 
+# FIXME: should grpc_server initialize the Config?
 from grpc_server import XOSGrpcServer
 
 from xosconfig import Config
+from xoskafka import XOSKafkaProducer
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 # create an single kafka producer connection for the core
-from xoskafka import XOSKafkaProducer
+
 XOSKafkaProducer.init()
 
-import prometheus_client
 
 def parse_args():
     parser = argparse.ArgumentParser()
-    parser.add_argument("--model_status", dest="model_status", type=int, default=0, help="status of model prep")
-    parser.add_argument("--model_output", dest="model_output", type=file, default=None, help="file containing output of model prep step")
+    parser.add_argument(
+        "--model_status",
+        dest="model_status",
+        type=int,
+        default=0,
+        help="status of model prep",
+    )
+    parser.add_argument(
+        "--model_output",
+        dest="model_output",
+        type=file,
+        default=None,
+        help="file containing output of model prep step",
+    )
     args = parser.parse_args()
 
     if args.model_output:
@@ -45,26 +54,30 @@
 
     return args
 
+
 def init_reaper():
     reaper = None
     try:
         from reaper import ReaperThread
+
         reaper = ReaperThread()
         reaper.start()
-    except:
+    except BaseException:
         log.exception("Failed to initialize reaper")
 
     return reaper
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     args = parse_args()
 
     # start the prometheus server
     # TODO (teone) consider moving this in a separate process so that it won't die when we load services
     prometheus_client.start_http_server(8000)
 
-    server = XOSGrpcServer(model_status = args.model_status,
-                           model_output = args.model_output)
+    server = XOSGrpcServer(
+        model_status=args.model_status, model_output=args.model_output
+    )
     server.start()
 
     if server.django_initialized:
diff --git a/xos/coreapi/dynamicbuild.py b/xos/coreapi/dynamicbuild.py
index ddf14d3..1c8e4f6 100644
--- a/xos/coreapi/dynamicbuild.py
+++ b/xos/coreapi/dynamicbuild.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,9 +21,11 @@
 
 from xosconfig import Config
 from multistructlog import create_logger
-log = create_logger(Config().get('logging'))
 
-DEFAULT_BASE_DIR="/opt/xos"
+log = create_logger(Config().get("logging"))
+
+DEFAULT_BASE_DIR = "/opt/xos"
+
 
 class DynamicBuilder(object):
     NOTHING_TO_DO = 0
@@ -37,7 +38,9 @@
         self.coreapi_dir = os.path.join(base_dir, "coreapi")
         self.protos_dir = os.path.join(base_dir, "coreapi/protos")
         self.app_metadata_dir = os.path.join(base_dir, "xos")
-        self.convenience_methods_dir = os.path.join(base_dir, "xos_client/xosapi/convenience")
+        self.convenience_methods_dir = os.path.join(
+            base_dir, "xos_client/xosapi/convenience"
+        )
 
     def pre_validate_file(self, item):
         # someone might be trying to trick us into writing files outside the designated directory
@@ -49,7 +52,7 @@
         try:
             os.write(handle, item.contents)
             os.close(handle)
-            if (os.system("python -m py_compile %s" % fn) != 0):
+            if os.system("python -m py_compile %s" % fn) != 0:
                 raise Exception("python file %s failed compile test" % item.filename)
         finally:
             os.remove(fn)
@@ -73,14 +76,14 @@
         if not os.path.exists(self.manifest_dir):
             return []
 
-        manifests=[]
+        manifests = []
         for fn in os.listdir(self.manifest_dir):
             if fn.endswith(".json"):
                 manifest_fn = os.path.join(self.manifest_dir, fn)
                 try:
                     manifest = json.loads(open(manifest_fn).read())
                     manifests.append(manifest)
-                except:
+                except BaseException:
                     log.exception("Error loading manifest", filename=manifest_fn)
         return manifests
 
@@ -89,7 +92,7 @@
         if os.path.exists(manifest_fn):
             try:
                 manifest = json.loads(open(manifest_fn).read())
-            except:
+            except BaseException:
                 log.exception("Error loading old manifest", filename=manifest_fn)
                 manifest = {}
         else:
@@ -106,7 +109,10 @@
         if hash == manifest.get("hash"):
             # The hash of the incoming request is identical to the manifest that we have saved, so this request is a
             # no-op.
-            log.info("Models are already up-to-date; skipping dynamic load.", name=request.name)
+            log.info(
+                "Models are already up-to-date; skipping dynamic load.",
+                name=request.name,
+            )
             return self.NOTHING_TO_DO
 
         self.pre_validate_models(request)
@@ -131,7 +137,10 @@
         if hash == manifest.get("hash"):
             # The hash of the incoming request is identical to the manifest that we have saved, so this request is a
             # no-op.
-            log.info("Models are already up-to-date; skipping dynamic unload.", name=request.name)
+            log.info(
+                "Models are already up-to-date; skipping dynamic unload.",
+                name=request.name,
+            )
             return self.NOTHING_TO_DO
 
         manifest = self.save_models(request, state="unload", hash=hash)
@@ -150,7 +159,7 @@
         m = hashlib.sha1()
         m.update(request.name)
         m.update(request.version)
-        if (state == "load"):
+        if state == "load":
             for item in request.xprotos:
                 m.update(item.filename)
                 m.update(item.contents)
@@ -181,19 +190,21 @@
             os.remove(manifest_fn)
 
         # convert the request to a manifest, so we can save it
-        service_manifest = {"name": request.name,
-                            "version": request.version,
-                            "hash": hash,
-                            "state": state,
-                            "dir": service_dir,
-                            "manifest_fn": manifest_fn,
-                            "dest_dir": os.path.join(self.services_dest_dir, request.name),
-                            "xprotos": [],
-                            "decls": [],
-                            "attics": [],
-                            "convenience_methods": []}
+        service_manifest = {
+            "name": request.name,
+            "version": request.version,
+            "hash": hash,
+            "state": state,
+            "dir": service_dir,
+            "manifest_fn": manifest_fn,
+            "dest_dir": os.path.join(self.services_dest_dir, request.name),
+            "xprotos": [],
+            "decls": [],
+            "attics": [],
+            "convenience_methods": [],
+        }
 
-        if (state == "load"):
+        if state == "load":
             for item in request.xprotos:
                 file(os.path.join(service_dir, item.filename), "w").write(item.contents)
                 service_manifest["xprotos"].append({"filename": item.filename})
@@ -208,16 +219,17 @@
                 if not os.path.exists(attic_dir):
                     os.makedirs(attic_dir)
                 for item in request.attics:
-                    file(os.path.join(attic_dir, item.filename), "w").write(item.contents)
+                    file(os.path.join(attic_dir, item.filename), "w").write(
+                        item.contents
+                    )
                     service_manifest["attics"].append({"filename": item.filename})
 
             for item in request.convenience_methods:
                 save_path = os.path.join(self.convenience_methods_dir, item.filename)
                 file(save_path, "w").write(item.contents)
-                service_manifest["convenience_methods"].append({
-                    "filename": item.filename,
-                    "path": save_path
-                })
+                service_manifest["convenience_methods"].append(
+                    {"filename": item.filename, "path": save_path}
+                )
 
         return service_manifest
 
@@ -225,41 +237,48 @@
         if not os.path.exists(manifest["dest_dir"]):
             os.makedirs(manifest["dest_dir"])
 
-        xproto_filenames = [os.path.join(manifest["dir"], x["filename"]) for x in manifest["xprotos"]]
+        xproto_filenames = [
+            os.path.join(manifest["dir"], x["filename"]) for x in manifest["xprotos"]
+        ]
 
         # Generate models
-        is_service = manifest["name"] != 'core'
+        is_service = manifest["name"] != "core"
 
-        args = XOSProcessorArgs(output = manifest["dest_dir"],
-                                attic = os.path.join(manifest["dir"], 'attic'),
-                                files = xproto_filenames)
+        args = XOSProcessorArgs(
+            output=manifest["dest_dir"],
+            attic=os.path.join(manifest["dir"], "attic"),
+            files=xproto_filenames,
+        )
 
         if is_service:
-            args.target = 'service.xtarget'
-            args.write_to_file = 'target'
+            args.target = "service.xtarget"
+            args.write_to_file = "target"
         else:
-            args.target = 'django.xtarget'
-            args.dest_extension = 'py'
-            args.write_to_file = 'model'
+            args.target = "django.xtarget"
+            args.dest_extension = "py"
+            args.write_to_file = "model"
 
         XOSProcessor.process(args)
 
         # Generate security checks
-        security_args = XOSProcessorArgs(output = manifest["dest_dir"],
-                                         target = 'django-security.xtarget',
-                                         dest_file = 'security.py',
-                                         write_to_file = 'single',
-                                         files = xproto_filenames)
+        security_args = XOSProcessorArgs(
+            output=manifest["dest_dir"],
+            target="django-security.xtarget",
+            dest_file="security.py",
+            write_to_file="single",
+            files=xproto_filenames,
+        )
 
         XOSProcessor.process(security_args)
 
         # Generate __init__.py
         if manifest["name"] == "core":
+
             class InitArgs:
                 output = manifest["dest_dir"]
-                target = 'init.xtarget'
-                dest_file = '__init__.py'
-                write_to_file = 'single'
+                target = "init.xtarget"
+                dest_file = "__init__.py"
+                write_to_file = "single"
                 files = xproto_filenames
 
             XOSProcessor.process(InitArgs())
diff --git a/xos/coreapi/grpc_client.py b/xos/coreapi/grpc_client.py
index 874b217..ca36cf0 100644
--- a/xos/coreapi/grpc_client.py
+++ b/xos/coreapi/grpc_client.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,45 +12,56 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
 import base64
 import grpc
 import orm
-from protos.common_pb2 import *
-from protos.xos_pb2 import *
-from protos.utility_pb2 import *
 from protos import xos_pb2_grpc, modeldefs_pb2_grpc, utility_pb2_grpc
 from google.protobuf.empty_pb2 import Empty
-from grpc import metadata_call_credentials, ChannelCredentials, composite_channel_credentials, ssl_channel_credentials
+from grpc import (
+    metadata_call_credentials,
+    composite_channel_credentials,
+    ssl_channel_credentials,
+)
 
-SERVER_CA="/usr/local/share/ca-certificates/local_certs.crt"
+SERVER_CA = "/usr/local/share/ca-certificates/local_certs.crt"
+
 
 class UsernamePasswordCallCredentials(grpc.AuthMetadataPlugin):
-  """Metadata wrapper for raw access token credentials."""
-  def __init__(self, username, password):
+    """Metadata wrapper for raw access token credentials."""
+
+    def __init__(self, username, password):
         self._username = username
         self._password = password
-  def __call__(self, context, callback):
-        basic_auth = "Basic %s" % base64.b64encode("%s:%s" % (self._username, self._password))
-        metadata = (('Authorization', basic_auth),)
+
+    def __call__(self, context, callback):
+        basic_auth = "Basic %s" % base64.b64encode(
+            "%s:%s" % (self._username, self._password)
+        )
+        metadata = (("Authorization", basic_auth),)
         callback(metadata, None)
 
+
 class SessionIdCallCredentials(grpc.AuthMetadataPlugin):
-  """Metadata wrapper for raw access token credentials."""
-  def __init__(self, sessionid):
+    """Metadata wrapper for raw access token credentials."""
+
+    def __init__(self, sessionid):
         self._sessionid = sessionid
-  def __call__(self, context, callback):
-        metadata = (('x-xossession', self._sessionid),)
+
+    def __call__(self, context, callback):
+        metadata = (("x-xossession", self._sessionid),)
         callback(metadata, None)
 
+
 class XOSClient(object):
     def __init__(self, hostname, port):
         self.hostname = hostname
         self.port = port
 
+
 class InsecureClient(XOSClient):
     def __init__(self, hostname, port=50055):
-        super(InsecureClient,self).__init__(hostname, port)
+        super(InsecureClient, self).__init__(hostname, port)
         self.channel = grpc.insecure_channel("%s:%d" % (self.hostname, self.port))
         self.stub = xos_pb2_grpc.xosStub(self.channel)
         self.modeldefs = modeldefs_pb2_grpc.modeldefsStub(self.channel)
@@ -59,32 +69,48 @@
 
         self.xos_orm = orm.ORMStub(self.stub, "xos")
 
-class SecureClient(XOSClient):
-    def __init__(self, hostname, port=50051, cacert=SERVER_CA, username=None, password=None, sessionid=None):
-        super(SecureClient,self).__init__(hostname, port)
 
-        server_ca = open(cacert,"r").read()
-        if (sessionid):
+class SecureClient(XOSClient):
+    def __init__(
+        self,
+        hostname,
+        port=50051,
+        cacert=SERVER_CA,
+        username=None,
+        password=None,
+        sessionid=None,
+    ):
+        super(SecureClient, self).__init__(hostname, port)
+
+        server_ca = open(cacert, "r").read()
+        if sessionid:
             call_creds = metadata_call_credentials(SessionIdCallCredentials(sessionid))
         else:
-            call_creds = metadata_call_credentials(UsernamePasswordCallCredentials(username, password))
+            call_creds = metadata_call_credentials(
+                UsernamePasswordCallCredentials(username, password)
+            )
         chan_creds = ssl_channel_credentials(server_ca)
         chan_creds = composite_channel_credentials(chan_creds, call_creds)
 
-        self.channel = grpc.secure_channel("%s:%d" % (self.hostname, self.port), chan_creds)
+        self.channel = grpc.secure_channel(
+            "%s:%d" % (self.hostname, self.port), chan_creds
+        )
         self.stub = xos_pb2_grpc.xosStub(self.channel)
         self.modeldefs = modeldefs_pb2_grpc.modeldefsStub(self.channel)
         self.utility = utility_pb2_grpc.utilityStub(self.channel)
 
         self.xos_orm = orm.ORMStub(self.stub, "xos")
 
+
 def main():  # self-test
     client = InsecureClient("xos-core.cord.lab")
-    print client.stub.ListUser(Empty())
+    print(client.stub.ListUser(Empty()))
 
-    client = SecureClient("xos-core.cord.lab", username="padmin@vicci.org", password="letmein")
-    print client.stub.ListUser(Empty())
+    client = SecureClient(
+        "xos-core.cord.lab", username="padmin@vicci.org", password="letmein"
+    )
+    print(client.stub.ListUser(Empty()))
 
-if __name__=="__main__":
+
+if __name__ == "__main__":
     main()
-
diff --git a/xos/coreapi/grpc_server.py b/xos/coreapi/grpc_server.py
index d4c73fa..340f8c3 100644
--- a/xos/coreapi/grpc_server.py
+++ b/xos/coreapi/grpc_server.py
@@ -16,40 +16,37 @@
 import os
 import sys
 import threading
-import uuid
 from collections import OrderedDict
-from os.path import abspath, basename, dirname, join, walk
+from os.path import abspath, basename, dirname, join
 import grpc
 from concurrent import futures
 import zlib
 
-xos_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
-sys.path.append(xos_path)
-
+# initialize config and logger
 from xosconfig import Config
-
 from multistructlog import create_logger
 
+xos_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/..")
+sys.path.append(xos_path)
+
 Config.init()
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
-from protos import schema_pb2, dynamicload_pb2, schema_pb2_grpc, dynamicload_pb2_grpc
 from xos_dynamicload_api import DynamicLoadService
-from dynamicbuild import DynamicBuilder
-from google.protobuf.empty_pb2 import Empty
+from protos import schema_pb2, schema_pb2_grpc, dynamicload_pb2_grpc
 
-SERVER_KEY="/opt/cord_profile/core_api_key.pem"
-SERVER_CERT="/opt/cord_profile/core_api_cert.pem"
-SERVER_CA="/usr/local/share/ca-certificates/local_certs.crt"
+SERVER_KEY = "/opt/cord_profile/core_api_key.pem"
+SERVER_CERT = "/opt/cord_profile/core_api_cert.pem"
+SERVER_CA = "/usr/local/share/ca-certificates/local_certs.crt"
+
 
 class SchemaService(schema_pb2_grpc.SchemaServiceServicer):
-
     def __init__(self, thread_pool):
         self.thread_pool = thread_pool
         protos = self._load_schema()
-        self.schemas = schema_pb2.Schemas(protos=protos,
-                                          swagger_from='xos.proto',
-                                          yang_from='xos.proto')
+        self.schemas = schema_pb2.Schemas(
+            protos=protos, swagger_from="xos.proto", yang_from="xos.proto"
+        )
 
     def stop(self):
         pass
@@ -58,33 +55,32 @@
         """Pre-load schema file so that we can serve it up (file sizes
            are small enough to do so
         """
-        proto_dir = abspath(join(dirname(__file__), './protos'))
+        proto_dir = abspath(join(dirname(__file__), "./protos"))
 
         def find_files(dir, suffix):
-            proto_blacklist = ['schema.proto']
+            proto_blacklist = ["schema.proto"]
 
             proto_files = [
-                join(dir, fname) for fname in os.listdir(dir)
+                join(dir, fname)
+                for fname in os.listdir(dir)
                 if fname.endswith(suffix) and fname not in proto_blacklist
             ]
             return proto_files
 
         proto_map = OrderedDict()  # to have deterministic data
-        for proto_file in find_files(proto_dir, '.proto'):
-            with open(proto_file, 'r') as f:
+        for proto_file in find_files(proto_dir, ".proto"):
+            with open(proto_file, "r") as f:
                 proto_content = f.read()
             fname = basename(proto_file)
             # assure no two files have the same basename
             assert fname not in proto_map
 
-            desc_file = proto_file.replace('.proto', '.desc')
-            with open(desc_file, 'r') as f:
+            desc_file = proto_file.replace(".proto", ".desc")
+            with open(desc_file, "r") as f:
                 descriptor_content = zlib.compress(f.read())
 
             proto_map[fname] = schema_pb2.ProtoFile(
-                file_name=fname,
-                proto=proto_content,
-                descriptor=descriptor_content
+                file_name=fname, proto=proto_content, descriptor=descriptor_content
             )
 
         return proto_map.values()
@@ -93,23 +89,25 @@
         """Return current schema files and descriptor"""
         return self.schemas
 
-class XOSGrpcServer(object):
 
+class XOSGrpcServer(object):
     def __init__(self, port=50055, model_status=0, model_output=""):
         self.port = port
         self.model_status = model_status
         self.model_output = model_output
-        log.info('Initializing GRPC Server', port = port)
+        log.info("Initializing GRPC Server", port=port)
         self.thread_pool = futures.ThreadPoolExecutor(max_workers=1)
         self.server = grpc.server(self.thread_pool)
         self.django_initialized = False
         self.django_apps = []
 
-        server_key = open(SERVER_KEY,"r").read()
-        server_cert = open(SERVER_CERT,"r").read()
-        server_ca = open(SERVER_CA,"r").read()
+        server_key = open(SERVER_KEY, "r").read()
+        server_cert = open(SERVER_CERT, "r").read()
+        server_ca = open(SERVER_CA, "r").read()
 
-        self.credentials = grpc.ssl_server_credentials([(server_key, server_cert)], server_ca, False)
+        self.credentials = grpc.ssl_server_credentials(
+            [(server_key, server_cert)], server_ca, False
+        )
 
         self.delayed_shutdown_timer = None
         self.exit_event = threading.Event()
@@ -119,70 +117,86 @@
     def init_django(self):
         try:
             import django
+
             os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
             django.setup()
             from django.apps import apps
+
             self.django_apps = apps
             self.django_initialized = True
-        except:
+        except BaseException:
             log.exception("Failed to initialize django")
 
     def register_core(self):
         from xos_grpc_api import XosService
-        from protos import xos_pb2, xos_pb2_grpc
+        from protos import xos_pb2_grpc
 
-        self.register("xos", xos_pb2_grpc.add_xosServicer_to_server, XosService(self.thread_pool))
+        self.register(
+            "xos", xos_pb2_grpc.add_xosServicer_to_server, XosService(self.thread_pool)
+        )
 
     def register_utility(self):
         from xos_utility_api import UtilityService
-        from protos import utility_pb2, utility_pb2_grpc
+        from protos import utility_pb2_grpc
 
-        self.register("utility", utility_pb2_grpc.add_utilityServicer_to_server, UtilityService(self.thread_pool))
+        self.register(
+            "utility",
+            utility_pb2_grpc.add_utilityServicer_to_server,
+            UtilityService(self.thread_pool),
+        )
 
     def register_modeldefs(self):
         from xos_modeldefs_api import ModelDefsService
         from protos import modeldefs_pb2_grpc
 
-        self.register("modeldefs", modeldefs_pb2_grpc.add_modeldefsServicer_to_server, ModelDefsService(self.thread_pool))
+        self.register(
+            "modeldefs",
+            modeldefs_pb2_grpc.add_modeldefsServicer_to_server,
+            ModelDefsService(self.thread_pool),
+        )
 
     def start(self):
-        log.info('Starting GRPC Server')
+        log.info("Starting GRPC Server")
 
-        self.register("schema",
-                      schema_pb2_grpc.add_SchemaServiceServicer_to_server,
-                      SchemaService(self.thread_pool))
+        self.register(
+            "schema",
+            schema_pb2_grpc.add_SchemaServiceServicer_to_server,
+            SchemaService(self.thread_pool),
+        )
 
         dynamic_load_service = DynamicLoadService(self.thread_pool, self)
-        self.register("dynamicload",
-                      dynamicload_pb2_grpc.add_dynamicloadServicer_to_server,
-                      dynamic_load_service)
+        self.register(
+            "dynamicload",
+            dynamicload_pb2_grpc.add_dynamicloadServicer_to_server,
+            dynamic_load_service,
+        )
 
-        if (self.model_status == 0):
+        if self.model_status == 0:
             self.init_django()
 
-        if (self.django_initialized):
+        if self.django_initialized:
             dynamic_load_service.set_django_apps(self.django_apps)
             self.register_core()
             self.register_utility()
             self.register_modeldefs()
 
         # open port
-        self.server.add_insecure_port('[::]:%s' % self.port)
+        self.server.add_insecure_port("[::]:%s" % self.port)
 
         self.server.add_secure_port("[::]:50051", self.credentials)
 
         # strat the server
         self.server.start()
 
-        log.info('GRPC Server Started')
+        log.info("GRPC Server Started")
         return self
 
     def stop(self, grace=0):
-        log.info('Stopping GRPC Server')
+        log.info("Stopping GRPC Server")
         for service in self.services:
             service.stop()
         self.server.stop(grace)
-        log.info('stopped')
+        log.info("stopped")
 
     def stop_and_exit(self):
         log.info("Stop and Exit")
diff --git a/xos/coreapi/orm.py b/xos/coreapi/orm.py
index a59e736..5fae722 100644
--- a/xos/coreapi/orm.py
+++ b/xos/coreapi/orm.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.empty_pb2 import Empty
 
 """
 Django-like ORM layer for gRPC
@@ -32,15 +33,13 @@
 """
 import grpc_client, orm
 c=grpc_client.SecureClient("xos-core.cord.lab", username="padmin@vicci.org", password="letmein")
-u=c.xos_orm.User.objects.get(id=1)

-"""

+u=c.xos_orm.User.objects.get(id=1)
+"""
 
-import functools
-from google.protobuf.empty_pb2 import Empty
 
-from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
+
 class ORMWrapper(object):
     """ Wraps a protobuf object to provide ORM features """
 
@@ -52,20 +51,25 @@
         super(ORMWrapper, self).__setattr__("dependent", None)
         super(ORMWrapper, self).__setattr__("reverse_cache", {})
         super(ORMWrapper, self).__setattr__("is_new", is_new)
-        fkmap=self.gen_fkmap()
+        fkmap = self.gen_fkmap()
         super(ORMWrapper, self).__setattr__("_fkmap", fkmap)
-        reverse_fkmap=self.gen_reverse_fkmap()
+        reverse_fkmap = self.gen_reverse_fkmap()
         super(ORMWrapper, self).__setattr__("_reverse_fkmap", reverse_fkmap)
 
     def gen_fkmap(self):
         fkmap = {}
 
         for (name, field) in self._wrapped_class.DESCRIPTOR.fields_by_name.items():
-           if name.endswith("_id"):
-               foreignKey = field.GetOptions().Extensions._FindExtensionByName("xos.foreignKey")
-               fk = field.GetOptions().Extensions[foreignKey]
-               if fk:
-                   fkmap[name[:-3]] = {"src_fieldName": name, "modelName": fk.modelName}
+            if name.endswith("_id"):
+                foreignKey = field.GetOptions().Extensions._FindExtensionByName(
+                    "xos.foreignKey"
+                )
+                fk = field.GetOptions().Extensions[foreignKey]
+                if fk:
+                    fkmap[name[:-3]] = {
+                        "src_fieldName": name,
+                        "modelName": fk.modelName,
+                    }
 
         return fkmap
 
@@ -73,11 +77,16 @@
         reverse_fkmap = {}
 
         for (name, field) in self._wrapped_class.DESCRIPTOR.fields_by_name.items():
-           if name.endswith("_ids"):
-               reverseForeignKey = field.GetOptions().Extensions._FindExtensionByName("xos.reverseForeignKey")
-               fk = field.GetOptions().Extensions[reverseForeignKey]
-               if fk:
-                   reverse_fkmap[name[:-4]] = {"src_fieldName": name, "modelName": fk.modelName}
+            if name.endswith("_ids"):
+                reverseForeignKey = field.GetOptions().Extensions._FindExtensionByName(
+                    "xos.reverseForeignKey"
+                )
+                fk = field.GetOptions().Extensions[reverseForeignKey]
+                if fk:
+                    reverse_fkmap[name[:-4]] = {
+                        "src_fieldName": name,
+                        "modelName": fk.modelName,
+                    }
 
         return reverse_fkmap
 
@@ -86,7 +95,7 @@
             return ORMWrapper(self.cache[name], self.stub)
 
         fk_entry = self._fkmap[name]
-        id=self.stub.make_ID(id=getattr(self, fk_entry["src_fieldName"]))
+        id = self.stub.make_ID(id=getattr(self, fk_entry["src_fieldName"]))
         dest_model = self.stub.invoke("Get%s" % fk_entry["modelName"], id)
 
         self.cache[name] = dest_model
@@ -96,7 +105,11 @@
     def reverse_fk_resolve(self, name):
         if name not in self.reverse_cache:
             fk_entry = self._reverse_fkmap[name]
-            self.cache[name] = ORMLocalObjectManager(self.stub, fk_entry["modelName"], getattr(self, fk_entry["src_fieldName"]))
+            self.cache[name] = ORMLocalObjectManager(
+                self.stub,
+                fk_entry["modelName"],
+                getattr(self, fk_entry["src_fieldName"]),
+            )
 
         return self.cache[name]
 
@@ -114,7 +127,7 @@
 
     def __setattr__(self, name, value):
         if name in self.__dict__:
-            super(ORMWrapper,self).__setattr__(name, value)
+            super(ORMWrapper, self).__setattr__(name, value)
         else:
             setattr(self._wrapped_class, name, value)
 
@@ -123,16 +136,21 @@
 
     def save(self):
         if self.is_new:
-           new_class = self.stub.invoke("Create%s" % self._wrapped_class.__class__.__name__, self._wrapped_class)
-           self._wrapped_class = new_class
-           self.is_new = False
+            new_class = self.stub.invoke(
+                "Create%s" % self._wrapped_class.__class__.__name__, self._wrapped_class
+            )
+            self._wrapped_class = new_class
+            self.is_new = False
         else:
-           self.stub.invoke("Update%s" % self._wrapped_class.__class__.__name__, self._wrapped_class)
+            self.stub.invoke(
+                "Update%s" % self._wrapped_class.__class__.__name__, self._wrapped_class
+            )
 
     def delete(self):
         id = self.stub.make_ID(id=self._wrapped_class.id)
         self.stub.invoke("Delete%s" % self._wrapped_class.__class__.__name__, id)
 
+
 class ORMLocalObjectManager(object):
     """ Manages a local list of objects """
 
@@ -148,7 +166,9 @@
 
         models = []
         for id in self._idList:
-            models.append(self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=id)))
+            models.append(
+                self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=id))
+            )
 
         self._cache = models
 
@@ -156,7 +176,8 @@
 
     def all(self):
         models = self.resolve_queryset()
-        return [ORMWrapper(x,self._stub) for x in models]
+        return [ORMWrapper(x, self._stub) for x in models]
+
 
 class ORMObjectManager(object):
     """ Manages a remote list of objects """
@@ -170,7 +191,7 @@
         return ORMWrapper(obj, self._stub)
 
     def wrap_list(self, obj):
-        result=[]
+        result = []
         for item in obj.items:
             result.append(ORMWrapper(item, self._stub))
         return result
@@ -179,25 +200,29 @@
         return self.wrap_list(self._stub.invoke("List%s" % self._modelName, Empty()))
 
     def get(self, id):
-        return self.wrap_single(self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=id)))
+        return self.wrap_single(
+            self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=id))
+        )
 
     def new(self, **kwargs):
         full_model_name = "%s.%s" % (self._packageName, self._modelName)
         cls = _sym_db._classes[full_model_name]
         return ORMWrapper(cls(), self._stub, is_new=True)
 
+
 class ORMModelClass(object):
     def __init__(self, stub, model_name, package_name):
         self.objects = ORMObjectManager(stub, model_name, package_name)
 
+
 class ORMStub(object):
     def __init__(self, stub, package_name):
         self.grpc_stub = stub
 
         for name in dir(stub):
-           if name.startswith("Get"):
-               model_name = name[3:]
-               setattr(self,model_name, ORMModelClass(self, model_name, package_name))
+            if name.startswith("Get"):
+                model_name = name[3:]
+                setattr(self, model_name, ORMModelClass(self, model_name, package_name))
 
     def invoke(self, name, request):
         method = getattr(self.grpc_stub, name)
@@ -207,14 +232,13 @@
         return _sym_db._classes["xos.ID"](id=id)
 
 
-#def wrap_get(*args, **kwargs):
+# def wrap_get(*args, **kwargs):
 #    stub=kwargs.pop("stub")
 #    getmethod=kwargs.pop("getmethod")
 #    result = getmethod(*args, **kwargs)
 #    return ORMWrapper(result)
 #
-#def wrap_stub(stub):
+# def wrap_stub(stub):
 #    for name in dir(stub):
 #        if name.startswith("Get"):
 #            setattr(stub, name, functools.partial(wrap_get, stub=stub, getmethod=getattr(stub,name)))
-
diff --git a/xos/coreapi/protos/__init__.py b/xos/coreapi/protos/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/coreapi/protos/__init__.py
+++ b/xos/coreapi/protos/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/coreapi/reaper.py b/xos/coreapi/reaper.py
index 0dd2d15..6080e1a 100644
--- a/xos/coreapi/reaper.py
+++ b/xos/coreapi/reaper.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 """ Reaper
 
     The reaper implements permanent deletion of soft-deleted objects.
@@ -26,33 +24,23 @@
 
 import os
 import sys
+import traceback
 import threading
+import time
+
+import django
+from django.db import reset_queries, router
 
 if __name__ == "__main__":
-    import django
-    sys.path.append('/opt/xos')
+    sys.path.append("/opt/xos")
     os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 
-from datetime import datetime
-import django
-from django.db import reset_queries
-from django.db.models import F, Q
-from django.db.models.signals import post_save
-from django.db.transaction import atomic
-from django.dispatch import receiver
-from django.utils import timezone
-from django.db import models as django_models
-from core.models.xosbase import XOSCollector
-from django.db import router
-
-import pdb
-import time
-import traceback
-
 from xosconfig import Config
 from multistructlog import create_logger
+log = create_logger(Config().get("logging"))
 
-log = create_logger(Config().get('logging'))
+from core.models.xosbase import XOSCollector
+
 
 class ReaperThread(threading.Thread):
     daemon = True
@@ -65,19 +53,20 @@
     def check_db_connection_okay(self):
         # django implodes if the database connection is closed by docker-compose
         from django import db
+
         try:
             db.connection.cursor()
-        except Exception, e:
+        except Exception as e:
             if "connection already closed" in traceback.format_exc():
-               log.exception("XXX connection already closed", e = e)
-               try:
-    #               if db.connection:
-    #                   db.connection.close()
-                   db.close_old_connections()
-               except Exception,e:
-                    log.exception("XXX we failed to fix the failure", e = e)
+                log.exception("XXX connection already closed", e=e)
+                try:
+                    #               if db.connection:
+                    #                   db.connection.close()
+                    db.close_old_connections()
+                except Exception as e:
+                    log.exception("XXX we failed to fix the failure", e=e)
             else:
-               log.exception("XXX some other error", e = e)
+                log.exception("XXX some other error", e=e)
 
     def journal_object(self, o, operation, msg=None, timestamp=None):
         # not implemented at this time
@@ -89,106 +78,114 @@
         """
         collector = XOSCollector(using=router.db_for_write(m.__class__, instance=m))
         collector.collect([m])
-        deps=[]
+        deps = []
         for (k, models) in collector.data.items():
             for model in models:
-                if model==m:
+                if model == m:
                     # collector will return ourself; ignore it.
                     continue
                 if issubclass(m.__class__, model.__class__):
                     # collector will return our parent classes; ignore them.
                     continue
-    # We don't actually need this check, as with multiple passes the reaper can
-    # clean up a hierarchy of objects.
-    #            if getattr(model, "backend_need_reap", False):
-    #                # model is already marked for reaping; ignore it.
-    #                continue
+                # We don't actually need this check, as with multiple passes the reaper can
+                # clean up a hierarchy of objects.
+                #            if getattr(model, "backend_need_reap", False):
+                #                # model is already marked for reaping; ignore it.
+                #                continue
                 deps.append(model)
         return deps
 
     def run_reaper_once(self):
-            objects = []
-            deleted_objects = []
+        # logger.debug("REAPER: run_reaper_once()")
 
-            # logger.debug("REAPER: run_reaper_once()")
+        self.check_db_connection_okay()
 
-            self.check_db_connection_okay()
+        # Reap non-sync'd models here
+        # models_to_reap = [Slice,Network,NetworkSlice]
 
-            # Reap non-sync'd models here
-            # models_to_reap = [Slice,Network,NetworkSlice]
+        models_to_reap = django.apps.apps.get_models(include_auto_created=False)
+        for m in models_to_reap:
+            if not hasattr(m, "deleted_objects"):
+                continue
 
-            models_to_reap = django.apps.apps.get_models(include_auto_created=False)
-            for m in models_to_reap:
-                if not hasattr(m, "deleted_objects"):
+            dobjs = m.deleted_objects.all()
+            for d in dobjs:
+                if hasattr(d, "_meta") and hasattr(d._meta, "proxy") and d._meta.proxy:
+                    # skip proxy objects; we'll get the base instead
                     continue
 
-                dobjs = m.deleted_objects.all()
-                for d in dobjs:
-                    if hasattr(d,"_meta") and hasattr(d._meta,"proxy") and d._meta.proxy:
-                        # skip proxy objects; we'll get the base instead
-                        continue
+                if (not getattr(d, "backend_need_reap", False)) and getattr(
+                    d, "backend_need_delete", False
+                ):
+                    self.journal_object(d, "reaper.need_delete")
+                    log.info("skipping because it has need_delete set", object=d)
+                    continue
 
-                    if (not getattr(d, "backend_need_reap", False)) and getattr(d, "backend_need_delete", False):
-                        self.journal_object(d, "reaper.need_delete")
-                        log.info("skipping because it has need_delete set", object = d)
-                        continue
+                if (not getattr(d, "backend_need_reap", False)) and getattr(
+                    d, "backend_need_delete_policy", False
+                ):
+                    log.info("skipping because it has need_delete_policy set", object=d)
+                    continue
 
-                    if (not getattr(d, "backend_need_reap", False)) and getattr(d, "backend_need_delete_policy", False):
-                        log.info("skipping because it has need_delete_policy set", object = d)
-                        continue
+                if hasattr(d, "leaf_model"):
+                    d = d.leaf_model
 
-                    if hasattr(d, "leaf_model"):
-                        d = d.leaf_model
+                cascade_set = self.get_cascade_set(d)
+                if cascade_set:
+                    self.journal_object(
+                        d,
+                        "reaper.cascade_set",
+                        msg=",".join([str(m) for m in cascade_set]),
+                    )
+                    log.info(
+                        "REAPER: cannot purge object because its cascade_set is nonempty",
+                        object=d,
+                        cascade_set=",".join([str(m) for m in cascade_set]),
+                    )
+                    continue
 
-                    cascade_set = self.get_cascade_set(d)
-                    if cascade_set:
-                        self.journal_object(d, "reaper.cascade_set", msg=",".join([str(m) for m in cascade_set]))
-                        log.info('REAPER: cannot purge object because its cascade_set is nonempty',object = d, cascade_set = ",".join([str(m) for m in cascade_set]))
-                        continue
+                self.journal_object(d, "reaper.purge")
+                log.info("REAPER: purging object", object=d)
+                try:
+                    d.delete(purge=True)
+                except BaseException:
+                    self.journal_object(d, "reaper.purge.exception")
+                    log.error("REAPER: exception purging object", object=d)
+                    traceback.print_exc()
 
-                    self.journal_object(d, "reaper.purge")
-                    log.info('REAPER: purging object',object = d)
-                    try:
-                        d.delete(purge=True)
-                    except:
-                        self.journal_object(d, "reaper.purge.exception")
-                        log.error('REAPER: exception purging object', object = d)
-                        traceback.print_exc()
+        try:
+            reset_queries()
+        except BaseException:
+            # this shouldn't happen, but in case it does, catch it...
+            log.exception("REAPER: exception in reset_queries")
 
-            try:
-                reset_queries()
-            except:
-                # this shouldn't happen, but in case it does, catch it...
-                log.exception("REAPER: exception in reset_queries")
-
-            # logger.debug("REAPER: finished run_reaper_once()")
+        # logger.debug("REAPER: finished run_reaper_once()")
 
     def run(self):
-        while (not self.terminate_signal):
+        while not self.terminate_signal:
             start = time.time()
             try:
                 self.run_reaper_once()
-            except:
+            except BaseException:
                 log.exception("REAPER: Exception in run loop")
 
-            telap = time.time()-start
-            if telap<self.interval:
+            telap = time.time() - start
+            if telap < self.interval:
                 time.sleep(self.interval - telap)
 
     def stop(self):
         self.terminate_signal = True
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     django.setup()
 
     reaper = ReaperThread()
     reaper.start()
 
-    import time
     _ONE_DAY_IN_SECONDS = 60 * 60 * 24
     try:
-        while 1:
+        while True:
             time.sleep(_ONE_DAY_IN_SECONDS)
     except KeyboardInterrupt:
         reaper.stop()
-
diff --git a/xos/coreapi/test_dynamicbuild.py b/xos/coreapi/test_dynamicbuild.py
index ce65e40..eda5b4c 100644
--- a/xos/coreapi/test_dynamicbuild.py
+++ b/xos/coreapi/test_dynamicbuild.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,37 +15,42 @@
 import json
 import os
 import shutil
-import sys
 import tempfile
 import unittest
 from mock import patch
 
 from xosconfig import Config
 
-class DynamicLoadItem():
+
+class DynamicLoadItem:
     def __init__(self, **kwargs):
-        for (k,v) in kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(self, k, v)
 
-class DynamicLoadRequest():
+
+class DynamicLoadRequest:
     def __init__(self, **kwargs):
         self.xprotos = []
         self.decls = []
         self.attics = []
-        for (k,v) in kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(self, k, v)
 
-class DynamicUnloadRequest():
+
+class DynamicUnloadRequest:
     def __init__(self, **kwargs):
-        for (k,v) in kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(self, k, v)
 
+
 class TestDynamicBuild(unittest.TestCase):
     def setUp(self):
         global dynamicbuild
 
-        config = basic_conf = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/test_config.yaml")
-        Config.clear() # in case left unclean by a previous test case
+        config = os.path.abspath(
+            os.path.dirname(os.path.realpath(__file__)) + "/test_config.yaml"
+        )
+        Config.clear()  # in case left unclean by a previous test case
         Config.init(config)
 
         import dynamicbuild
@@ -57,45 +61,60 @@
 
 message ExampleService (Service){
     option verbose_name = "Example Service";
-    required string service_message = 1 [help_text = "Service Message to Display", max_length = 254, null = False, db_index = False, blank = False];
+    required string service_message = 1 [help_text = "Service Message to Display",
+      max_length = 254, null = False, db_index = False, blank = False];
 }
 
 message Color (XOSBase){
      option verbose_name = "Color";
-     required string name = 1 [help_text = "Name for this color", db_index = False, max_length = 256, null = False, blank = False];
-     required string html_code = 2 [help_text = "Code for this color", db_index = False, max_length = 256, null = False, blank = False];
+     required string name = 1 [help_text = "Name for this color", db_index = False,
+       max_length = 256, null = False, blank = False];
+     required string html_code = 2 [help_text = "Code for this color", db_index = False,
+       max_length = 256, null = False, blank = False];
 }
 
 message ExampleServiceInstance (TenantWithContainer){
      option verbose_name = "Example Service Instance";
-     required string tenant_message = 1 [help_text = "Tenant Message to Display", max_length = 254, null = False, db_index = False, blank = False];
-     optional manytoone foreground_color->Color:serviceinstance_foreground_colors = 3 [db_index = True, null = True, blank = True];
-     optional manytoone background_color->Color:serviceinstance_background_colors = 3 [db_index = True, null = True, blank = True];
+     required string tenant_message = 1 [help_text = "Tenant Message to Display",
+       max_length = 254, null = False, db_index = False, blank = False];
+     optional manytoone foreground_color->Color:serviceinstance_foreground_colors = 3 [db_index = True,
+       null = True, blank = True];
+     optional manytoone background_color->Color:serviceinstance_background_colors = 3 [db_index = True,
+       null = True, blank = True];
 }
 
 message EmbeddedImage (XOSBase){
      option verbose_name = "Embedded Image";
-     required string name = 1 [help_text = "Name for this image", db_index = False, max_length = 256, null = False, blank = False];
-     required string url = 2 [help_text = "URL for this image", db_index = False, max_length = 256, null = False, blank = False];
-     optional manytoone serviceinstance->ExampleServiceInstance:embedded_images = 3 [db_index = True, null = True, blank = True];
+     required string name = 1 [help_text = "Name for this image", db_index = False,
+       max_length = 256, null = False, blank = False];
+     required string url = 2 [help_text = "URL for this image", db_index = False,
+       max_length = 256, null = False, blank = False];
+     optional manytoone serviceinstance->ExampleServiceInstance:embedded_images = 3 [db_index = True,
+       null = True, blank = True];
 }
         """
 
-        self.example_xproto_item = DynamicLoadItem(filename = "exampleservice.xproto",
-                               contents = self.example_xproto)
+        self.example_xproto_item = DynamicLoadItem(
+            filename="exampleservice.xproto", contents=self.example_xproto
+        )
 
-        self.example_request = DynamicLoadRequest(name = "exampleservice",
-                                                  version = "1",
-                                                  xprotos = [self.example_xproto_item],
-                                                  convenience_methods = [])
+        self.example_request = DynamicLoadRequest(
+            name="exampleservice",
+            version="1",
+            xprotos=[self.example_xproto_item],
+            convenience_methods=[],
+        )
 
-        self.example_unload_request = DynamicUnloadRequest(name = "exampleservice",
-                                                  version = "1")
+        self.example_unload_request = DynamicUnloadRequest(
+            name="exampleservice", version="1"
+        )
 
-        self.builder = dynamicbuild.DynamicBuilder(base_dir = self.base_dir)
+        self.builder = dynamicbuild.DynamicBuilder(base_dir=self.base_dir)
 
     def tearDown(self):
-        if os.path.abspath(self.base_dir).startswith("/tmp"):   # be paranoid about recursive deletes
+        if os.path.abspath(self.base_dir).startswith(
+            "/tmp"
+        ):  # be paranoid about recursive deletes
             shutil.rmtree(self.base_dir)
 
     def test_pre_validate_file(self):
@@ -106,12 +125,20 @@
 
     def test_generate_request_hash(self):
         hash = self.builder.generate_request_hash(self.example_request, state="load")
-        self.assertEqual(hash, "162de5012a8399883344085cbc232a2e627c5091")
+        self.assertEqual(hash, "44951ff370c565c796f05f7c3fce67f9a4d4d3f6")
 
     def test_handle_loadmodels_request(self):
-        with patch.object(dynamicbuild.DynamicBuilder, "save_models", wraps=self.builder.save_models) as save_models, \
-             patch.object(dynamicbuild.DynamicBuilder, "run_xosgenx_service", wraps=self.builder.run_xosgenx_service) as run_xosgenx_service, \
-             patch.object(dynamicbuild.DynamicBuilder, "remove_service", wraps=self.builder.remove_service) as remove_service:
+        with patch.object(
+            dynamicbuild.DynamicBuilder, "save_models", wraps=self.builder.save_models
+        ) as save_models, patch.object(
+            dynamicbuild.DynamicBuilder,
+            "run_xosgenx_service",
+            wraps=self.builder.run_xosgenx_service,
+        ) as run_xosgenx_service, patch.object(
+            dynamicbuild.DynamicBuilder,
+            "remove_service",
+            wraps=self.builder.remove_service,
+        ) as remove_service:
             result = self.builder.handle_loadmodels_request(self.example_request)
 
             save_models.assert_called()
@@ -121,7 +148,11 @@
             self.assertEqual(result, self.builder.SOMETHING_CHANGED)
 
             self.assertTrue(os.path.exists(self.builder.manifest_dir))
-            self.assertTrue(os.path.exists(os.path.join(self.builder.manifest_dir, "exampleservice.json")))
+            self.assertTrue(
+                os.path.exists(
+                    os.path.join(self.builder.manifest_dir, "exampleservice.json")
+                )
+            )
 
             service_dir = os.path.join(self.base_dir, "services", "exampleservice")
 
@@ -130,14 +161,28 @@
             self.assertTrue(os.path.exists(os.path.join(service_dir, "models.py")))
             self.assertTrue(os.path.exists(os.path.join(service_dir, "security.py")))
 
-            manifest = json.loads(open(os.path.join(self.builder.manifest_dir, "exampleservice.json"), "r").read())
+            manifest = json.loads(
+                open(
+                    os.path.join(self.builder.manifest_dir, "exampleservice.json"), "r"
+                ).read()
+            )
             self.assertEqual(manifest.get("state"), "load")
 
     def test_handle_unloadmodels_request(self):
-        with patch.object(dynamicbuild.DynamicBuilder, "save_models", wraps=self.builder.save_models) as save_models, \
-             patch.object(dynamicbuild.DynamicBuilder, "run_xosgenx_service", wraps=self.builder.run_xosgenx_service) as run_xosgenx_service, \
-             patch.object(dynamicbuild.DynamicBuilder, "remove_service", wraps=self.builder.remove_service) as remove_service:
-            result = self.builder.handle_unloadmodels_request(self.example_unload_request)
+        with patch.object(
+            dynamicbuild.DynamicBuilder, "save_models", wraps=self.builder.save_models
+        ) as save_models, patch.object(
+            dynamicbuild.DynamicBuilder,
+            "run_xosgenx_service",
+            wraps=self.builder.run_xosgenx_service,
+        ) as run_xosgenx_service, patch.object(
+            dynamicbuild.DynamicBuilder,
+            "remove_service",
+            wraps=self.builder.remove_service,
+        ) as remove_service:
+            result = self.builder.handle_unloadmodels_request(
+                self.example_unload_request
+            )
 
             save_models.assert_called()
             run_xosgenx_service.assert_not_called()
@@ -146,9 +191,17 @@
             self.assertEqual(result, self.builder.SOMETHING_CHANGED)
 
             self.assertTrue(os.path.exists(self.builder.manifest_dir))
-            self.assertTrue(os.path.exists(os.path.join(self.builder.manifest_dir, "exampleservice.json")))
+            self.assertTrue(
+                os.path.exists(
+                    os.path.join(self.builder.manifest_dir, "exampleservice.json")
+                )
+            )
 
-            manifest = json.loads(open(os.path.join(self.builder.manifest_dir, "exampleservice.json"), "r").read())
+            manifest = json.loads(
+                open(
+                    os.path.join(self.builder.manifest_dir, "exampleservice.json"), "r"
+                ).read()
+            )
             self.assertEqual(manifest.get("state"), "unload")
 
     def test_handle_loadmodels_request_twice(self):
@@ -166,13 +219,15 @@
 
         self.assertEqual(manifest["name"], self.example_request.name)
         self.assertEqual(manifest["version"], self.example_request.version)
-        self.assertEqual(manifest["hash"], "162de5012a8399883344085cbc232a2e627c5091")
+        self.assertEqual(manifest["hash"], "44951ff370c565c796f05f7c3fce67f9a4d4d3f6")
         self.assertEqual(manifest["dir"], dynamic_dir)
         self.assertEqual(manifest["dest_dir"], service_dir)
         self.assertEqual(len(manifest["xprotos"]), 1)
 
     def test_save_models_precomputed_hash(self):
-        manifest = self.builder.save_models(self.example_request, state="load", hash="1234")
+        manifest = self.builder.save_models(
+            self.example_request, state="load", hash="1234"
+        )
 
         dynamic_dir = os.path.join(self.base_dir, "dynamic_services", "exampleservice")
         service_dir = os.path.join(self.base_dir, "services", "exampleservice")
@@ -185,37 +240,36 @@
         self.assertEqual(len(manifest["xprotos"]), 1)
 
     def test_pre_validate_python_good(self):
-        good_python = \
-"""
+        good_python = """
 import foo
 
 x=1
 y="abc"
 """
-        python_item = DynamicLoadItem(filename="somefile.py",
-                                      contents=good_python)
+        python_item = DynamicLoadItem(filename="somefile.py", contents=good_python)
 
         self.builder.pre_validate_python(python_item)
 
     def test_pre_validate_python_bad(self):
-        bad_python = \
-"""
+        bad_python = """
 import foo
 
 this is not valid code
 y="abc"
 """
-        python_item = DynamicLoadItem(filename="somefile.py",
-                                      contents=bad_python)
+        python_item = DynamicLoadItem(filename="somefile.py", contents=bad_python)
 
         with self.assertRaises(Exception) as e:
-             self.builder.pre_validate_python(python_item)
+            self.builder.pre_validate_python(python_item)
 
-        self.assertEqual(e.exception.message, "python file somefile.py failed compile test")
+        self.assertEqual(
+            e.exception.message, "python file somefile.py failed compile test"
+        )
 
 
 def main():
     unittest.main()
 
+
 if __name__ == "__main__":
     main()
diff --git a/xos/coreapi/tests/api_user_crud.py b/xos/coreapi/tests/api_user_crud.py
index 215e643..6d45bb7 100644
--- a/xos/coreapi/tests/api_user_crud.py
+++ b/xos/coreapi/tests/api_user_crud.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,42 +12,46 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+import string
+import random
+from testconfig import USERNAME, PASSWORD
+import grpc_client
 import sys
+
 sys.path.append("..")
 
-import grpc_client
-from testconfig import *
 
-print "api_user_crud"
+print("api_user_crud")
 
-#c=grpc_client.InsecureClient("localhost")
-c=grpc_client.SecureClient("xos-core.cord.lab", username=USERNAME, password=PASSWORD)
-u=grpc_client.User()
-import random, string
-u.email=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
-u.site_id=1
-u2=c.stub.CreateUser(u)
+# c=grpc_client.InsecureClient("localhost")
+c = grpc_client.SecureClient("xos-core.cord.lab", username=USERNAME, password=PASSWORD)
+u = grpc_client.User()
+
+u.email = "".join(
+    random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+)
+u.site_id = 1
+u2 = c.stub.CreateUser(u)
 
 # update the user
-u2.password="foobar"
+u2.password = "foobar"
 c.stub.UpdateUser(u2)
 
 # do a listall and make sure user exists
 u_all = c.stub.ListUser(grpc_client.Empty()).items
 u_all = [x for x in u_all if x.email == u.email]
-assert(len(u_all)==1)
+assert len(u_all) == 1
 
-u3=c.stub.GetUser(grpc_client.ID(id=u2.id))
-assert(u3.id == u2.id)
-assert(u3.password=="foobar")
+u3 = c.stub.GetUser(grpc_client.ID(id=u2.id))
+assert u3.id == u2.id
+assert u3.password == "foobar"
 
 c.stub.DeleteUser(grpc_client.ID(id=u3.id))
 
 # make sure it is deleted
 u_all = c.stub.ListUser(grpc_client.Empty()).items
 u_all = [x for x in u_all if x.email == u.email]
-assert(len(u_all)==0)
+assert len(u_all) == 0
 
-print "    okay"
-
+print("    okay")
diff --git a/xos/coreapi/tests/apihelper_test.py b/xos/coreapi/tests/apihelper_test.py
index fd65868..89b343c 100644
--- a/xos/coreapi/tests/apihelper_test.py
+++ b/xos/coreapi/tests/apihelper_test.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,36 +15,41 @@
 # NOTE: This unit test requires django at this time due to dependencies in apihelper.py.  It must be run from inside
 #       a django-supporting environment, such as the core or ui containers.
 
+import apihelper
+import time
+from mock import patch
+import unittest
 import os
 import sys
 
 sys.path.append("..")
 
 if __name__ == "__main__":
-    import django
-    sys.path.append('/opt/xos')
+
+    sys.path.append("/opt/xos")
     os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 
-import unittest
-from mock import patch
-import mock
-import time
-
-import apihelper
 
 def side_effect_bad_password(*args, **kwargs):
     raise Exception()
 
+
 class MockObject:
     def __init__(self, **kwargs):
-        for (k,v) in kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(self, k, v)
 
+
 class TestCachedAuthenticator(unittest.TestCase):
-    @patch('apihelper.User.objects')
-    @patch('apihelper.django_authenticate')
+    @patch("apihelper.User.objects")
+    @patch("apihelper.django_authenticate")
     def test_authenticate_notcached(self, mock_django_authenticate, mock_user_filter):
-        the_user = MockObject(id=123, email="testuser@test.com", username="testuser@test.com", password="foobar")
+        the_user = MockObject(
+            id=123,
+            email="testuser@test.com",
+            username="testuser@test.com",
+            password="foobar",
+        )
         mock_django_authenticate.return_value = the_user
         mock_user_filter.return_value = [the_user]
 
@@ -55,23 +59,35 @@
 
         mock_django_authenticate.assert_called()
 
-    @patch('apihelper.User.objects')
-    @patch('apihelper.django_authenticate')
-    def test_authenticate_notcached_badpassword(self, mock_django_authenticate, mock_user_filter):
-        the_user = MockObject(id=123, email="testuser@test.com", username="testuser@test.com", password="foobar")
+    @patch("apihelper.User.objects")
+    @patch("apihelper.django_authenticate")
+    def test_authenticate_notcached_badpassword(
+        self, mock_django_authenticate, mock_user_filter
+    ):
+        the_user = MockObject(
+            id=123,
+            email="testuser@test.com",
+            username="testuser@test.com",
+            password="foobar",
+        )
         mock_django_authenticate.side_effect = side_effect_bad_password
         mock_user_filter.return_value = [the_user]
 
         ca = apihelper.CachedAuthenticator()
-        with self.assertRaises(Exception) as e:
-            result = ca.authenticate("testuser@test.com", "not_foobar")
+        with self.assertRaises(Exception):
+            ca.authenticate("testuser@test.com", "not_foobar")
 
         mock_django_authenticate.assert_called()
 
-    @patch('apihelper.User.objects')
-    @patch('apihelper.django_authenticate')
+    @patch("apihelper.User.objects")
+    @patch("apihelper.django_authenticate")
     def test_authenticate_cached(self, mock_django_authenticate, mock_user_filter):
-        the_user = MockObject(id=123, email="testuser@test.com", username="testuser@test.com", password="foobar")
+        the_user = MockObject(
+            id=123,
+            email="testuser@test.com",
+            username="testuser@test.com",
+            password="foobar",
+        )
         mock_django_authenticate.return_value = the_user
         mock_user_filter.return_value = [the_user]
 
@@ -84,21 +100,38 @@
         mock_django_authenticate.assert_not_called()
 
     def test_trim(self):
-        user_one = MockObject(id=123, email="testuser@test.com", username="testuser@test.com", password="foobar")
-        user_two = MockObject(id=124, email="testuser4@test.com", username="testuser@test.com", password="foobar4")
+        user_one = MockObject(
+            id=123,
+            email="testuser@test.com",
+            username="testuser@test.com",
+            password="foobar",
+        )
+        user_two = MockObject(
+            id=124,
+            email="testuser4@test.com",
+            username="testuser@test.com",
+            password="foobar4",
+        )
 
         ca = apihelper.CachedAuthenticator()
 
         key_one = "%s:%s" % (user_one.username, user_one.password)
-        ca.cached_creds[key_one] = {"timeout": time.time() - 11, "user_id": user_one.id}  # this will get trimmed
+        ca.cached_creds[key_one] = {
+            "timeout": time.time() - 11,
+            "user_id": user_one.id,
+        }  # this will get trimmed
 
         key_two = "%s:%s" % (user_two.username, user_two.password)
-        ca.cached_creds[key_two] = {"timeout": time.time() + 10, "user_id": user_two.id}  # this will not
+        ca.cached_creds[key_two] = {
+            "timeout": time.time() + 10,
+            "user_id": user_two.id,
+        }  # this will not
 
         ca.trim()
 
-        assert(len(ca.cached_creds.keys()) == 1)
-        assert(ca.cached_creds.values()[0]["user_id"] == user_two.id)
+        assert len(ca.cached_creds.keys()) == 1
+        assert ca.cached_creds.values()[0]["user_id"] == user_two.id
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/coreapi/tests/orm_user_crud.py b/xos/coreapi/tests/orm_user_crud.py
index f9c6229..d1f1afe 100644
--- a/xos/coreapi/tests/orm_user_crud.py
+++ b/xos/coreapi/tests/orm_user_crud.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,54 +12,58 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+import string
+import random
+from testconfig import USERNAME, PASSWORD
+import grpc_client
 import sys
+
 sys.path.append("..")
 
-import grpc_client
-from testconfig import *
+print("orm_user_crud")
 
-print "orm_user_crud"
-
-c=grpc_client.SecureClient("xos-core.cord.lab", username=USERNAME, password=PASSWORD)
+c = grpc_client.SecureClient("xos-core.cord.lab", username=USERNAME, password=PASSWORD)
 
 # create a new user and save it
-u=c.xos_orm.User.objects.new()
-assert(u.id==0)
-import random, string
-u.email=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
-u.site_id=1
+u = c.xos_orm.User.objects.new()
+assert u.id == 0
+
+u.email = "".join(
+    random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+)
+u.site_id = 1
 u.save()
 
 # when we created the user, he should be assigned an id
 orig_id = u.id
-assert(orig_id!=0)
+assert orig_id != 0
 
 # site object should be populated
-assert(u.site is not None)
+assert u.site is not None
 
 # site object should have a backpointer to user
 u_all = u.site.users.all()
 u_all = [x for x in u_all if x.email == u.email]
-assert(len(u_all)==1)
+assert len(u_all) == 1
 
 # update the user
-u.password="foobar"
+u.password = "foobar"
 u.save()
 
 # update should not have changed it
-assert(u.id==orig_id)
+assert u.id == orig_id
 
 # check a listall and make sure the user is listed
 u_all = c.xos_orm.User.objects.all()
 u_all = [x for x in u_all if x.email == u.email]
-assert(len(u_all)==1)
+assert len(u_all) == 1
 u2 = u_all[0]
-assert(u2.id == u.id)
+assert u2.id == u.id
 
 # get and make sure the password was updated
 u3 = c.xos_orm.User.objects.get(id=orig_id)
-assert(u3.password=="foobar")
+assert u3.password == "foobar"
 
 # delete the user
 u3.delete()
@@ -68,7 +71,6 @@
 # make sure it is deleted
 u_all = c.xos_orm.User.objects.all()
 u_all = [x for x in u_all if x.email == u.email]
-assert(len(u_all)==0)
+assert len(u_all) == 0
 
-print "    okay"
-
+print("    okay")
diff --git a/xos/coreapi/tests/testconfig.py b/xos/coreapi/tests/testconfig.py
index addf729..4c925c9 100644
--- a/xos/coreapi/tests/testconfig.py
+++ b/xos/coreapi/tests/testconfig.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-USERNAME="xosadmin@opencord.org"
-PASSWORD="mypassword"
+USERNAME = "xosadmin@opencord.org"
+PASSWORD = "mypassword"
diff --git a/xos/coreapi/tests/tosca.py b/xos/coreapi/tests/tosca.py
index a7cdd5c..b38f323 100644
--- a/xos/coreapi/tests/tosca.py
+++ b/xos/coreapi/tests/tosca.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,18 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from testconfig import USERNAME, PASSWORD
+import grpc_client
 import random
 import string
 import sys
+
 sys.path.append("..")
 
-import grpc_client
-from testconfig import *
 
-SLICE_NAME="mysite_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
+SLICE_NAME = "mysite_" + "".join(
+    random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+)
 
-TOSCA_RECIPE="""tosca_definitions_version: tosca_simple_yaml_1_0
+TOSCA_RECIPE = (
+    """tosca_definitions_version: tosca_simple_yaml_1_0
 
 description: Just some test...
 
@@ -42,36 +45,38 @@
           - slice:
                 node: mysite
                 relationship: tosca.relationships.MemberOfSite
-""" % SLICE_NAME
+"""
+    % SLICE_NAME
+)
 
-print "tosca_test"
+print("tosca_test")
 
-c=grpc_client.SecureClient("xos-core.cord.lab", username=USERNAME, password=PASSWORD)
-request=grpc_client.ToscaRequest()
+c = grpc_client.SecureClient("xos-core.cord.lab", username=USERNAME, password=PASSWORD)
+request = grpc_client.ToscaRequest()
 request.recipe = TOSCA_RECIPE
 
-print "Execute"
+print("Execute")
 
-response=c.utility.RunTosca(request)
+response = c.utility.RunTosca(request)
 
 if response.status == response.SUCCESS:
-    print "  success"
+    print("  success")
 else:
-    print "  failure"
+    print("  failure")
 
 for line in response.messages.split("\n"):
-    print "    %s" % line
+    print("    %s" % line)
 
-print "Destroy"
+print("Destroy")
 
 response = c.utility.DestroyTosca(request)
 
 if response.status == response.SUCCESS:
-    print "  success"
+    print("  success")
 else:
-    print "  failure"
+    print("  failure")
 
 for line in response.messages.split("\n"):
-    print "    %s" % line
+    print("    %s" % line)
 
-print "Done"
+print("Done")
diff --git a/xos/coreapi/try_models.py b/xos/coreapi/try_models.py
index c8675cd..90bbe0e 100644
--- a/xos/coreapi/try_models.py
+++ b/xos/coreapi/try_models.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,7 +17,7 @@
 
 import django
 
-xos_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
+xos_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/..")
 sys.path.append(xos_path)
 
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
diff --git a/xos/coreapi/unload_unwanted_apps.py b/xos/coreapi/unload_unwanted_apps.py
index a4a1120..30a618d 100644
--- a/xos/coreapi/unload_unwanted_apps.py
+++ b/xos/coreapi/unload_unwanted_apps.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,6 +16,7 @@
 import os
 import shutil
 
+
 class AppUnloader(object):
     def __init__(self):
         self.manifests_dir = "/opt/xos/dynamic_services/manifests"
@@ -41,9 +41,10 @@
         os.system("cd /opt/xos; python ./manage.py migrate %s zero" % manifest["name"])
 
         # be paranoid about calling rmtree
-        assert(os.path.abspath(manifest["dest_dir"]).startswith("/opt/xos"))
+        assert os.path.abspath(manifest["dest_dir"]).startswith("/opt/xos")
 
         shutil.rmtree(manifest["dest_dir"])
 
+
 if __name__ == "__main__":
     AppUnloader().unload_all_eligible()
diff --git a/xos/coreapi/wait_for_db.py b/xos/coreapi/wait_for_db.py
index 939d4ed..04707e8 100644
--- a/xos/coreapi/wait_for_db.py
+++ b/xos/coreapi/wait_for_db.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import psycopg2
-import sys
 import time
 import traceback
 
@@ -22,27 +21,27 @@
 
 Config.init()
 
+
 def wait_for_database():
     while True:
-        db_name = Config.get("database.name")
         db_user = Config.get("database.username")
         db_password = Config.get("database.password")
         db_host = "xos-db"  # TODO: this should be configurable
-        db_port = 5432      # TODO: this should be configurable
+        db_port = 5432  # TODO: this should be configurable
 
         try:
-            myConnection = psycopg2.connect(host = db_host, port = db_port,
-                                            user = db_user, password = db_password)
-
-
+            myConnection = psycopg2.connect(
+                host=db_host, port=db_port, user=db_user, password=db_password
+            )
 
             myConnection.close()
 
             # Exit on successful connection
-            print "Database is available"
+            print("Database is available")
             return
-        except:
+        except BaseException:
+            print("Exception while connecting to db"); traceback.print_exc()
             time.sleep(1)
 
+
 wait_for_database()
diff --git a/xos/coreapi/xos_dynamicload_api.py b/xos/coreapi/xos_dynamicload_api.py
index 1f9cd48..b02af06 100644
--- a/xos/coreapi/xos_dynamicload_api.py
+++ b/xos/coreapi/xos_dynamicload_api.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,23 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import base64
-import fnmatch
-import os
-import sys
-import time
-import traceback
 from protos import dynamicload_pb2
 from protos import dynamicload_pb2_grpc
-from google.protobuf.empty_pb2 import Empty
-
-from importlib import import_module
 
 from xosutil.autodiscover_version import autodiscover_version_of_main
 from dynamicbuild import DynamicBuilder
 from apistats import REQUEST_COUNT, track_request_time
 import grpc
 
+
 class DynamicLoadService(dynamicload_pb2_grpc.dynamicloadServicer):
     def __init__(self, thread_pool, server):
         self.thread_pool = thread_pool
@@ -52,16 +43,22 @@
             builder = DynamicBuilder()
             result = builder.handle_loadmodels_request(request)
 
-            if (result == builder.SOMETHING_CHANGED):
+            if result == builder.SOMETHING_CHANGED:
                 self.server.delayed_shutdown(5)
 
             response = dynamicload_pb2.LoadModelsReply()
             response.status = response.SUCCESS
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "LoadModels", grpc.StatusCode.OK).inc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "LoadModels", grpc.StatusCode.OK
+            ).inc()
             return response
-        except Exception, e:
-            import traceback; traceback.print_exc()
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "LoadModels", grpc.StatusCode.INTERNAL).inc()
+        except Exception as e:
+            import traceback
+
+            traceback.print_exc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "LoadModels", grpc.StatusCode.INTERNAL
+            ).inc()
             raise e
 
     @track_request_time("DynamicLoad", "UnloadModels")
@@ -70,16 +67,22 @@
             builder = DynamicBuilder()
             result = builder.handle_unloadmodels_request(request)
 
-            if (result == builder.SOMETHING_CHANGED):
+            if result == builder.SOMETHING_CHANGED:
                 self.server.delayed_shutdown(5)
 
             response = dynamicload_pb2.LoadModelsReply()
             response.status = response.SUCCESS
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "UnloadModels", grpc.StatusCode.OK).inc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "UnloadModels", grpc.StatusCode.OK
+            ).inc()
             return response
-        except Exception, e:
-            import traceback; traceback.print_exc()
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "UnloadModels", grpc.StatusCode.INTERNAL).inc()
+        except Exception as e:
+            import traceback
+
+            traceback.print_exc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "UnloadModels", grpc.StatusCode.INTERNAL
+            ).inc()
             raise e
 
     @track_request_time("DynamicLoad", "GetLoadStatus")
@@ -116,11 +119,17 @@
                 item.state = "present"
             else:
                 item.state = "load"
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "GetLoadStatus", grpc.StatusCode.OK).inc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "GetLoadStatus", grpc.StatusCode.OK
+            ).inc()
             return response
-        except Exception, e:
-            import traceback; traceback.print_exc()
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "GetLoadStatus", grpc.StatusCode.INTERNAL).inc()
+        except Exception as e:
+            import traceback
+
+            traceback.print_exc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "GetLoadStatus", grpc.StatusCode.INTERNAL
+            ).inc()
             raise e
 
     @track_request_time("DynamicLoad", "GetConvenienceMethods")
@@ -137,12 +146,19 @@
                     item = response.convenience_methods.add()
                     item.filename = cm["filename"]
                     item.contents = open(cm["path"]).read()
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "GetConvenienceMethods", grpc.StatusCode.OK).inc()
+            REQUEST_COUNT.labels(
+                "xos-core", "DynamicLoad", "GetConvenienceMethods", grpc.StatusCode.OK
+            ).inc()
             return response
 
-        except Exception, e:
-            import traceback; traceback.print_exc()
-            REQUEST_COUNT.labels('xos-core', "DynamicLoad", "GetConvenienceMethods", grpc.StatusCode.INTERNAL).inc()
+        except Exception as e:
+            import traceback
+
+            traceback.print_exc()
+            REQUEST_COUNT.labels(
+                "xos-core",
+                "DynamicLoad",
+                "GetConvenienceMethods",
+                grpc.StatusCode.INTERNAL,
+            ).inc()
             raise e
-
-
diff --git a/xos/coreapi/xos_modeldefs_api.py b/xos/coreapi/xos_modeldefs_api.py
index 3c96d65..edbfac9 100644
--- a/xos/coreapi/xos_modeldefs_api.py
+++ b/xos/coreapi/xos_modeldefs_api.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import yaml
 from protos import modeldefs_pb2, modeldefs_pb2_grpc
 import grpc
-from xos.exceptions import *
 from apihelper import XOSAPIHelperMixin
 from apistats import REQUEST_COUNT, track_request_time
 from xosconfig import Config
@@ -25,9 +24,10 @@
 
 log = create_logger(Config().get('logging'))
 
-def yaml_to_grpc(yaml_repr, grpc_container, yaml_key = None, grpc_parent = None):
+
+def yaml_to_grpc(yaml_repr, grpc_container, yaml_key=None, grpc_parent=None):
     if isinstance(yaml_repr, dict):
-        for k,v in yaml_repr.items():
+        for k, v in yaml_repr.items():
             grpc_sub_container = getattr(grpc_container, k)
             yaml_to_grpc(v, grpc_sub_container, k, grpc_container)
     elif isinstance(yaml_repr, list):
@@ -53,6 +53,7 @@
             log.exception("Failed to set attribute %s on element %s has it value is %s and has the wrong type %s" % (yaml_key, grpc_parent.__class__.__name__, yaml_repr, type(yaml_repr)))
             raise e
 
+
 class ModelDefsService(modeldefs_pb2_grpc.modeldefsServicer, XOSAPIHelperMixin):
     def __init__(self, thread_pool):
         self.thread_pool = thread_pool
@@ -62,22 +63,23 @@
 
     @track_request_time("Modeldefs", "ListModelDefs")
     def ListModelDefs(self, request, context):
-        ystr = open('protos/modeldefs.yaml').read()
+        ystr = open("protos/modeldefs.yaml").read()
         yaml_repr = yaml.load(ystr)
 
         modeldefs = modeldefs_pb2.ModelDefs()
 
         yaml_to_grpc(yaml_repr, modeldefs)
 
-        REQUEST_COUNT.labels('xos-core', "Modeldefs", "ListModelDefs", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels(
+            "xos-core", "Modeldefs", "ListModelDefs", grpc.StatusCode.OK
+        ).inc()
         return modeldefs
 
 
-if __name__=='__main__':
-    ystr = open('protos/modeldefs.yaml').read()
-
+if __name__ == "__main__":
+    ystr = open("protos/modeldefs.yaml").read()
     yaml_repr = yaml.load(ystr)
 
     modeldefs = modeldefs_pb2.ModelDefs()
     yaml_to_grpc(yaml_repr, modeldefs)
-    print modeldefs
+    print(modeldefs)
diff --git a/xos/coreapi/xos_utility_api.py b/xos/coreapi/xos_utility_api.py
index d48ad4e..41ca4d0 100644
--- a/xos/coreapi/xos_utility_api.py
+++ b/xos/coreapi/xos_utility_api.py
@@ -12,56 +12,58 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import base64
+import inspect
+from apistats import REQUEST_COUNT, track_request_time
+import grpc
+from apihelper import XOSAPIHelperMixin, translate_exceptions
+from xos.exceptions import XOSNotAuthenticated
+from core.models import ServiceInstance
+from django.db.models import F, Q
+import django.apps
+from django.contrib.auth import authenticate as django_authenticate
 import fnmatch
 import os
 import sys
-import time
 import traceback
 from protos import utility_pb2, utility_pb2_grpc
 from google.protobuf.empty_pb2 import Empty
 
 from importlib import import_module
 from django.conf import settings
+
 SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
 
-from django.contrib.auth import authenticate as django_authenticate
-import django.apps
-from django.db.models import F,Q
-from core.models import *
-from xos.exceptions import *
-from apihelper import XOSAPIHelperMixin, translate_exceptions
-import grpc
-from apistats import REQUEST_COUNT, track_request_time
 
 # The Tosca engine expects to be run from /opt/xos/tosca/ or equivalent. It
 # needs some sys.path fixing up.
-import inspect
+
 currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 toscadir = os.path.join(currentdir, "../tosca")
 
+
 def is_internal_model(model):
     """ things to be excluded from the dirty_models endpoints """
-    if 'django' in model.__module__:
+    if "django" in model.__module__:
         return True
-    if 'cors' in model.__module__:
+    if "cors" in model.__module__:
         return True
-    if 'contenttypes' in model.__module__:
+    if "contenttypes" in model.__module__:
         return True
-    if 'core.models.journal' in model.__module__:  # why?
+    if "core.models.journal" in model.__module__:  # why?
         return True
-    if 'core.models.project' in model.__module__:  # why?
+    if "core.models.project" in model.__module__:  # why?
         return True
     return False
 
+
 def get_xproto(folder):
     matches = []
     for root, dirnames, filenames in os.walk(folder):
-        for filename in fnmatch.filter(filenames, '*.xproto'):
+        for filename in fnmatch.filter(filenames, "*.xproto"):
             matches.append(os.path.join(root, filename))
     return matches
 
+
 class UtilityService(utility_pb2_grpc.utilityServicer, XOSAPIHelperMixin):
     def __init__(self, thread_pool):
         self.thread_pool = thread_pool
@@ -76,48 +78,51 @@
         if not request.username:
             raise XOSNotAuthenticated("No username")
 
-        u=django_authenticate(username=request.username, password=request.password)
+        u = django_authenticate(username=request.username, password=request.password)
         if not u:
-            raise XOSNotAuthenticated("Failed to authenticate user %s" % request.username)
+            raise XOSNotAuthenticated(
+                "Failed to authenticate user %s" % request.username
+            )
 
         session = SessionStore()
         auth = {"username": request.username, "password": request.password}
         session["auth"] = auth
-        session['_auth_user_id'] = u.pk
-        session['_auth_user_backend'] = u.backend
+        session["_auth_user_id"] = u.pk
+        session["_auth_user_backend"] = u.backend
         session.save()
 
         response = utility_pb2.LoginResponse()
         response.sessionid = session.session_key
 
-        REQUEST_COUNT.labels('xos-core', "Utilities", "Login", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels("xos-core", "Utilities", "Login", grpc.StatusCode.OK).inc()
         return response
 
     @translate_exceptions("Utilities", "Logout")
     @track_request_time("Utilities", "Logout")
     def Logout(self, request, context):
         for (k, v) in context.invocation_metadata():
-            if (k.lower()=="x-xossession"):
+            if k.lower() == "x-xossession":
                 s = SessionStore(session_key=v)
                 if "_auth_user_id" in s:
                     del s["_auth_user_id"]
                     s.save()
-        REQUEST_COUNT.labels('xos-core', "Utilities", "Login", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels("xos-core", "Utilities", "Login", grpc.StatusCode.OK).inc()
         return Empty()
 
     # FIXME are we still using these?
     @translate_exceptions("Utilities", "RunTosca")
     @track_request_time("Utilities", "RunTosca")
     def RunTosca(self, request, context):
-        user=self.authenticate(context, required=True)
+        user = self.authenticate(context, required=True)
 
         sys_path_save = sys.path
         try:
             sys.path.append(toscadir)
             from tosca.engine import XOSTosca
+
             xt = XOSTosca(request.recipe, parent_dir=toscadir, log_to_console=False)
             xt.execute(user)
-        except:
+        except BaseException:
             response = utility_pb2.ToscaResponse()
             response.status = response.ERROR
             response.messages = traceback.format_exc()
@@ -134,15 +139,16 @@
     @translate_exceptions("Utilities", "DestryTosca")
     @track_request_time("Utilities", "DestryTosca")
     def DestroyTosca(self, request, context):
-        user=self.authenticate(context, required=True)
+        user = self.authenticate(context, required=True)
 
         sys_path_save = sys.path
         try:
             sys.path.append(toscadir)
             from tosca.engine import XOSTosca
+
             xt = XOSTosca(request.recipe, parent_dir=toscadir, log_to_console=False)
             xt.destroy(user)
-        except:
+        except BaseException:
             response = utility_pb2.ToscaResponse()
             response.status = response.ERROR
             response.messages = traceback.format_exc()
@@ -155,19 +161,22 @@
         response.messages = "\n".join(xt.log_msgs)
 
         return response
+
     # end FIXME
 
     @translate_exceptions("Utilities", "NoOp")
     @track_request_time("Utilities", "NoOp")
     def NoOp(self, request, context):
-        REQUEST_COUNT.labels('xos-core', "Utilities", "NoOp", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels("xos-core", "Utilities", "NoOp", grpc.StatusCode.OK).inc()
         return Empty()
 
     @translate_exceptions("Utilities", "AuthenticatedNoOp")
     @track_request_time("Utilities", "AuthenticatedNoOp")
     def AuthenticatedNoOp(self, request, context):
         self.authenticate(context, required=True)
-        REQUEST_COUNT.labels('xos-core', "Utilities", "AuthenticatedNoOp", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels(
+            "xos-core", "Utilities", "AuthenticatedNoOp", grpc.StatusCode.OK
+        ).inc()
         return Empty()
 
     @translate_exceptions("Utilities", "ListDirtyModels")
@@ -180,23 +189,27 @@
             if is_internal_model(model):
                 continue
             fieldNames = [x.name for x in model._meta.fields]
-            if (not "enacted" in fieldNames) or (not "updated" in fieldNames):
+            if ("enacted" not in fieldNames) or ("updated" not in fieldNames):
                 continue
-            if (request.class_name) and (not fnmatch.fnmatch(model.__name__, request.class_name)):
+            if (request.class_name) and (
+                not fnmatch.fnmatch(model.__name__, request.class_name)
+            ):
                 continue
-            objs = model.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
+            objs = model.objects.filter(Q(enacted__lt=F("updated")) | Q(enacted=None))
             for obj in objs:
                 item = dirty_models.items.add()
                 item.class_name = model.__name__
                 item.id = obj.id
 
-        REQUEST_COUNT.labels('xos-core', "Utilities", "ListDirtyModels", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels(
+            "xos-core", "Utilities", "ListDirtyModels", grpc.StatusCode.OK
+        ).inc()
         return dirty_models
 
     @translate_exceptions("Utilities", "SetDirtyModels")
     @track_request_time("Utilities", "SetDirtyModels")
     def SetDirtyModels(self, request, context):
-        user=self.authenticate(context, required=True)
+        user = self.authenticate(context, required=True)
 
         dirty_models = utility_pb2.ModelList()
 
@@ -205,9 +218,11 @@
             if is_internal_model(model):
                 continue
             fieldNames = [x.name for x in model._meta.fields]
-            if (not "enacted" in fieldNames) or (not "updated" in fieldNames):
+            if ("enacted" not in fieldNames) or ("updated" not in fieldNames):
                 continue
-            if (request.class_name) and (not fnmatch.fnmatch(model.__name__, request.class_name)):
+            if (request.class_name) and (
+                not fnmatch.fnmatch(model.__name__, request.class_name)
+            ):
                 continue
             objs = model.objects.all()
             for obj in objs:
@@ -217,13 +232,15 @@
                     item = dirty_models.items.add()
                     item.class_name = model.__name__
                     item.id = obj.id
-                except Exception, e:
+                except Exception as e:
                     item = dirty_models.items.add()
                     item.class_name = model.__name__
                     item.id = obj.id
                     item.info = str(e)
 
-        REQUEST_COUNT.labels('xos-core', "Utilities", "SetDirtyModels", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels(
+            "xos-core", "Utilities", "SetDirtyModels", grpc.StatusCode.OK
+        ).inc()
         return dirty_models
 
     @translate_exceptions("Utilities", "GetXproto")
@@ -231,13 +248,19 @@
     def GetXproto(self, request, context):
         res = utility_pb2.XProtos()
 
-        core_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../core/models/')
+        core_dir = os.path.abspath(
+            os.path.dirname(os.path.realpath(__file__)) + "/../core/models/"
+        )
         core_xprotos = get_xproto(core_dir)
 
-        service_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../services')
+        service_dir = os.path.abspath(
+            os.path.dirname(os.path.realpath(__file__)) + "/../services"
+        )
         services_xprotos = get_xproto(service_dir)
 
-        dynamic_service_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../dynamic_services')
+        dynamic_service_dir = os.path.abspath(
+            os.path.dirname(os.path.realpath(__file__)) + "/../dynamic_services"
+        )
         dynamic_services_xprotos = get_xproto(dynamic_service_dir)
 
         xprotos = core_xprotos + services_xprotos + dynamic_services_xprotos
@@ -250,7 +273,9 @@
             xproto += content
 
         res.xproto = xproto
-        REQUEST_COUNT.labels('xos-core', "Utilities", "GetXproto", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels(
+            "xos-core", "Utilities", "GetXproto", grpc.StatusCode.OK
+        ).inc()
         return res
 
     @translate_exceptions("Utilities", "GetPopulatedServiceInstances")
@@ -263,7 +288,6 @@
 
         si = ServiceInstance.objects.get(id=request.id)
 
-
         # populate the response object
         response.id = si.id
         response.leaf_model_name = si.leaf_model_name
@@ -283,11 +307,15 @@
 
         for l in subscribed_links:
             if l.subscriber_service_instance:
-                response.subscribed_service_instances.append(l.provider_service_instance_id)
+                response.subscribed_service_instances.append(
+                    l.provider_service_instance_id
+                )
             elif l.subscriber_service:
                 response.subscribed_service.append(l.subscriber_service.id)
             elif l.subscriber_network:
                 response.subscribed_network.append(l.subscriber_network.id)
 
-        REQUEST_COUNT.labels('xos-core', "Utilities", "GetPopulatedServiceInstances", grpc.StatusCode.OK).inc()
+        REQUEST_COUNT.labels(
+            "xos-core", "Utilities", "GetPopulatedServiceInstances", grpc.StatusCode.OK
+        ).inc()
         return response
diff --git a/xos/generate/__init__.py b/xos/generate/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/generate/__init__.py
+++ b/xos/generate/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/generate/dependency_walker.py b/xos/generate/dependency_walker.py
index e5684bb..274c8af 100644
--- a/xos/generate/dependency_walker.py
+++ b/xos/generate/dependency_walker.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +17,7 @@
 
 # NOTE is this used or replaced by `new_base/dependency_walker_new.py`?
 
+from __future__ import print_function
 import os
 import imp
 from xosconfig import Config
@@ -34,16 +34,16 @@
 from multistructlog import create_logger
 
 Config.init()
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 missing_links = {}
 
 try:
-    dep_data = open(Config.get('dependency_graph')).read()
+    dep_data = open(Config.get("dependency_graph")).read()
 except BaseException:
     raise Exception(
-        '[XOS-Dependency-Walker] File %s not found' %
-        Config.get('dependency_graph'))
+        "[XOS-Dependency-Walker] File %s not found" % Config.get("dependency_graph")
+    )
 
 dependencies = json.loads(dep_data)
 
@@ -57,10 +57,10 @@
 
 
 def plural(name):
-    if (name.endswith('s')):
-        return name + 'es'
+    if name.endswith("s"):
+        return name + "es"
     else:
-        return name + 's'
+        return name + "s"
 
 
 def walk_deps(fn, object):
@@ -95,14 +95,13 @@
             try:
                 peer = getattr(object, link)
             except AttributeError:
-                if model + '.' + link not in missing_links:
+                if model + "." + link not in missing_links:
                     log.exception(
-                        "Model missing link for dependency.",
-                        model=model,
-                        link=link)
-                    missing_links[model + '.' + link] = True
+                        "Model missing link for dependency.", model=model, link=link
+                    )
+                    missing_links[model + "." + link] = True
 
-        if (peer):
+        if peer:
             try:
                 peer_objects = peer.all()
             except AttributeError:
@@ -112,7 +111,7 @@
 
             for o in peer_objects:
                 # if (isinstance(o,XOSBase)):
-                if (hasattr(o, 'updated')):
+                if hasattr(o, "updated"):
                     fn(o, object)
                     ret.append(o)
                 # Uncomment the following line to enable recursion
@@ -121,16 +120,16 @@
 
 
 def p(x, source):
-    print x, x.__class__.__name__
+    print(x, x.__class__.__name__)
     return
 
 
 def main():
     # pdb.set_trace()
-    s = Slice.objects.filter(name='princeton_sapan62')
+    s = Slice.objects.filter(name="princeton_sapan62")
     # pdb.set_trace()
-    print walk_inv_deps(p, s[0])
+    print(walk_inv_deps(p, s[0]))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/xos/manage.py b/xos/manage.py
index 0378afa..c81ffb9 100644
--- a/xos/manage.py
+++ b/xos/manage.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,7 +18,7 @@
 import sys
 
 if __name__ == "__main__":
-    os.chdir('..')  # <<<---This is what you want to add
+    os.chdir("..")  # <<<---This is what you want to add
     os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 
     from django.core.management import execute_from_command_line
diff --git a/xos/services/__init__.py b/xos/services/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/services/__init__.py
+++ b/xos/services/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/synchronizers/__init__.py b/xos/synchronizers/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/synchronizers/__init__.py
+++ b/xos/synchronizers/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/synchronizers/model_policy.py b/xos/synchronizers/model_policy.py
index c2eda3a..5374e01 100644
--- a/xos/synchronizers/model_policy.py
+++ b/xos/synchronizers/model_policy.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,179 +12,232 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-from core.models import *
-from datetime import datetime
+from __future__ import print_function
+from multistructlog import create_logger
+from xosconfig import Config
+from core.models import Privilege, imp
 from django.db import reset_queries
 from django.db.models import F, Q
-from django.db.models.signals import post_save
-from django.db.transaction import atomic
-from django.dispatch import receiver
 from django.utils import timezone
-from django.db import models as django_models
-from generate.dependency_walker import *
+from generate.dependency_walker import walk_deps, walk_inv_deps
 
-import pdb
+import os
 import time
 import traceback
 
 modelPolicyEnabled = True
-bad_instances=[]
+bad_instances = []
 
 model_policies = {}
 
-from xosconfig import Config
-from multistructlog import create_logger
 
 Config.init()
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
+
 
 def EnableModelPolicy(x):
     global modelPolicyEnabled
     modelPolicyEnabled = x
 
+
 def update_wp(d, o):
     try:
         save_fields = []
-        if (d.write_protect != o.write_protect):
+        if d.write_protect != o.write_protect:
             d.write_protect = o.write_protect
-            save_fields.append('write_protect')
-        if (save_fields):
+            save_fields.append("write_protect")
+        if save_fields:
             d.save(update_fields=save_fields)
-    except AttributeError,e:
+    except AttributeError as e:
         raise e
 
+
 def update_dep(d, o):
     try:
-        print 'Trying to update %s'%d
+        print("Trying to update %s" % d)
         save_fields = []
-        if (d.updated < o.updated):
-            save_fields = ['updated']
+        if d.updated < o.updated:
+            save_fields = ["updated"]
 
-        if (save_fields):
+        if save_fields:
             d.save(update_fields=save_fields)
-    except AttributeError,e:
-        log.exception("AttributeError in update_dep", e = e)
+    except AttributeError as e:
+        log.exception("AttributeError in update_dep", e=e)
         raise e
-    except Exception,e:
-        log.exception("Exception in update_dep", e = e)
+    except Exception as e:
+        log.exception("Exception in update_dep", e=e)
+
 
 def delete_if_inactive(d, o):
     try:
         d.delete()
-        print "Deleted %s (%s)"%(d,d.__class__.__name__)
-    except:
+        print("Deleted %s (%s)" % (d, d.__class__.__name__))
+    except BaseException:
         pass
     return
 
+
 def load_model_policies(policies_dir=None):
     global model_policies
 
     if policies_dir is None:
-            policies_dir = Config().observer_model_policies_dir
+        policies_dir = Config().observer_model_policies_dir
 
     for fn in os.listdir(policies_dir):
-            pathname = os.path.join(policies_dir,fn)
-            if os.path.isfile(pathname) and fn.startswith("model_policy_") and fn.endswith(".py") and (fn!="__init__.py"):
-                model_policies[fn[:-3]] = imp.load_source(fn[:-3],pathname)
+        pathname = os.path.join(policies_dir, fn)
+        if (
+            os.path.isfile(pathname)
+            and fn.startswith("model_policy_")
+            and fn.endswith(".py")
+            and (fn != "__init__.py")
+        ):
+            model_policies[fn[:-3]] = imp.load_source(fn[:-3], pathname)
 
-    logger.debug("Loaded model polices %s from %s" % (",".join(model_policies.keys()), policies_dir))
+    log.debug(
+        "Loaded model polices %s from %s"
+        % (",".join(model_policies.keys()), policies_dir)
+    )
 
-#@atomic
+
+# @atomic
 def execute_model_policy(instance, deleted):
     # Automatic dirtying
-    if (instance in bad_instances):
+    if instance in bad_instances:
         return
 
     # These are the models whose children get deleted when they are
-    delete_policy_models = ['Slice','Instance','Network']
+    delete_policy_models = ["Slice", "Instance", "Network"]
     sender_name = instance.__class__.__name__
-    policy_name = 'model_policy_%s'%sender_name
-    noargs = False
+    policy_name = "model_policy_%s" % sender_name
 
-    if (not deleted):
+    if not deleted:
         walk_inv_deps(update_dep, instance)
         walk_deps(update_wp, instance)
-    elif (sender_name in delete_policy_models):
+    elif sender_name in delete_policy_models:
         walk_inv_deps(delete_if_inactive, instance)
 
     try:
-        policy_handler = model_policies.get(policy_name, None) # getattr(model_policies, policy_name, None)
-        log.debug("MODEL POLICY: handler %s %s",policy_name = policy_name, policy_handler = policy_handler)
+        policy_handler = model_policies.get(
+            policy_name, None
+        )  # getattr(model_policies, policy_name, None)
+        log.debug(
+            "MODEL POLICY: handler %s %s",
+            policy_name=policy_name,
+            policy_handler=policy_handler,
+        )
         if policy_handler is not None:
-            if (deleted):
+            if deleted:
                 try:
                     policy_handler.handle_delete(instance)
                 except AttributeError:
                     pass
             else:
                 policy_handler.handle(instance)
-        log.debug("MODEL POLICY: completed handler", policy_name = policy_name, policy_handler = policy_handler)
-    except Exception, e:
-        log.exception("MODEL POLICY: Exception when running handler", e = e)
+        log.debug(
+            "MODEL POLICY: completed handler",
+            policy_name=policy_name,
+            policy_handler=policy_handler,
+        )
+    except Exception as e:
+        log.exception("MODEL POLICY: Exception when running handler", e=e)
 
     try:
-        instance.policed=timezone.now()
-        instance.save(update_fields=['policed'])
-    except:
-        log.exception('MODEL POLICY: Object is defective', object = instance)
+        instance.policed = timezone.now()
+        instance.save(update_fields=["policed"])
+    except BaseException:
+        log.exception("MODEL POLICY: Object is defective", object=instance)
         bad_instances.append(instance)
 
-def noop(o,p):
-        pass
+
+def noop(o, p):
+    pass
+
 
 def check_db_connection_okay():
     # django implodes if the database connection is closed by docker-compose
     from django import db
+
     try:
         db.connection.cursor()
-    except Exception, e:
+    except Exception as e:
         if "connection already closed" in traceback.format_exc():
-           log.error("XXX connection already closed")
-           try:
-#               if db.connection:
-#                   db.connection.close()
-               db.close_old_connections()
-           except Exception,f:
-               log.exception("XXX we failed to fix the failure", e = f)
+            log.error("XXX connection already closed")
+            try:
+                #               if db.connection:
+                #                   db.connection.close()
+                db.close_old_connections()
+            except Exception as f:
+                log.exception("XXX we failed to fix the failure", e=f)
         else:
-           log.exception("XXX some other error", e = e)
+            log.exception("XXX some other error", e=e)
+
 
 def run_policy():
     load_model_policies()
 
-    while (True):
+    while True:
         start = time.time()
         try:
             run_policy_once()
-        except Exception,e:
+        except Exception as e:
             log.exception("MODEL_POLICY: Exception in run_policy()", e)
 
-        if (time.time()-start<1):
+        if time.time() - start < 1:
             time.sleep(1)
 
+
 def run_policy_once():
-        from core.models import Instance,Slice,Controller,Network,User,SlicePrivilege,Site,SitePrivilege,Image,ControllerSlice,ControllerUser,ControllerSite
-        models = [Controller, Site, SitePrivilege, Image, ControllerSlice, ControllerSite, ControllerUser, User, Slice, Network, Instance, SlicePrivilege, Privilege]
-        objects = []
-        deleted_objects = []
+    from core.models import (
+        Instance,
+        Slice,
+        Controller,
+        Network,
+        User,
+        SlicePrivilege,
+        Site,
+        SitePrivilege,
+        Image,
+        ControllerSlice,
+        ControllerUser,
+        ControllerSite,
+    )
 
-        check_db_connection_okay()
+    models = [
+        Controller,
+        Site,
+        SitePrivilege,
+        Image,
+        ControllerSlice,
+        ControllerSite,
+        ControllerUser,
+        User,
+        Slice,
+        Network,
+        Instance,
+        SlicePrivilege,
+        Privilege,
+    ]
+    objects = []
+    deleted_objects = []
 
-        for m in models:
-            res = m.objects.filter((Q(policed__lt=F('updated')) | Q(policed=None)) & Q(no_policy=False))
-            objects.extend(res)
-            res = m.deleted_objects.filter(Q(policed__lt=F('updated')) | Q(policed=None))
-            deleted_objects.extend(res)
+    check_db_connection_okay()
 
-        for o in objects:
-            execute_model_policy(o, o.deleted)
+    for m in models:
+        res = m.objects.filter(
+            (Q(policed__lt=F("updated")) | Q(policed=None)) & Q(no_policy=False)
+        )
+        objects.extend(res)
+        res = m.deleted_objects.filter(Q(policed__lt=F("updated")) | Q(policed=None))
+        deleted_objects.extend(res)
 
-        for o in deleted_objects:
-            execute_model_policy(o, True)
+    for o in objects:
+        execute_model_policy(o, o.deleted)
 
-        try:
-            reset_queries()
-        except Exception,e:
-            # this shouldn't happen, but in case it does, catch it...
-            log.exception("MODEL POLICY: exception in reset_queries", e = e)
+    for o in deleted_objects:
+        execute_model_policy(o, True)
+
+    try:
+        reset_queries()
+    except Exception as e:
+        # this shouldn't happen, but in case it does, catch it...
+        log.exception("MODEL POLICY: exception in reset_queries", e=e)
diff --git a/xos/synchronizers/new_base/SyncInstanceUsingAnsible.py b/xos/synchronizers/new_base/SyncInstanceUsingAnsible.py
index 7b52a8d..1ffe3e2 100644
--- a/xos/synchronizers/new_base/SyncInstanceUsingAnsible.py
+++ b/xos/synchronizers/new_base/SyncInstanceUsingAnsible.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -48,7 +47,7 @@
     def defer_sync(self, o, reason):
         # zdw, 2017-02-18 - is raising the exception here necessary? - seems like
         # it's just logging the same thing twice
-        self.log.info("defer object", object = str(o), reason = reason, **o.tologdict())
+        self.log.info("defer object", object=str(o), reason=reason, **o.tologdict())
         raise DeferredException("defer object %s due to %s" % (str(o), reason))
 
     def get_extra_attributes(self, o):
@@ -78,7 +77,9 @@
             template_name = self.template_name
         tStart = time.time()
         run_template_ssh(template_name, fields, object=o)
-        self.log.info("playbook execution time", time = int(time.time() - tStart), **o.tologdict())
+        self.log.info(
+            "playbook execution time", time=int(time.time() - tStart), **o.tologdict()
+        )
 
     def pre_sync_hook(self, o, fields):
         pass
@@ -101,7 +102,11 @@
 
     def get_key_name(self, instance):
         if instance.isolation == "vm":
-            if instance.slice and instance.slice.service and instance.slice.service.private_key_fn:
+            if (
+                instance.slice
+                and instance.slice.service
+                and instance.slice.service.private_key_fn
+            ):
                 key_name = instance.slice.service.private_key_fn
             else:
                 raise Exception("Make sure to set private_key_fn in the service")
@@ -118,27 +123,29 @@
         # return all of the fields that tell Ansible how to talk to the context
         # that's setting up the container.
 
-        if (instance.isolation == "vm"):
+        if instance.isolation == "vm":
             # legacy where container was configured by sync_vcpetenant.py
 
-            fields = {"instance_name": instance.name,
-                      "hostname": instance.node.name,
-                      "instance_id": instance.instance_id,
-                      "username": "ubuntu",
-                      "ssh_ip": instance.get_ssh_ip(),
-                      }
+            fields = {
+                "instance_name": instance.name,
+                "hostname": instance.node.name,
+                "instance_id": instance.instance_id,
+                "username": "ubuntu",
+                "ssh_ip": instance.get_ssh_ip(),
+            }
 
-        elif (instance.isolation == "container"):
+        elif instance.isolation == "container":
             # container on bare metal
             node = self.get_node(instance)
             hostname = node.name
-            fields = {"hostname": hostname,
-                      "baremetal_ssh": True,
-                      "instance_name": "rootcontext",
-                      "username": "root",
-                      "container_name": "%s-%s" % (instance.slice.name, str(instance.id))
-                      # ssh_ip is not used for container-on-metal
-                      }
+            fields = {
+                "hostname": hostname,
+                "baremetal_ssh": True,
+                "instance_name": "rootcontext",
+                "username": "root",
+                "container_name": "%s-%s" % (instance.slice.name, str(instance.id))
+                # ssh_ip is not used for container-on-metal
+            }
         else:
             # container in a VM
             if not instance.parent:
@@ -149,13 +156,14 @@
                 raise Exception("Container-in-VM parent has no service")
             if not instance.parent.slice.service.private_key_fn:
                 raise Exception("Container-in-VM parent service has no private_key_fn")
-            fields = {"hostname": instance.parent.node.name,
-                      "instance_name": instance.parent.name,
-                      "instance_id": instance.parent.instance_id,
-                      "username": "ubuntu",
-                      "ssh_ip": instance.parent.get_ssh_ip(),
-                      "container_name": "%s-%s" % (instance.slice.name, str(instance.id))
-                      }
+            fields = {
+                "hostname": instance.parent.node.name,
+                "instance_name": instance.parent.name,
+                "instance_id": instance.parent.instance_id,
+                "username": "ubuntu",
+                "ssh_ip": instance.parent.get_ssh_ip(),
+                "container_name": "%s-%s" % (instance.slice.name, str(instance.id)),
+            }
 
         key_name = self.get_key_name(instance)
         if not os.path.exists(key_name):
@@ -170,22 +178,33 @@
         if not instance.deleted:
             cslice = ControllerSlice.objects.get(slice_id=instance.slice.id)
             if not cslice:
-                raise Exception("Controller slice object for %s does not exist" % instance.slice.name)
+                raise Exception(
+                    "Controller slice object for %s does not exist"
+                    % instance.slice.name
+                )
 
             cuser = ControllerUser.objects.get(user_id=instance.creator.id)
             if not cuser:
-                raise Exception("Controller user object for %s does not exist" % instance.creator)
+                raise Exception(
+                    "Controller user object for %s does not exist" % instance.creator
+                )
 
-            fields.update({"keystone_tenant_id": cslice.tenant_id,
-                           "keystone_user_id": cuser.kuser_id,
-                           "rabbit_user": getattr(instance.controller, "rabbit_user", None),
-                           "rabbit_password": getattr(instance.controller, "rabbit_password", None),
-                           "rabbit_host": getattr(instance.controller, "rabbit_host", None)})
+            fields.update(
+                {
+                    "keystone_tenant_id": cslice.tenant_id,
+                    "keystone_user_id": cuser.kuser_id,
+                    "rabbit_user": getattr(instance.controller, "rabbit_user", None),
+                    "rabbit_password": getattr(
+                        instance.controller, "rabbit_password", None
+                    ),
+                    "rabbit_host": getattr(instance.controller, "rabbit_host", None),
+                }
+            )
 
         return fields
 
     def sync_record(self, o):
-        self.log.info("sync'ing object", object = str(o), **o.tologdict())
+        self.log.info("sync'ing object", object=str(o), **o.tologdict())
 
         self.prepare_record(o)
 
@@ -198,12 +217,13 @@
                 # UNTESTED
 
                 (hostname, container_name) = self.get_external_sync(o)
-                fields = {"hostname": hostname,
-                          "baremetal_ssh": True,
-                          "instance_name": "rootcontext",
-                          "username": "root",
-                          "container_name": container_name
-                          }
+                fields = {
+                    "hostname": hostname,
+                    "baremetal_ssh": True,
+                    "instance_name": "rootcontext",
+                    "username": "root",
+                    "container_name": container_name,
+                }
                 key_name = self.get_node_key(node)
                 if not os.path.exists(key_name):
                     raise Exception("Node key %s does not exist" % key_name)
@@ -225,7 +245,9 @@
 
                 fields = self.get_ansible_fields(instance)
 
-        fields["ansible_tag"] = getattr(o, "ansible_tag", o.__class__.__name__ + "_" + str(o.id))
+        fields["ansible_tag"] = getattr(
+            o, "ansible_tag", o.__class__.__name__ + "_" + str(o.id)
+        )
 
         # If 'o' defines a 'sync_attributes' list, then we'll copy those
         # attributes into the Ansible recipe's field list automatically.
@@ -243,10 +265,14 @@
         try:
             # TODO: This may be broken, as get_controller() does not exist in convenience wrapper
             controller = o.get_controller()
-            controller_register = json.loads(o.node.site_deployment.controller.backend_register)
+            controller_register = json.loads(
+                o.node.site_deployment.controller.backend_register
+            )
 
-            if (controller_register.get('disabled', False)):
-                raise InnocuousException('Controller %s is disabled' % o.node.site_deployment.controller.name)
+            if controller_register.get("disabled", False):
+                raise InnocuousException(
+                    "Controller %s is disabled" % o.node.site_deployment.controller.name
+                )
         except AttributeError:
             pass
 
@@ -265,15 +291,18 @@
 
             # XXX - this probably needs more work...
 
-            fields = {"hostname": instance,
-                      "instance_id": "ubuntu",  # this is the username to log into
-                      "private_key": service.key,
-                      }
+            fields = {
+                "hostname": instance,
+                "instance_id": "ubuntu",  # this is the username to log into
+                "private_key": service.key,
+            }
         else:
             # sync to an XOS instance
             fields = self.get_ansible_fields(instance)
 
-            fields["ansible_tag"] = getattr(o, "ansible_tag", o.__class__.__name__ + "_" + str(o.id))
+            fields["ansible_tag"] = getattr(
+                o, "ansible_tag", o.__class__.__name__ + "_" + str(o.id)
+            )
 
         # If 'o' defines a 'sync_attributes' list, then we'll copy those
         # attributes into the Ansible recipe's field list automatically.
@@ -284,9 +313,8 @@
         if hasattr(self, "map_delete_inputs"):
             fields.update(self.map_delete_inputs(o))
 
-        fields['delete'] = True
+        fields["delete"] = True
         res = self.run_playbook(o, fields)
 
         if hasattr(self, "map_delete_outputs"):
             self.map_delete_outputs(o, res)
-
diff --git a/xos/synchronizers/new_base/__init__.py b/xos/synchronizers/new_base/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/synchronizers/new_base/__init__.py
+++ b/xos/synchronizers/new_base/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/synchronizers/new_base/ansible_helper.py b/xos/synchronizers/new_base/ansible_helper.py
index 198047f..c607607 100644
--- a/xos/synchronizers/new_base/ansible_helper.py
+++ b/xos/synchronizers/new_base/ansible_helper.py
@@ -14,6 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import jinja2
 import tempfile
 import os
@@ -32,25 +33,30 @@
 
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 
 step_dir = Config.get("steps_dir")
 sys_dir = Config.get("sys_dir")
 
-os_template_loader = jinja2.FileSystemLoader( searchpath=[step_dir, "/opt/xos/synchronizers/shared_templates"])
+os_template_loader = jinja2.FileSystemLoader(
+    searchpath=[step_dir, "/opt/xos/synchronizers/shared_templates"]
+)
 os_template_env = jinja2.Environment(loader=os_template_loader)
 
+
 def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
-    return ''.join(random.choice(chars) for _ in range(size))
+    return "".join(random.choice(chars) for _ in range(size))
+
 
 def shellquote(s):
     return "'" + s.replace("'", "'\\''") + "'"
 
+
 def get_playbook_fn(opts, path):
     if not opts.get("ansible_tag", None):
         # if no ansible_tag is in the options, then generate a unique one
-        objname= id_generator()
+        objname = id_generator()
         opts = opts.copy()
         opts["ansible_tag"] = objname
 
@@ -61,19 +67,22 @@
         os.makedirs(pathed_sys_dir)
 
     # symlink steps/roles into sys/roles so that playbooks can access roles
-    roledir = os.path.join(step_dir,"roles")
+    roledir = os.path.join(step_dir, "roles")
     rolelink = os.path.join(pathed_sys_dir, "roles")
     if os.path.isdir(roledir) and not os.path.islink(rolelink):
-        os.symlink(roledir,rolelink)
+        os.symlink(roledir, rolelink)
 
-    return (opts, os.path.join(pathed_sys_dir,objname))
+    return (opts, os.path.join(pathed_sys_dir, objname))
+
 
 def run_playbook(ansible_hosts, ansible_config, fqp, opts):
-    args = {"ansible_hosts": ansible_hosts,
-            "ansible_config": ansible_config,
-            "fqp": fqp,
-            "opts": opts,
-            "config_file": Config.get_config_file()}
+    args = {
+        "ansible_hosts": ansible_hosts,
+        "ansible_config": ansible_config,
+        "fqp": fqp,
+        "opts": opts,
+        "config_file": Config.get_config_file(),
+    }
 
     keep_temp_files = Config.get("keep_temp_files")
 
@@ -81,7 +90,7 @@
     args_fn = None
     result_fn = None
     try:
-        log.info("creating args file",dir = dir)
+        log.info("creating args file", dir=dir)
 
         args_fn = os.path.join(dir, "args")
         result_fn = os.path.join(dir, "result")
@@ -95,11 +104,11 @@
         result = pickle.loads(open(result_fn).read())
 
         if hasattr(result, "exception"):
-            log.error("Exception in playbook",exception = result["exception"])
+            log.error("Exception in playbook", exception=result["exception"])
 
         stats = result.get("stats", None)
         aresults = result.get("aresults", None)
-    except Exception,e:
+    except Exception as e:
         log.exception("Exception running ansible_main")
         stats = None
         aresults = None
@@ -113,13 +122,23 @@
 
     return (stats, aresults)
 
-def run_template(name, opts, path='', expected_num=None, ansible_config=None, ansible_hosts=None, run_ansible_script=None, object=None):
+
+def run_template(
+    name,
+    opts,
+    path="",
+    expected_num=None,
+    ansible_config=None,
+    ansible_hosts=None,
+    run_ansible_script=None,
+    object=None,
+):
     template = os_template_env.get_template(name)
     buffer = template.render(opts)
 
     (opts, fqp) = get_playbook_fn(opts, path)
 
-    f = open(fqp,'w')
+    f = open(fqp, "w")
     f.write(buffer)
     f.flush()
 
@@ -130,92 +149,96 @@
     stats,aresults = q.get()
     p.join()
     """
-    stats,aresults = run_playbook(ansible_hosts,ansible_config,fqp,opts)
+    stats, aresults = run_playbook(ansible_hosts, ansible_config, fqp, opts)
 
     error_msg = []
 
-    output_file = fqp + '.out'
+    output_file = fqp + ".out"
     try:
-        if (aresults is None):
-            raise ValueError("Error executing playbook %s"%fqp)
+        if aresults is None:
+            raise ValueError("Error executing playbook %s" % fqp)
 
         ok_results = []
         total_unreachable = 0
         failed = 0
 
-        ofile = open(output_file, 'w')
+        ofile = open(output_file, "w")
 
         for x in aresults:
             if not x.is_failed() and not x.is_unreachable() and not x.is_skipped():
                 ok_results.append(x)
             elif x.is_unreachable():
-                failed+=1
-                total_unreachable+=1
+                failed += 1
+                total_unreachable += 1
                 try:
-                    error_msg.append(x._result['msg'])
-                except:
+                    error_msg.append(x._result["msg"])
+                except BaseException:
                     pass
             elif x.is_failed():
-                failed+=1
+                failed += 1
                 try:
-                    error_msg.append(x._result['msg'])
-                except:
+                    error_msg.append(x._result["msg"])
+                except BaseException:
                     pass
 
             # FIXME (zdw, 2017-02-19) - may not be needed with new callback logging
 
-            ofile.write('%s: %s\n'%(x._task, str(x._result)))
+            ofile.write("%s: %s\n" % (x._task, str(x._result)))
 
-            if (object):
+            if object:
                 oprops = object.tologdict()
                 ansible = x._result
-                oprops['xos_type']='ansible'
-                oprops['ansible_result']=json.dumps(ansible)
+                oprops["xos_type"] = "ansible"
+                oprops["ansible_result"] = json.dumps(ansible)
 
                 if failed == 0:
-                    oprops['ansible_status']='OK'
+                    oprops["ansible_status"] = "OK"
                 else:
-                    oprops['ansible_status']='FAILED'
+                    oprops["ansible_status"] = "FAILED"
 
-                log.info('Ran Ansible task',task = x._task, **oprops)
-
+                log.info("Ran Ansible task", task=x._task, **oprops)
 
         ofile.close()
 
         if (expected_num is not None) and (len(ok_results) != expected_num):
-            raise ValueError('Unexpected num %s!=%d' % (str(expected_num), len(ok_results)) )
+            raise ValueError(
+                "Unexpected num %s!=%d" % (str(expected_num), len(ok_results))
+            )
 
-        if (failed):
-            raise ValueError('Ansible playbook failed.')
+        if failed:
+            raise ValueError("Ansible playbook failed.")
 
         # NOTE(smbaker): Playbook errors are slipping through where `aresults` does not show any failed tasks, but
         # `stats` does show them. See CORD-3169.
         hosts = sorted(stats.processed.keys())
         for h in hosts:
             t = stats.summarize(h)
-            if t['unreachable'] > 0:
-                raise ValueError("Ansible playbook reported unreachable for host %s" % h)
-            if t['failures'] > 0:
+            if t["unreachable"] > 0:
+                raise ValueError(
+                    "Ansible playbook reported unreachable for host %s" % h
+                )
+            if t["failures"] > 0:
                 raise ValueError("Ansible playbook reported failures for host %s" % h)
 
-    except ValueError,e:
+    except ValueError as e:
         if error_msg:
             try:
-                error = ' // '.join(error_msg)
-            except:
+                error = " // ".join(error_msg)
+            except BaseException:
                 error = "failed to join error_msg"
             raise Exception(error)
         else:
             raise
 
-    processed_results = map(lambda x:x._result, ok_results)
-    return processed_results[1:] # 0 is setup
+    processed_results = map(lambda x: x._result, ok_results)
+    return processed_results[1:]  # 0 is setup
 
-def run_template_ssh(name, opts, path='', expected_num=None, object=None):
+
+def run_template_ssh(name, opts, path="", expected_num=None, object=None):
     instance_name = opts["instance_name"]
     hostname = opts["hostname"]
     private_key = opts["private_key"]
-    baremetal_ssh = opts.get("baremetal_ssh",False)
+    baremetal_ssh = opts.get("baremetal_ssh", False)
     if baremetal_ssh:
         # no instance_id or ssh_ip for baremetal
         # we never proxy to baremetal
@@ -225,8 +248,8 @@
         ssh_ip = opts["ssh_ip"]
         proxy_ssh = Config.get("proxy_ssh.enabled")
 
-        if (not ssh_ip):
-            raise Exception('IP of ssh proxy not available. Synchronization deferred')
+        if not ssh_ip:
+            raise Exception("IP of ssh proxy not available. Synchronization deferred")
 
     (opts, fqp) = get_playbook_fn(opts, path)
     private_key_pathname = fqp + ".key"
@@ -246,15 +269,21 @@
             # If proxy_ssh_key is known, then we can proxy into the compute
             # node without needing to have the OpenCloud sshd machinery in
             # place.
-            proxy_command = "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s nc %s 22" % (proxy_ssh_key, proxy_ssh_user, hostname, ssh_ip)
+            proxy_command = (
+                "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s nc %s 22"
+                % (proxy_ssh_key, proxy_ssh_user, hostname, ssh_ip)
+            )
         else:
-            proxy_command = "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s" % (private_key_pathname, instance_id, hostname)
+            proxy_command = (
+                "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s"
+                % (private_key_pathname, instance_id, hostname)
+            )
         f.write('ssh_args = -o "%s"\n' % proxy_command)
-    f.write('scp_if_ssh = True\n')
-    f.write('pipelining = True\n')
-    f.write('\n[defaults]\n')
-    f.write('host_key_checking = False\n')
-    f.write('timeout = 30\n')
+    f.write("scp_if_ssh = True\n")
+    f.write("pipelining = True\n")
+    f.write("\n[defaults]\n")
+    f.write("host_key_checking = False\n")
+    f.write("timeout = 30\n")
     f.close()
 
     f = open(hosts_pathname, "w")
@@ -263,22 +292,34 @@
     f.close()
 
     # SSH will complain if private key is world or group readable
-    os.chmod(private_key_pathname, 0600)
+    os.chmod(private_key_pathname, 0o600)
 
-    print "ANSIBLE_CONFIG=%s" % config_pathname
-    print "ANSIBLE_HOSTS=%s" % hosts_pathname
+    print("ANSIBLE_CONFIG=%s" % config_pathname)
+    print("ANSIBLE_HOSTS=%s" % hosts_pathname)
 
-    return run_template(name, opts, path, ansible_config = config_pathname, ansible_hosts = hosts_pathname, run_ansible_script="/opt/xos/synchronizers/base/run_ansible_verbose", object=object)
-
+    return run_template(
+        name,
+        opts,
+        path,
+        ansible_config=config_pathname,
+        ansible_hosts=hosts_pathname,
+        run_ansible_script="/opt/xos/synchronizers/base/run_ansible_verbose",
+        object=object,
+    )
 
 
 def main():
-    run_template('ansible/sync_user_deployments.yaml',{ "endpoint" : "http://172.31.38.128:5000/v2.0/",
-             "name" : "Sapan Bhatia",
-             "email": "gwsapan@gmail.com",
-             "password": "foobar",
-             "admin_user":"admin",
-             "admin_password":"6a789bf69dd647e2",
-             "admin_tenant":"admin",
-             "tenant":"demo",
-             "roles":['user','admin'] })
+    run_template(
+        "ansible/sync_user_deployments.yaml",
+        {
+            "endpoint": "http://172.31.38.128:5000/v2.0/",
+            "name": "Sapan Bhatia",
+            "email": "gwsapan@gmail.com",
+            "password": "foobar",
+            "admin_user": "admin",
+            "admin_password": "6a789bf69dd647e2",
+            "admin_tenant": "admin",
+            "tenant": "demo",
+            "roles": ["user", "admin"],
+        },
+    )
diff --git a/xos/synchronizers/new_base/ansible_main.py b/xos/synchronizers/new_base/ansible_main.py
index c2b5097..08283a4 100644
--- a/xos/synchronizers/new_base/ansible_main.py
+++ b/xos/synchronizers/new_base/ansible_main.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,52 +16,55 @@
 import os
 import pickle
 import sys
-#import json
+
+# import json
 import traceback
 from xosconfig import Config
 
 sys.path.append("/opt/xos")
 
+
 def run_playbook(ansible_hosts, ansible_config, fqp, opts):
     try:
         if ansible_config:
-           os.environ["ANSIBLE_CONFIG"] = ansible_config
+            os.environ["ANSIBLE_CONFIG"] = ansible_config
         else:
-           try:
-               del os.environ["ANSIBLE_CONFIG"]
-           except KeyError:
-               pass
+            try:
+                del os.environ["ANSIBLE_CONFIG"]
+            except KeyError:
+                pass
 
         if ansible_hosts:
-           os.environ["ANSIBLE_HOSTS"] = ansible_hosts
+            os.environ["ANSIBLE_HOSTS"] = ansible_hosts
         else:
-           try:
-               del os.environ["ANSIBLE_HOSTS"]
-           except KeyError:
-               pass
+            try:
+                del os.environ["ANSIBLE_HOSTS"]
+            except KeyError:
+                pass
 
         import ansible_runner
+
         reload(ansible_runner)
 
         # Dropped support for observer_pretend - to be redone
         runner = ansible_runner.Runner(
-            playbook=fqp,
-            run_data=opts,
-            host_file=ansible_hosts)
+            playbook=fqp, run_data=opts, host_file=ansible_hosts
+        )
 
-        stats,aresults = runner.run()
-    except Exception, e:
+        stats, aresults = runner.run()
+    except Exception as e:
         return {"stats": None, "aresults": None, "exception": traceback.format_exc()}
 
     return {"stats": stats, "aresults": aresults}
 
+
 def main():
     input_fn = sys.argv[1]
     result_fn = sys.argv[2]
 
     args = pickle.loads(open(input_fn).read())
 
-    Config.init(args['config_file'], 'synchronizer-config-schema.yaml')
+    Config.init(args["config_file"], "synchronizer-config-schema.yaml")
 
     ansible_hosts = args["ansible_hosts"]
     ansible_config = args["ansible_config"]
@@ -73,5 +75,6 @@
 
     open(result_fn, "w").write(pickle.dumps(result))
 
+
 if __name__ == "__main__":
     main()
diff --git a/xos/synchronizers/new_base/ansible_runner.py b/xos/synchronizers/new_base/ansible_runner.py
index 5b02cf5..d20feb5 100644
--- a/xos/synchronizers/new_base/ansible_runner.py
+++ b/xos/synchronizers/new_base/ansible_runner.py
@@ -14,6 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from multistructlog import create_logger
+from xosconfig import Config
+from ansible.plugins.callback import CallbackBase
+from ansible.utils.display import Display
+from ansible.executor import playbook_executor
+from ansible.parsing.dataloader import DataLoader
+from ansible.vars.manager import VariableManager
+from ansible.inventory.manager import InventoryManager
+from tempfile import NamedTemporaryFile
 import os
 import sys
 import pdb
@@ -21,44 +30,35 @@
 import uuid
 
 from ansible import constants
+
 constants = reload(constants)
 
-from tempfile import NamedTemporaryFile
-from ansible.inventory.manager import InventoryManager
-from ansible.vars.manager import VariableManager
-from ansible.parsing.dataloader import DataLoader
-from ansible.executor import playbook_executor
-from ansible.utils.display import Display
-from ansible.plugins.callback import CallbackBase
 
-from xosconfig import Config
-from multistructlog import create_logger
-
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 
 class ResultCallback(CallbackBase):
 
     CALLBACK_VERSION = 2.0
-    CALLBACK_NAME = 'resultcallback'
-    CALLBACK_TYPE = 'programmatic'
+    CALLBACK_NAME = "resultcallback"
+    CALLBACK_TYPE = "programmatic"
 
     def __init__(self):
         super(ResultCallback, self).__init__()
         self.results = []
         self.uuid = str(uuid.uuid1())
-        self.playbook_status = 'OK'
+        self.playbook_status = "OK"
 
     def v2_playbook_on_start(self, playbook):
         self.playbook = playbook._file_name
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "playbook start",
-            'ansible_status': "OK",
-            'ansible_playbook': self.playbook
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "playbook start",
+            "ansible_status": "OK",
+            "ansible_playbook": self.playbook,
         }
-        log.info("PLAYBOOK START", playbook = self.playbook, **log_extra)
+        log.info("PLAYBOOK START", playbook=self.playbook, **log_extra)
 
     def v2_playbook_on_stats(self, stats):
         host_stats = {}
@@ -66,200 +66,208 @@
             host_stats[host] = stats.summarize(host)
 
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "playbook stats",
-            'ansible_status': self.playbook_status,
-            'ansible_playbook': self.playbook,
-            'ansible_result': json.dumps(host_stats)
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "playbook stats",
+            "ansible_status": self.playbook_status,
+            "ansible_playbook": self.playbook,
+            "ansible_result": json.dumps(host_stats),
         }
 
-        if self.playbook_status == 'OK':
-            log.info("PLAYBOOK END", playbook = self.playbook, **log_extra)
+        if self.playbook_status == "OK":
+            log.info("PLAYBOOK END", playbook=self.playbook, **log_extra)
         else:
-            log.error("PLAYBOOK END", playbook = self.playbook, **log_extra)
+            log.error("PLAYBOOK END", playbook=self.playbook, **log_extra)
 
     def v2_playbook_on_play_start(self, play):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "play start",
-            'ansible_status': self.playbook_status,
-            'ansible_playbook': self.playbook
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "play start",
+            "ansible_status": self.playbook_status,
+            "ansible_playbook": self.playbook,
         }
-        log.debug("PLAY START",play_name = play.name, **log_extra)
+        log.debug("PLAY START", play_name=play.name, **log_extra)
 
     def v2_runner_on_ok(self, result, **kwargs):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "OK",
-            'ansible_result': json.dumps(result._result),
-            'ansible_task': result._task,
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "OK",
+            "ansible_result": json.dumps(result._result),
+            "ansible_task": result._task,
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.debug("OK", task = str(result._task), **log_extra)
+        log.debug("OK", task=str(result._task), **log_extra)
         self.results.append(result)
 
     def v2_runner_on_failed(self, result, **kwargs):
         self.playbook_status = "FAILED"
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "FAILED",
-            'ansible_result': json.dumps(result._result),
-            'ansible_task': result._task,
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "FAILED",
+            "ansible_result": json.dumps(result._result),
+            "ansible_task": result._task,
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.error("FAILED", task = str(result._task), **log_extra)
+        log.error("FAILED", task=str(result._task), **log_extra)
         self.results.append(result)
 
     def v2_runner_on_async_failed(self, result, **kwargs):
         self.playbook_status = "FAILED"
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "ASYNC FAILED",
-            'ansible_result': json.dumps(result._result),
-            'ansible_task': result._task,
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "ASYNC FAILED",
+            "ansible_result": json.dumps(result._result),
+            "ansible_task": result._task,
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.error("ASYNC FAILED", task = str(result._task), **log_extra)
+        log.error("ASYNC FAILED", task=str(result._task), **log_extra)
 
     def v2_runner_on_skipped(self, result, **kwargs):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "SKIPPED",
-            'ansible_result': json.dumps(result._result),
-            'ansible_task': result._task,
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "SKIPPED",
+            "ansible_result": json.dumps(result._result),
+            "ansible_task": result._task,
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.debug("SKIPPED", task = str(result._task), **log_extra)
+        log.debug("SKIPPED", task=str(result._task), **log_extra)
         self.results.append(result)
 
     def v2_runner_on_unreachable(self, result, **kwargs):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "UNREACHABLE",
-            'ansible_result': json.dumps(result._result),
-            'ansible_task': result._task,
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "UNREACHABLE",
+            "ansible_result": json.dumps(result._result),
+            "ansible_task": result._task,
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.error("UNREACHABLE", task = str(result._task), **log_extra)
+        log.error("UNREACHABLE", task=str(result._task), **log_extra)
         self.results.append(result)
 
     def v2_runner_retry(self, result, **kwargs):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "RETRY",
-            'ansible_result': json.dumps(result._result),
-            'ansible_task': result._task,
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "RETRY",
+            "ansible_result": json.dumps(result._result),
+            "ansible_task": result._task,
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.warning("RETRYING - attempt", task =str(result._task), attempt = result._result['attempts'], **log_extra)
+        log.warning(
+            "RETRYING - attempt",
+            task=str(result._task),
+            attempt=result._result["attempts"],
+            **log_extra
+        )
         self.results.append(result)
 
     def v2_playbook_on_handler_task_start(self, task, **kwargs):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "task",
-            'ansible_status': "HANDLER",
-            'ansible_task': task.get_name().strip(),
-            'ansible_playbook': self.playbook,
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "task",
+            "ansible_status": "HANDLER",
+            "ansible_task": task.get_name().strip(),
+            "ansible_playbook": self.playbook,
             # 'ansible_host': result._host.get_name()
         }
-        log.debug("HANDLER", task = task.get_name().strip(), **log_extra)
+        log.debug("HANDLER", task=task.get_name().strip(), **log_extra)
 
     def v2_playbook_on_import_for_host(self, result, imported_file):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "import",
-            'ansible_status': "IMPORT",
-            'ansible_result': json.dumps(result._result),
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "import",
+            "ansible_status": "IMPORT",
+            "ansible_result": json.dumps(result._result),
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.debug("IMPORT", imported_file =imported_file, **log_extra)
+        log.debug("IMPORT", imported_file=imported_file, **log_extra)
         self.results.append(result)
 
     def v2_playbook_on_not_import_for_host(self, result, missing_file):
         log_extra = {
-            'xos_type': "ansible",
-            'ansible_uuid': self.uuid,
-            'ansible_type': "import",
-            'ansible_status': "MISSING IMPORT",
-            'ansible_result': json.dumps(result._result),
-            'ansible_playbook': self.playbook,
-            'ansible_host': result._host.get_name()
+            "xos_type": "ansible",
+            "ansible_uuid": self.uuid,
+            "ansible_type": "import",
+            "ansible_status": "MISSING IMPORT",
+            "ansible_result": json.dumps(result._result),
+            "ansible_playbook": self.playbook,
+            "ansible_host": result._host.get_name(),
         }
-        log.debug("MISSING IMPORT", missing = missing_file, **log_extra)
+        log.debug("MISSING IMPORT", missing=missing_file, **log_extra)
         self.results.append(result)
 
+
 class Options(object):
     """
     Options class to replace Ansible OptParser
     """
-    def __init__(self,
-                 ask_pass=None,
-                 ask_su_pass=None,
-                 ask_sudo_pass=None,
-                 become=None,
-                 become_ask_pass=None,
-                 become_method=None,
-                 become_user=None,
-                 check=None,
-                 connection=None,
-                 diff=None,
-                 flush_cache=None,
-                 force_handlers=None,
-                 forks=1,
-                 listtags=None,
-                 listtasks=None,
-                 module_path=None,
-                 new_vault_password_file=None,
-                 one_line=None,
-                 output_file=None,
-                 poll_interval=None,
-                 private_key_file=None,
-                 remote_user=None,
-                 scp_extra_args=None,
-                 seconds=None,
-                 sftp_extra_args=None,
-                 skip_tags=None,
-                 ssh_common_args=None,
-                 ssh_extra_args=None,
-                 sudo=None,
-                 sudo_user=None,
-                 syntax=None,
-                 tags=None,
-                 timeout=None,
-                 tree=None,
-                 vault_password_files=None,
-                 ask_vault_pass=None,
-                 extra_vars=None,
-                 inventory=None,
-                 listhosts=None,
-                 module_paths=None,
-                 subset=None,
-                 verbosity=None
-                 ):
+
+    def __init__(
+        self,
+        ask_pass=None,
+        ask_su_pass=None,
+        ask_sudo_pass=None,
+        become=None,
+        become_ask_pass=None,
+        become_method=None,
+        become_user=None,
+        check=None,
+        connection=None,
+        diff=None,
+        flush_cache=None,
+        force_handlers=None,
+        forks=1,
+        listtags=None,
+        listtasks=None,
+        module_path=None,
+        new_vault_password_file=None,
+        one_line=None,
+        output_file=None,
+        poll_interval=None,
+        private_key_file=None,
+        remote_user=None,
+        scp_extra_args=None,
+        seconds=None,
+        sftp_extra_args=None,
+        skip_tags=None,
+        ssh_common_args=None,
+        ssh_extra_args=None,
+        sudo=None,
+        sudo_user=None,
+        syntax=None,
+        tags=None,
+        timeout=None,
+        tree=None,
+        vault_password_files=None,
+        ask_vault_pass=None,
+        extra_vars=None,
+        inventory=None,
+        listhosts=None,
+        module_paths=None,
+        subset=None,
+        verbosity=None,
+    ):
 
         if tags:
             self.tags = tags
@@ -308,21 +316,23 @@
         self.vault_password_files = vault_password_files
         self.verbosity = verbosity
 
-class Runner(object):
 
-    def __init__(self, playbook, run_data, private_key_file=None, verbosity=0, host_file=None):
+class Runner(object):
+    def __init__(
+        self, playbook, run_data, private_key_file=None, verbosity=0, host_file=None
+    ):
 
         self.playbook = playbook
         self.run_data = run_data
 
         self.options = Options()
-        self.options.output_file = playbook + '.result'
+        self.options.output_file = playbook + ".result"
         self.options.private_key_file = private_key_file
         self.options.verbosity = verbosity
-        self.options.connection = 'ssh'  # Need a connection type "smart" or "ssh"
-        #self.options.become = True
-        self.options.become_method = 'sudo'
-        self.options.become_user = 'root'
+        self.options.connection = "ssh"  # Need a connection type "smart" or "ssh"
+        # self.options.become = True
+        self.options.become_method = "sudo"
+        self.options.become_user = "root"
 
         # Set global verbosity
         self.display = Display()
@@ -332,24 +342,26 @@
         playbook_executor.verbosity = self.options.verbosity
 
         # Become Pass Needed if not logging in as user root
-        #passwords = {'become_pass': become_pass}
+        # passwords = {'become_pass': become_pass}
 
         # Gets data from YAML/JSON files
         self.loader = DataLoader()
         try:
-            self.loader.set_vault_password(os.environ['VAULT_PASS'])
+            self.loader.set_vault_password(os.environ["VAULT_PASS"])
         except AttributeError:
             pass
 
         # Set inventory, using most of above objects
-        if (host_file):
-            self.inventory = InventoryManager(loader=self.loader, sources = host_file)
+        if host_file:
+            self.inventory = InventoryManager(loader=self.loader, sources=host_file)
         else:
             self.inventory = InventoryManager(loader=self.loader)
 
         # All the variables from all the various places
-        self.variable_manager = VariableManager(loader=self.loader, inventory=self.inventory)
-        self.variable_manager.extra_vars = {} # self.run_data
+        self.variable_manager = VariableManager(
+            loader=self.loader, inventory=self.inventory
+        )
+        self.variable_manager.extra_vars = {}  # self.run_data
 
         # Setup playbook executor, but don't run until run() called
         self.pbex = playbook_executor.PlaybookExecutor(
@@ -358,17 +370,19 @@
             variable_manager=self.variable_manager,
             loader=self.loader,
             options=self.options,
-            passwords={})
+            passwords={},
+        )
 
     def run(self):
-        os.environ['REQUESTS_CA_BUNDLE'] = '/usr/local/share/ca-certificates/local_certs.crt'
+        os.environ[
+            "REQUESTS_CA_BUNDLE"
+        ] = "/usr/local/share/ca-certificates/local_certs.crt"
         callback = ResultCallback()
         self.pbex._tqm._stdout_callback = callback
 
         self.pbex.run()
         stats = self.pbex._tqm._stats
 
-        #os.remove(self.hosts.name)
+        # os.remove(self.hosts.name)
 
-        return stats,callback.results
-
+        return stats, callback.results
diff --git a/xos/synchronizers/new_base/apiaccessor.py b/xos/synchronizers/new_base/apiaccessor.py
index a215e00..a56381b 100644
--- a/xos/synchronizers/new_base/apiaccessor.py
+++ b/xos/synchronizers/new_base/apiaccessor.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +17,7 @@
 import datetime
 import time
 
+
 class CoreApiModelAccessor(ModelAccessor):
     def __init__(self, orm):
         self.orm = orm
@@ -26,33 +26,41 @@
     def get_all_model_classes(self):
         all_model_classes = {}
         for k in self.orm.all_model_names:
-            all_model_classes[k] = getattr(self.orm,k)
+            all_model_classes[k] = getattr(self.orm, k)
         return all_model_classes
 
     def fetch_pending(self, main_objs, deletion=False):
-        if (type(main_objs) is not list):
-                main_objs=[main_objs]
+        if not isinstance(main_objs, list):
+            main_objs = [main_objs]
 
         objs = []
         for main_obj in main_objs:
-            if (not deletion):
-                lobjs = main_obj.objects.filter_special(main_obj.objects.SYNCHRONIZER_DIRTY_OBJECTS)
+            if not deletion:
+                lobjs = main_obj.objects.filter_special(
+                    main_obj.objects.SYNCHRONIZER_DIRTY_OBJECTS
+                )
             else:
-                lobjs = main_obj.objects.filter_special(main_obj.objects.SYNCHRONIZER_DELETED_OBJECTS)
+                lobjs = main_obj.objects.filter_special(
+                    main_obj.objects.SYNCHRONIZER_DELETED_OBJECTS
+                )
             objs.extend(lobjs)
 
         return objs
 
     def fetch_policies(self, main_objs, deletion=False):
-        if (type(main_objs) is not list):
-                main_objs=[main_objs]
+        if not isinstance(main_objs, list):
+            main_objs = [main_objs]
 
         objs = []
         for main_obj in main_objs:
-            if (not deletion):
-                lobjs = main_obj.objects.filter_special(main_obj.objects.SYNCHRONIZER_DIRTY_POLICIES)
+            if not deletion:
+                lobjs = main_obj.objects.filter_special(
+                    main_obj.objects.SYNCHRONIZER_DIRTY_POLICIES
+                )
             else:
-                lobjs = main_obj.objects.filter_special(main_obj.objects.SYNCHRONIZER_DELETED_POLICIES)
+                lobjs = main_obj.objects.filter_special(
+                    main_obj.objects.SYNCHRONIZER_DELETED_POLICIES
+                )
             objs.extend(lobjs)
 
         return objs
@@ -67,7 +75,9 @@
 
     def now(self):
         """ Return the current time for timestamping purposes """
-        return (datetime.datetime.utcnow()-datetime.datetime.fromtimestamp(0)).total_seconds()
+        return (
+            datetime.datetime.utcnow() - datetime.datetime.fromtimestamp(0)
+        ).total_seconds()
 
     def is_type(self, obj, name):
         return obj._wrapped_class.__class__.__name__ == name
@@ -80,6 +90,3 @@
 
     def create_obj(self, cls, **kwargs):
         return cls.objects.new(**kwargs)
-
-
-
diff --git a/xos/synchronizers/new_base/backend.py b/xos/synchronizers/new_base/backend.py
index 00e43f2..2074445 100644
--- a/xos/synchronizers/new_base/backend.py
+++ b/xos/synchronizers/new_base/backend.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import os
 import inspect
 import imp
@@ -28,28 +29,32 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 
 class Backend:
-
-    def __init__(self, log = log):
+    def __init__(self, log=log):
         self.log = log
         pass
 
     def load_sync_step_modules(self, step_dir):
         sync_steps = []
 
-        self.log.info("Loading sync steps", step_dir = step_dir)
+        self.log.info("Loading sync steps", step_dir=step_dir)
 
         for fn in os.listdir(step_dir):
-            pathname = os.path.join(step_dir,fn)
-            if os.path.isfile(pathname) and fn.endswith(".py") and (fn!="__init__.py") and (not fn.startswith("test")):
+            pathname = os.path.join(step_dir, fn)
+            if (
+                os.path.isfile(pathname)
+                and fn.endswith(".py")
+                and (fn != "__init__.py")
+                and (not fn.startswith("test"))
+            ):
 
                 # we need to extend the path to load modules in the step_dir
                 sys_path_save = sys.path
                 sys.path.append(step_dir)
-                module = imp.load_source(fn[:-3],pathname)
+                module = imp.load_source(fn[:-3], pathname)
 
                 self.log.debug("Loaded file: %s", pathname)
 
@@ -59,7 +64,7 @@
                 for classname in dir(module):
                     c = getattr(module, classname, None)
 
-                    #if classname.startswith("Sync"):
+                    # if classname.startswith("Sync"):
                     #    print classname, c, inspect.isclass(c), issubclass(c, SyncStep), hasattr(c,"provides")
 
                     # make sure 'c' is a descendent of SyncStep and has a
@@ -69,10 +74,14 @@
                     if inspect.isclass(c):
                         bases = inspect.getmro(c)
                         base_names = [b.__name__ for b in bases]
-                        if ('SyncStep' in base_names) and (hasattr(c,"provides") or hasattr(c,"observes")) and (c not in sync_steps):
+                        if (
+                            ("SyncStep" in base_names)
+                            and (hasattr(c, "provides") or hasattr(c, "observes"))
+                            and (c not in sync_steps)
+                        ):
                             sync_steps.append(c)
 
-        self.log.info("Loaded sync steps", steps = sync_steps)
+        self.log.info("Loaded sync steps", steps=sync_steps)
 
         return sync_steps
 
@@ -94,7 +103,9 @@
                 # start the observer
                 self.log.info("Starting XOSObserver", sync_steps=sync_steps)
                 observer = XOSObserver(sync_steps, self.log)
-                observer_thread = threading.Thread(target=observer.run,name='synchronizer')
+                observer_thread = threading.Thread(
+                    target=observer.run, name="synchronizer"
+                )
                 observer_thread.start()
 
         else:
@@ -105,7 +116,9 @@
             self.log.info("Starting XOSPullStepEngine", pull_steps_dir=pull_steps_dir)
             pull_steps_engine = XOSPullStepEngine()
             pull_steps_engine.load_pull_step_modules(pull_steps_dir)
-            pull_steps_thread = threading.Thread(target=pull_steps_engine.start, name="pull_step_engine")
+            pull_steps_thread = threading.Thread(
+                target=pull_steps_engine.start, name="pull_step_engine"
+            )
             pull_steps_thread.start()
         else:
             self.log.info("Skipping pull step engine due to no pull_steps_dir dir.")
@@ -122,15 +135,21 @@
         # start model policies thread
         policies_dir = Config.get("model_policies_dir")
         if policies_dir:
-            policy_engine = XOSPolicyEngine(policies_dir=policies_dir, log = self.log)
-            model_policy_thread = threading.Thread(target=policy_engine.run, name="policy_engine")
-            model_policy_thread.is_policy_thread=True
+            policy_engine = XOSPolicyEngine(policies_dir=policies_dir, log=self.log)
+            model_policy_thread = threading.Thread(
+                target=policy_engine.run, name="policy_engine"
+            )
+            model_policy_thread.is_policy_thread = True
             model_policy_thread.start()
         else:
-            self.log.info("Skipping model policies thread due to no model_policies dir.")
+            self.log.info(
+                "Skipping model policies thread due to no model_policies dir."
+            )
 
         if (not observer_thread) and (not model_policy_thread) and (not event_engine):
-            self.log.info("No sync steps, no policies, and no event steps. Synchronizer exiting.")
+            self.log.info(
+                "No sync steps, no policies, and no event steps. Synchronizer exiting."
+            )
             # the caller will exit with status 0
             return
 
@@ -138,11 +157,10 @@
             try:
                 time.sleep(1000)
             except KeyboardInterrupt:
-                print "exiting due to keyboard interrupt"
+                print("exiting due to keyboard interrupt")
                 # TODO: See about setting the threads as daemons
                 if observer_thread:
                     observer_thread._Thread__stop()
                 if model_policy_thread:
                     model_policy_thread._Thread__stop()
                 sys.exit(1)
-
diff --git a/xos/synchronizers/new_base/backend_modelpolicy.py b/xos/synchronizers/new_base/backend_modelpolicy.py
index 792730c..a8e826b 100644
--- a/xos/synchronizers/new_base/backend_modelpolicy.py
+++ b/xos/synchronizers/new_base/backend_modelpolicy.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import os
 import inspect
 import imp
@@ -24,7 +25,7 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 
 class Backend:
@@ -33,6 +34,7 @@
         policies_dir = Config("model_policies_dir")
         if policies_dir:
             from synchronizers.model_policy import run_policy
+
             model_policy_thread = threading.Thread(target=run_policy)
             model_policy_thread.start()
         else:
@@ -43,8 +45,7 @@
             try:
                 time.sleep(1000)
             except KeyboardInterrupt:
-                print "exiting due to keyboard interrupt"
+                print("exiting due to keyboard interrupt")
                 if model_policy_thread:
                     model_policy_thread._Thread__stop()
                 sys.exit(1)
-
diff --git a/xos/synchronizers/new_base/deleter.py b/xos/synchronizers/new_base/deleter.py
index e14c840..894164a 100644
--- a/xos/synchronizers/new_base/deleter.py
+++ b/xos/synchronizers/new_base/deleter.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,15 +15,16 @@
 
 # NOTE this appear not to be used, can we delete it?
 
+
 class Deleter:
-	model=None # Must be overridden
+    model = None  # Must be overridden
 
-        def __init__(self, *args, **kwargs):
-                pass
+    def __init__(self, *args, **kwargs):
+        pass
 
-	def call(self, pk, model_dict):
-		# Fetch object from XOS db and delete it
-		pass
+    def call(self, pk, model_dict):
+        # Fetch object from XOS db and delete it
+        pass
 
-	def __call__(self, *args, **kwargs):
-		return self.call(*args, **kwargs)
+    def __call__(self, *args, **kwargs):
+        return self.call(*args, **kwargs)
diff --git a/xos/synchronizers/new_base/dependency_walker_new.py b/xos/synchronizers/new_base/dependency_walker_new.py
index c68f15e..138c26d 100644
--- a/xos/synchronizers/new_base/dependency_walker_new.py
+++ b/xos/synchronizers/new_base/dependency_walker_new.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,6 +19,7 @@
 #       access. Verify whether or not that's true and reconcile with
 #       generate/dependency_walker.py
 
+from __future__ import print_function
 import os
 import imp
 import inspect
@@ -33,17 +33,17 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 missing_links = {}
 
 if Config.get("dependency_graph"):
     dep_data = open(Config.get("dependency_graph")).read()
 else:
-    dep_data = '{}'
+    dep_data = "{}"
 
 dependencies = json.loads(dep_data)
-dependencies = {k:[item[0] for item in items] for k,items in dependencies.items()}
+dependencies = {k: [item[0] for item in items] for k, items in dependencies.items()}
 
 inv_dependencies = {}
 for k, lst in dependencies.items():
@@ -55,17 +55,17 @@
 
 
 def plural(name):
-    if name.endswith('s'):
-        return name + 'es'
+    if name.endswith("s"):
+        return name + "es"
     else:
-        return name + 's'
+        return name + "s"
 
 
 def walk_deps(fn, object):
     model = object.__class__.__name__
     try:
         deps = dependencies[model]
-    except:
+    except BaseException:
         deps = []
     return __walk_deps(fn, object, deps)
 
@@ -74,7 +74,7 @@
     model = object.__class__.__name__
     try:
         deps = inv_dependencies[model]
-    except:
+    except BaseException:
         deps = []
     return __walk_deps(fn, object, deps)
 
@@ -93,21 +93,25 @@
             try:
                 peer = getattr(object, link)
             except AttributeError:
-                if not missing_links.has_key(model + '.' + link):
-                    print "Model %s missing link for dependency %s" % (model, link)
-                    log.exception("WARNING: Model missing link for dependency.", model = model, link = link)
-                    missing_links[model + '.' + link] = True
+                if model + "." + link not in missing_links:
+                    print("Model %s missing link for dependency %s" % (model, link))
+                    log.exception(
+                        "WARNING: Model missing link for dependency.",
+                        model=model,
+                        link=link,
+                    )
+                    missing_links[model + "." + link] = True
 
-        if (peer):
+        if peer:
             try:
                 peer_objects = peer.all()
             except AttributeError:
                 peer_objects = [peer]
-            except:
+            except BaseException:
                 peer_objects = []
 
             for o in peer_objects:
-                if (hasattr(o, 'updated')):
+                if hasattr(o, "updated"):
                     fn(o, object)
                     ret.append(o)
                 # Uncomment the following line to enable recursion
diff --git a/xos/synchronizers/new_base/event_engine.py b/xos/synchronizers/new_base/event_engine.py
index acbc57d..e5e18d1 100644
--- a/xos/synchronizers/new_base/event_engine.py
+++ b/xos/synchronizers/new_base/event_engine.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,8 +21,7 @@
 from xosconfig import Config
 
 
-class XOSKafkaMessage():
-
+class XOSKafkaMessage:
     def __init__(self, consumer_msg):
 
         self.topic = consumer_msg.topic()
@@ -56,25 +54,31 @@
     def create_kafka_consumer(self):
         # use the service name as the group id
         consumer_config = {
-            'group.id': Config().get('name'),
-            'bootstrap.servers': ','.join(self.bootstrap_servers),
-            'default.topic.config': {'auto.offset.reset': 'smallest'},
+            "group.id": Config().get("name"),
+            "bootstrap.servers": ",".join(self.bootstrap_servers),
+            "default.topic.config": {"auto.offset.reset": "smallest"},
         }
 
         return confluent_kafka.Consumer(**consumer_config)
 
     def run(self):
         if (not self.step.topics) and (not self.step.pattern):
-            raise Exception("Neither topics nor pattern is defined for step %s" % self.step.__name__)
+            raise Exception(
+                "Neither topics nor pattern is defined for step %s" % self.step.__name__
+            )
 
         if self.step.topics and self.step.pattern:
-            raise Exception("Both topics and pattern are defined for step %s. Choose one." %
-                            self.step.__name__)
+            raise Exception(
+                "Both topics and pattern are defined for step %s. Choose one."
+                % self.step.__name__
+            )
 
-        self.log.info("Waiting for events",
-                      topic=self.step.topics,
-                      pattern=self.step.pattern,
-                      step=self.step.__name__)
+        self.log.info(
+            "Waiting for events",
+            topic=self.step.topics,
+            pattern=self.step.pattern,
+            step=self.step.__name__,
+        )
 
         while True:
             try:
@@ -88,12 +92,14 @@
                     elif self.step.pattern:
                         self.consumer.subscribe(self.step.pattern)
 
-            except confluent_kafka.KafkaError._ALL_BROKERS_DOWN, e:
-                self.log.warning("No brokers available on %s, %s" % (self.bootstrap_servers, e))
+            except confluent_kafka.KafkaError._ALL_BROKERS_DOWN as e:
+                self.log.warning(
+                    "No brokers available on %s, %s" % (self.bootstrap_servers, e)
+                )
                 time.sleep(20)
                 continue
 
-            except confluent_kafka.KafkaError, e:
+            except confluent_kafka.KafkaError as e:
                 # Maybe Kafka has not started yet. Log the exception and try again in a second.
                 self.log.exception("Exception in kafka loop: %s" % e)
                 time.sleep(1)
@@ -107,8 +113,10 @@
 
             if msg.error():
                 if msg.error().code() == confluent_kafka.KafkaError._PARTITION_EOF:
-                    self.log.debug("Reached end of kafka topic %s, partition: %s, offset: %d" %
-                              (msg.topic(), msg.partition(), msg.offset()))
+                    self.log.debug(
+                        "Reached end of kafka topic %s, partition: %s, offset: %d"
+                        % (msg.topic(), msg.partition(), msg.offset())
+                    )
                 else:
                     self.log.exception("Error in kafka message: %s" % msg.error())
 
@@ -116,13 +124,19 @@
                 # wrap parsing the event in a class
                 event_msg = XOSKafkaMessage(msg)
 
-                self.log.info("Processing event", event_msg=event_msg, step=self.step.__name__)
+                self.log.info(
+                    "Processing event", event_msg=event_msg, step=self.step.__name__
+                )
 
                 try:
                     self.step(log=self.log).process_event(event_msg)
 
-                except:
-                    self.log.exception("Exception in event step", event_msg=event_msg, step=self.step.__name__)
+                except BaseException:
+                    self.log.exception(
+                        "Exception in event step",
+                        event_msg=event_msg,
+                        step=self.step.__name__,
+                    )
 
 
 class XOSEventEngine(object):
@@ -149,7 +163,12 @@
         # NOTE we'll load all the classes that inherit from EventStep
         for fn in os.listdir(event_step_dir):
             pathname = os.path.join(event_step_dir, fn)
-            if os.path.isfile(pathname) and fn.endswith(".py") and (fn != "__init__.py") and ("test" not in fn):
+            if (
+                os.path.isfile(pathname)
+                and fn.endswith(".py")
+                and (fn != "__init__.py")
+                and ("test" not in fn)
+            ):
                 event_module = imp.load_source(fn[:-3], pathname)
 
                 for classname in dir(event_module):
@@ -157,7 +176,7 @@
 
                     if inspect.isclass(c):
                         base_names = [b.__name__ for b in c.__bases__]
-                        if 'EventStep' in base_names:
+                        if "EventStep" in base_names:
                             self.event_steps.append(c)
         self.log.info("Loaded event steps", steps=self.event_steps)
 
@@ -166,15 +185,22 @@
         eventbus_endpoint = Config.get("event_bus.endpoint")
 
         if not eventbus_kind:
-            self.log.error("Eventbus kind is not configured in synchronizer config file.")
+            self.log.error(
+                "Eventbus kind is not configured in synchronizer config file."
+            )
             return
 
         if eventbus_kind not in ["kafka"]:
-            self.log.error("Eventbus kind is set to a technology we do not implement.", eventbus_kind=eventbus_kind)
+            self.log.error(
+                "Eventbus kind is set to a technology we do not implement.",
+                eventbus_kind=eventbus_kind,
+            )
             return
 
         if not eventbus_endpoint:
-            self.log.error("Eventbus endpoint is not configured in synchronizer config file.")
+            self.log.error(
+                "Eventbus endpoint is not configured in synchronizer config file."
+            )
             return
 
         for step in self.event_steps:
@@ -183,4 +209,8 @@
                 thread.start()
                 self.threads.append(thread)
             else:
-                self.log.error("Unknown technology. Skipping step", technology=step.technology, step=step.__name__)
+                self.log.error(
+                    "Unknown technology. Skipping step",
+                    technology=step.technology,
+                    step=step.__name__,
+                )
diff --git a/xos/synchronizers/new_base/event_loop.py b/xos/synchronizers/new_base/event_loop.py
index 9c2479a..9d44413 100644
--- a/xos/synchronizers/new_base/event_loop.py
+++ b/xos/synchronizers/new_base/event_loop.py
@@ -21,7 +21,12 @@
 import json
 
 from collections import defaultdict
-from networkx import DiGraph, weakly_connected_component_subgraphs, all_shortest_paths, NetworkXNoPath
+from networkx import (
+    DiGraph,
+    weakly_connected_component_subgraphs,
+    all_shortest_paths,
+    NetworkXNoPath,
+)
 from networkx.algorithms.dag import topological_sort
 
 from synchronizers.new_base.steps import *
@@ -31,7 +36,8 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
+
 
 class StepNotReady(Exception):
     pass
@@ -40,6 +46,7 @@
 class ExternalDependencyFailed(Exception):
     pass
 
+
 # FIXME: Move drivers into a context shared across sync steps.
 
 
@@ -86,7 +93,7 @@
         self.event_cond.release()
 
     def wake_up(self):
-        self.log.debug('Wake up routine called')
+        self.log.debug("Wake up routine called")
         self.event_cond.acquire()
         self.event_cond.notify()
         self.event_cond.release()
@@ -95,11 +102,14 @@
 
         try:
             if Config.get("dependency_graph"):
-                self.log.trace('Loading model dependency graph', path=Config.get("dependency_graph"))
+                self.log.trace(
+                    "Loading model dependency graph",
+                    path=Config.get("dependency_graph"),
+                )
                 dep_graph_str = open(Config.get("dependency_graph")).read()
             else:
-                self.log.trace('Using default model dependency graph', graph={})
-                dep_graph_str = '{}'
+                self.log.trace("Using default model dependency graph", graph={})
+                dep_graph_str = "{}"
 
             # joint_dependencies is of the form { Model1 -> [(Model2, src_port, dst_port), ...] }
             # src_port is the field that accesses Model2 from Model1
@@ -108,28 +118,29 @@
             dynamic_dependencies = self.compute_service_dependencies()
 
             joint_dependencies = dict(
-                static_dependencies.items() + dynamic_dependencies)
+                static_dependencies.items() + dynamic_dependencies
+            )
 
             model_dependency_graph = DiGraph()
             for src_model, deps in joint_dependencies.items():
                 for dep in deps:
                     dst_model, src_accessor, dst_accessor = dep
                     if src_model != dst_model:
-                        edge_label = {'src_accessor': src_accessor,
-                                      'dst_accessor': dst_accessor}
+                        edge_label = {
+                            "src_accessor": src_accessor,
+                            "dst_accessor": dst_accessor,
+                        }
                         model_dependency_graph.add_edge(
-                            src_model, dst_model, edge_label)
+                            src_model, dst_model, edge_label
+                        )
 
-            model_dependency_graph_rev = model_dependency_graph.reverse(
-                copy=True)
+            model_dependency_graph_rev = model_dependency_graph.reverse(copy=True)
             self.model_dependency_graph = {
                 # deletion
                 True: model_dependency_graph_rev,
-                False: model_dependency_graph
+                False: model_dependency_graph,
             }
-            self.log.trace(
-                "Loaded dependencies",
-                edges=model_dependency_graph.edges())
+            self.log.trace("Loaded dependencies", edges=model_dependency_graph.edges())
         except Exception as e:
             self.log.exception("Error loading dependency graph", e=e)
             raise e
@@ -157,33 +168,32 @@
         self.model_to_step = model_to_step
         self.external_dependencies = list(set(external_dependencies))
         self.log.info(
-            'Loaded external dependencies',
-            external_dependencies=external_dependencies)
-        self.log.info('Loaded model_map', **model_to_step)
+            "Loaded external dependencies", external_dependencies=external_dependencies
+        )
+        self.log.info("Loaded model_map", **model_to_step)
 
     def reset_model_accessor(self, o=None):
         try:
             model_accessor.reset_queries()
         except BaseException:
             # this shouldn't happen, but in case it does, catch it...
-            if (o):
+            if o:
                 logdict = o.tologdict()
             else:
                 logdict = {}
 
             self.log.error("exception in reset_queries", **logdict)
 
-    def delete_record(self, o, dr_log = None):
+    def delete_record(self, o, dr_log=None):
 
         if dr_log is None:
             dr_log = self.log
 
         if getattr(o, "backend_need_reap", False):
             # the object has already been deleted and marked for reaping
-            model_accessor.journal_object(
-                o, "syncstep.call.already_marked_reap")
+            model_accessor.journal_object(o, "syncstep.call.already_marked_reap")
         else:
-            step = getattr(o, 'synchronizer_step', None)
+            step = getattr(o, "synchronizer_step", None)
             if not step:
                 raise ExternalDependencyFailed
 
@@ -199,7 +209,7 @@
 
             model_accessor.journal_object(o, "syncstep.call.delete_set_reap")
             o.backend_need_reap = True
-            o.save(update_fields=['backend_need_reap'])
+            o.save(update_fields=["backend_need_reap"])
 
     def sync_record(self, o, sr_log=None):
         try:
@@ -217,7 +227,7 @@
         # this now rather than after the syncstep,
         if not (o.backend_need_delete):
             o.backend_need_delete = True
-            o.save(update_fields=['backend_need_delete'])
+            o.save(update_fields=["backend_need_delete"])
 
         model_accessor.journal_object(o, "syncstep.call.sync_record")
 
@@ -230,14 +240,19 @@
         sr_log.debug("Synced object", **o.tologdict())
 
         o.enacted = max(o.updated, o.changed_by_policy)
-        scratchpad = {'next_run': 0, 'exponent': 0,
-                      'last_success': time.time()}
+        scratchpad = {"next_run": 0, "exponent": 0, "last_success": time.time()}
         o.backend_register = json.dumps(scratchpad)
         o.backend_status = "OK"
         o.backend_code = 1
         model_accessor.journal_object(o, "syncstep.call.save_update")
-        o.save(update_fields=['enacted', 'backend_status',
-                              'backend_register', 'backend_code'])
+        o.save(
+            update_fields=[
+                "enacted",
+                "backend_status",
+                "backend_register",
+                "backend_code",
+            ]
+        )
 
         if hasattr(step, "after_sync_save"):
             step.log = sr_log.new(step=step)
@@ -252,7 +267,7 @@
         self.log.exception("sync step failed!", e=e, **o.tologdict())
         current_code = o.backend_code
 
-        if hasattr(e, 'message'):
+        if hasattr(e, "message"):
             status = str(e.message)
         else:
             status = str(e)
@@ -267,13 +282,16 @@
 
         self.set_object_error(o, status, code)
 
-        dependency_error = 'Failed due to error in model %s id %d: %s' % (
-            o.leaf_model_name, o.id, status)
+        dependency_error = "Failed due to error in model %s id %d: %s" % (
+            o.leaf_model_name,
+            o.id,
+            status,
+        )
         return dependency_error, code
 
     def set_object_error(self, o, status, code):
         if o.backend_status:
-            error_list = o.backend_status.split(' // ')
+            error_list = o.backend_status.split(" // ")
         else:
             error_list = []
 
@@ -284,62 +302,63 @@
         error_list = error_list[-2:]
 
         o.backend_code = code
-        o.backend_status = ' // '.join(error_list)
+        o.backend_status = " // ".join(error_list)
 
         try:
             scratchpad = json.loads(o.backend_register)
-            scratchpad['exponent']
+            scratchpad["exponent"]
         except BaseException:
-            scratchpad = {'next_run': 0, 'exponent': 0,
-                          'last_success': time.time(), 'failures': 0}
+            scratchpad = {
+                "next_run": 0,
+                "exponent": 0,
+                "last_success": time.time(),
+                "failures": 0,
+            }
 
         # Second failure
-        if (scratchpad['exponent']):
+        if scratchpad["exponent"]:
             if code == 1:
-                delay = scratchpad['exponent'] * 60  # 1 minute
+                delay = scratchpad["exponent"] * 60  # 1 minute
             else:
-                delay = scratchpad['exponent'] * 600  # 10 minutes
+                delay = scratchpad["exponent"] * 600  # 10 minutes
 
             # cap delays at 8 hours
-            if (delay > 8 * 60 * 60):
+            if delay > 8 * 60 * 60:
                 delay = 8 * 60 * 60
-            scratchpad['next_run'] = time.time() + delay
+            scratchpad["next_run"] = time.time() + delay
 
-        scratchpad['exponent'] += 1
+        scratchpad["exponent"] += 1
 
         try:
-            scratchpad['failures'] += 1
+            scratchpad["failures"] += 1
         except KeyError:
-            scratchpad['failures'] = 1
+            scratchpad["failures"] = 1
 
-        scratchpad['last_failure'] = time.time()
+        scratchpad["last_failure"] = time.time()
 
         o.backend_register = json.dumps(scratchpad)
 
         # TOFIX:
         # DatabaseError: value too long for type character varying(140)
-        if (model_accessor.obj_exists(o)):
+        if model_accessor.obj_exists(o):
             try:
                 o.backend_status = o.backend_status[:1024]
-                o.save(update_fields=['backend_status',
-                                      'backend_register'],
-                       always_update_timestamp=True)
+                o.save(
+                    update_fields=["backend_status", "backend_register"],
+                    always_update_timestamp=True,
+                )
             except BaseException as e:
-                self.log.exception(
-                    "Could not update backend status field!", e=e)
+                self.log.exception("Could not update backend status field!", e=e)
                 pass
 
     def sync_cohort(self, cohort, deletion):
-        threading.current_thread().is_sync_thread=True
+        threading.current_thread().is_sync_thread = True
 
         sc_log = self.log.new(thread_id=threading.current_thread().ident)
 
         try:
             start_time = time.time()
-            sc_log.debug(
-                "Starting to work on cohort",
-                cohort=cohort,
-                deletion=deletion)
+            sc_log.debug("Starting to work on cohort", cohort=cohort, deletion=deletion)
 
             cohort_emptied = False
             dependency_error = None
@@ -354,27 +373,28 @@
 
                     if dependency_error:
                         self.set_object_error(
-                            o, dependency_error, dependency_error_code)
+                            o, dependency_error, dependency_error_code
+                        )
                         continue
 
                     try:
-                        if (deletion):
+                        if deletion:
                             self.delete_record(o, sc_log)
                         else:
                             self.sync_record(o, sc_log)
                     except ExternalDependencyFailed:
-                        dependency_error = 'External dependency on object %s id %d not met' % (
-                            o.leaf_model_name, o.id)
+                        dependency_error = (
+                            "External dependency on object %s id %d not met"
+                            % (o.leaf_model_name, o.id)
+                        )
                         dependency_error_code = 1
                     except (DeferredException, InnocuousException, Exception) as e:
                         dependency_error, dependency_error_code = self.handle_sync_exception(
-                            o, e)
+                            o, e
+                        )
 
                 except StopIteration:
-                    sc_log.debug(
-                        "Cohort completed",
-                        cohort=cohort,
-                        deletion=deletion)
+                    sc_log.debug("Cohort completed", cohort=cohort, deletion=deletion)
                     cohort_emptied = True
         finally:
             self.reset_model_accessor()
@@ -382,11 +402,11 @@
 
     def tenant_class_name_from_service(self, service_name):
         """ This code supports legacy functionality. To be cleaned up. """
-        name1 = service_name + 'Instance'
+        name1 = service_name + "Instance"
         if hasattr(Slice().stub, name1):
             return name1
         else:
-            name2 = service_name.replace('Service', 'Tenant')
+            name2 = service_name.replace("Service", "Tenant")
             if hasattr(Slice().stub, name2):
                 return name2
             else:
@@ -396,41 +416,54 @@
         """ FIXME: Implement more cleanly via xproto """
 
         model_names = self.model_to_step.keys()
-        ugly_tuples = [(m, m.replace('Instance', '').replace('Tenant', 'Service'))
-                       for m in model_names if m.endswith('ServiceInstance') or m.endswith('Tenant')]
+        ugly_tuples = [
+            (m, m.replace("Instance", "").replace("Tenant", "Service"))
+            for m in model_names
+            if m.endswith("ServiceInstance") or m.endswith("Tenant")
+        ]
         ugly_rtuples = [(v, k) for k, v in ugly_tuples]
 
         ugly_map = dict(ugly_tuples)
         ugly_rmap = dict(ugly_rtuples)
 
         s_model_names = [v for k, v in ugly_tuples]
-        s_models0 = [getattr(Slice().stub, model_name, None)
-                     for model_name in s_model_names]
+        s_models0 = [
+            getattr(Slice().stub, model_name, None) for model_name in s_model_names
+        ]
         s_models1 = [model.objects.first() for model in s_models0]
         s_models = [m for m in s_models1 if m is not None]
 
         dependencies = []
         for model in s_models:
-            deps = ServiceDependency.objects.filter(
-                subscriber_service_id=model.id)
+            deps = ServiceDependency.objects.filter(subscriber_service_id=model.id)
             if deps:
-                services = [self.tenant_class_name_from_service(
-                    d.provider_service.leaf_model_name) for d in deps]
-                dependencies.append((ugly_rmap[model.leaf_model_name], [
-                                    (s, '', '') for s in services]))
+                services = [
+                    self.tenant_class_name_from_service(
+                        d.provider_service.leaf_model_name
+                    )
+                    for d in deps
+                ]
+                dependencies.append(
+                    (ugly_rmap[model.leaf_model_name], [(s, "", "") for s in services])
+                )
 
         return dependencies
 
     def compute_service_instance_dependencies(self, objects):
-        link_set = [ServiceInstanceLink.objects.filter(
-            subscriber_service_instance_id=o.id) for o in objects]
+        link_set = [
+            ServiceInstanceLink.objects.filter(subscriber_service_instance_id=o.id)
+            for o in objects
+        ]
 
-        dependencies = [(l.provider_service_instance, l.subscriber_service_instance)
-                        for links in link_set for l in links]
+        dependencies = [
+            (l.provider_service_instance, l.subscriber_service_instance)
+            for links in link_set
+            for l in links
+        ]
         providers = []
 
         for p, s in dependencies:
-            if not p.enacted or p.enacted<p.updated:
+            if not p.enacted or p.enacted < p.updated:
                 p.dependent = s
                 providers.append(p)
 
@@ -440,13 +473,13 @@
         # Cleanup: Move self.driver into a synchronizer context
         # made available to every sync step.
         if not self.driver.enabled:
-            self.log.warning('Driver is not enabled. Not running sync steps.')
+            self.log.warning("Driver is not enabled. Not running sync steps.")
             return
 
         while True:
-            self.log.trace('Waiting for event or timeout')
+            self.log.trace("Waiting for event or timeout")
             self.wait_for_event(timeout=5)
-            self.log.trace('Synchronizer awake')
+            self.log.trace("Synchronizer awake")
 
             self.run_once()
 
@@ -465,7 +498,7 @@
             step = step_class(driver=self.driver)
             step.log = self.log.new(step=step)
 
-            if not hasattr(step, 'call'):
+            if not hasattr(step, "call"):
                 pending = step.fetch_pending(deletion)
                 for obj in pending:
                     step = step_class(driver=self.driver)
@@ -473,7 +506,8 @@
                     obj.synchronizer_step = step
 
                 pending_service_dependencies = self.compute_service_instance_dependencies(
-                    pending)
+                    pending
+                )
 
                 for obj in pending_service_dependencies:
                     obj.synchronizer_step = None
@@ -486,9 +520,10 @@
                 pending_steps.append(step)
 
         self.log.trace(
-            'Fetched pending data',
+            "Fetched pending data",
             pending_objects=pending_objects,
-            legacy_steps=pending_steps)
+            legacy_steps=pending_steps,
+        )
         return pending_objects, pending_steps
 
     def linked_objects(self, o):
@@ -518,10 +553,13 @@
         o1_lst, edge_type = self.linked_objects(o1)
 
         try:
-            found = next(obj for obj in o1_lst if obj.leaf_model_name ==
-                         o2.leaf_model_name and obj.pk == o2.pk)
+            found = next(
+                obj
+                for obj in o1_lst
+                if obj.leaf_model_name == o2.leaf_model_name and obj.pk == o2.pk
+            )
         except AttributeError as e:
-            self.log.exception('Compared objects could not be identified', e=e)
+            self.log.exception("Compared objects could not be identified", e=e)
             raise e
         except StopIteration:
             # This is a temporary workaround to establish dependencies between
@@ -530,7 +568,7 @@
             # the following line would change back to found = False
             # - Sapan
 
-            found = getattr(o2, 'deleted', False)
+            found = getattr(o2, "deleted", False)
 
         return found, edge_type
 
@@ -543,8 +581,8 @@
             # No dependency
             return False, None
 
-        if m1.endswith('ServiceInstance') and m2.endswith('ServiceInstance'):
-            return getattr(o2, 'dependent', None) == o1, DIRECT_EDGE
+        if m1.endswith("ServiceInstance") and m2.endswith("ServiceInstance"):
+            return getattr(o2, "dependent", None) == o1, DIRECT_EDGE
 
         # FIXME: Dynamic dependency check
         G = self.model_dependency_graph[False]
@@ -565,11 +603,10 @@
                 src = p[i]
                 dst = p[i + 1]
                 edge_label = G[src][dst]
-                sa = edge_label['src_accessor']
+                sa = edge_label["src_accessor"]
                 try:
                     dst_accessor = getattr(src_object, sa)
-                    dst_objects, link_edge_type = self.linked_objects(
-                        dst_accessor)
+                    dst_objects, link_edge_type = self.linked_objects(dst_accessor)
                     if link_edge_type == PROXY_EDGE:
                         edge_type = link_edge_type
 
@@ -595,9 +632,15 @@
                     else:
                         dst_object = dst_objects[0]
                 except AttributeError as e:
-                    if sa!='fake_accessor':
+                    if sa != "fake_accessor":
                         self.log.debug(
-                            'Could not check object dependencies, making conservative choice %s', e, src_object=src_object, sa=sa, o1=o1, o2=o2)
+                            "Could not check object dependencies, making conservative choice %s",
+                            e,
+                            src_object=src_object,
+                            sa=sa,
+                            o1=o1,
+                            o2=o2,
+                        )
                     return True, edge_type
 
                 src_object = dst_object
@@ -652,23 +695,23 @@
                         else:
                             path_args = (objects[i0], objects[i1])
 
-                        is_connected, edge_type = self.concrete_path_exists(
-                            *path_args)
+                        is_connected, edge_type = self.concrete_path_exists(*path_args)
                         if is_connected:
                             try:
-                                edge_type = oG[i1][i0]['type']
+                                edge_type = oG[i1][i0]["type"]
                                 if edge_type == PROXY_EDGE:
                                     oG.remove_edge(i1, i0)
-                                    oG.add_edge(i0, i1, {'type': edge_type})
+                                    oG.add_edge(i0, i1, {"type": edge_type})
                             except KeyError:
-                                oG.add_edge(i0, i1, {'type': edge_type})
+                                oG.add_edge(i0, i1, {"type": edge_type})
         except KeyError:
             pass
 
         components = weakly_connected_component_subgraphs(oG)
         cohort_indexes = [reversed(topological_sort(g)) for g in components]
-        cohorts = [[objects[i] for i in cohort_index]
-                   for cohort_index in cohort_indexes]
+        cohorts = [
+            [objects[i] for i in cohort_index] for cohort_index in cohort_indexes
+        ]
 
         return cohorts
 
@@ -685,18 +728,20 @@
             for deletion in (False, True):
                 objects_to_process = []
 
-                objects_to_process, steps_to_process = self.fetch_pending(
-                    deletion)
+                objects_to_process, steps_to_process = self.fetch_pending(deletion)
                 dependent_cohorts = self.compute_dependent_cohorts(
-                    objects_to_process, deletion)
+                    objects_to_process, deletion
+                )
 
                 threads = []
-                self.log.trace('In run once inner loop', deletion=deletion)
+                self.log.trace("In run once inner loop", deletion=deletion)
 
                 for cohort in dependent_cohorts:
                     thread = threading.Thread(
-                        target=self.sync_cohort, name='synchronizer', args=(
-                            cohort, deletion))
+                        target=self.sync_cohort,
+                        name="synchronizer",
+                        args=(cohort, deletion),
+                    )
 
                     threads.append(thread)
 
@@ -716,13 +761,13 @@
                     try:
                         step.call(deletion=deletion)
                     except Exception as e:
-                        self.log.exception(
-                            "Legacy step failed", step=step, e=e)
+                        self.log.exception("Legacy step failed", step=step, e=e)
 
             loop_end = time.time()
 
         except Exception as e:
             self.log.exception(
-                'Core error. This seems like a misconfiguration or bug. This error will not be relayed to the user!',
-                e=e)
+                "Core error. This seems like a misconfiguration or bug. This error will not be relayed to the user!",
+                e=e,
+            )
             self.log.error("Exception in observer run loop")
diff --git a/xos/synchronizers/new_base/eventstep.py b/xos/synchronizers/new_base/eventstep.py
index 80e89e6..9596248 100644
--- a/xos/synchronizers/new_base/eventstep.py
+++ b/xos/synchronizers/new_base/eventstep.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 class EventStep(object):
     """
     All the event steps defined in each synchronizer needs to inherit from this class in order to be loaded
@@ -36,4 +37,7 @@
     def process_event(self, event):
         # This method must be overridden in your class. Do not call the original method.
 
-        self.log.warning("There is no default process_event, please provide a process_event method", msg=event)
\ No newline at end of file
+        self.log.warning(
+            "There is no default process_event, please provide a process_event method",
+            msg=event,
+        )
diff --git a/xos/synchronizers/new_base/exceptions.py b/xos/synchronizers/new_base/exceptions.py
index a233603..3589777 100644
--- a/xos/synchronizers/new_base/exceptions.py
+++ b/xos/synchronizers/new_base/exceptions.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,8 +16,10 @@
 class SynchronizerException(Exception):
     pass
 
-class SynchronizerProgrammingError(SynchronizerException): 
+
+class SynchronizerProgrammingError(SynchronizerException):
     pass
 
+
 class SynchronizerConfigurationError(SynchronizerException):
     pass
diff --git a/xos/synchronizers/new_base/loadmodels.py b/xos/synchronizers/new_base/loadmodels.py
index dc8be4b..7e82ac9 100644
--- a/xos/synchronizers/new_base/loadmodels.py
+++ b/xos/synchronizers/new_base/loadmodels.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +16,8 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
+
 
 class ModelLoadClient(object):
     def __init__(self, api):
@@ -40,7 +40,9 @@
 
         attic_dir = os.path.join(dir, "attic")
         if os.path.exists(attic_dir):
-            log.warn("Attics are deprecated, please use the legacy=True option in xProto")
+            log.warn(
+                "Attics are deprecated, please use the legacy=True option in xProto"
+            )
             for fn in os.listdir(attic_dir):
                 if fn.endswith(".py"):
                     item = request.attics.add()
@@ -50,10 +52,9 @@
         api_convenience_dir = os.path.join(dir, "convenience")
         if os.path.exists(api_convenience_dir):
             for fn in os.listdir(api_convenience_dir):
-                if fn.endswith(".py") and not "test" in fn:
+                if fn.endswith(".py") and "test" not in fn:
                     item = request.convenience_methods.add()
                     item.filename = fn
                     item.contents = open(os.path.join(api_convenience_dir, fn)).read()
 
         result = self.api.dynamicload.LoadModels(request)
-
diff --git a/xos/synchronizers/new_base/mock_modelaccessor_build.py b/xos/synchronizers/new_base/mock_modelaccessor_build.py
index 3ab635e..9cb1d4f 100644
--- a/xos/synchronizers/new_base/mock_modelaccessor_build.py
+++ b/xos/synchronizers/new_base/mock_modelaccessor_build.py
@@ -12,18 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os, cPickle, subprocess
+import os
+import cPickle
+import subprocess
 
 """
     Support for autogenerating mock_modelaccessor.
 
     Each unit test might have its own requirements for the set of xprotos that make
-    up its model testing framework. These should always include the core, and   
-    optionally include one or more services. 
+    up its model testing framework. These should always include the core, and
+    optionally include one or more services.
 """
 
-def build_mock_modelaccessor(xos_dir, services_dir, service_xprotos, target="mock_classes.xtarget"):
-    dest_fn = os.path.join(xos_dir, "synchronizers", "new_base", "mock_modelaccessor.py")
+
+def build_mock_modelaccessor(
+    xos_dir, services_dir, service_xprotos, target="mock_classes.xtarget"
+):
+    dest_fn = os.path.join(
+        xos_dir, "synchronizers", "new_base", "mock_modelaccessor.py"
+    )
 
     args = ["xosgenx", "--target", target]
     args.append(os.path.join(xos_dir, "core/models/core.xproto"))
@@ -37,7 +44,7 @@
     if os.path.exists(context_fn):
         try:
             context = cPickle.loads(open(context_fn).read())
-            if (context == this_context):
+            if context == this_context:
                 return
         except (cPickle.UnpicklingError, EOFError):
             # Something went wrong with the file read or depickling
@@ -49,10 +56,18 @@
     if os.path.exists(dest_fn):
         os.remove(dest_fn)
 
-    p = subprocess.Popen(" ".join(args) + " > " + dest_fn, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    (stdoutdata, stderrdata) = p.communicate();
-    if (p.returncode!=0) or (not os.path.exists(dest_fn)):
-        raise Exception("Failed to create mock model accessor, returncode=%d, stdout=%s" % (p.returncode, stdoutdata))
+    p = subprocess.Popen(
+        " ".join(args) + " > " + dest_fn,
+        shell=True,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    (stdoutdata, stderrdata) = p.communicate()
+    if (p.returncode != 0) or (not os.path.exists(dest_fn)):
+        raise Exception(
+            "Failed to create mock model accessor, returncode=%d, stdout=%s"
+            % (p.returncode, stdoutdata)
+        )
 
     # Save the context of this invocation of xosgenx
     open(context_fn, "w").write(cPickle.dumps(this_context))
diff --git a/xos/synchronizers/new_base/model_policies/__init__.py b/xos/synchronizers/new_base/model_policies/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/synchronizers/new_base/model_policies/__init__.py
+++ b/xos/synchronizers/new_base/model_policies/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/synchronizers/new_base/model_policies/model_policy_tenantwithcontainer.py b/xos/synchronizers/new_base/model_policies/model_policy_tenantwithcontainer.py
index b9b6b32..afe2c67 100644
--- a/xos/synchronizers/new_base/model_policies/model_policy_tenantwithcontainer.py
+++ b/xos/synchronizers/new_base/model_policies/model_policy_tenantwithcontainer.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -65,8 +64,7 @@
         nodes = sorted(nodes, key=lambda node: node.instances.count())
 
         if not nodes:
-            raise Exception(
-                "LeastLoadedNodeScheduler: No suitable nodes to pick from")
+            raise Exception("LeastLoadedNodeScheduler: No suitable nodes to pick from")
 
         picked_node = nodes[0]
 
@@ -88,22 +86,24 @@
         return self.handle_update(tenant)
 
     def handle_update(self, service_instance):
-        if (service_instance.link_deleted_count > 0) and (not service_instance.provided_links.exists()):
+        if (service_instance.link_deleted_count > 0) and (
+            not service_instance.provided_links.exists()
+        ):
             model = globals()[self.model_name]
             self.log.info(
-                "The last provided link has been deleted -- self-destructing.")
+                "The last provided link has been deleted -- self-destructing."
+            )
             self.handle_delete(service_instance)
             if model.objects.filter(id=service_instance.id).exists():
                 service_instance.delete()
             else:
-                self.log.info("Tenant %s is already deleted" %
-                              service_instance)
+                self.log.info("Tenant %s is already deleted" % service_instance)
             return
         self.manage_container(service_instance)
 
-#    def handle_delete(self, tenant):
-#        if tenant.vcpe:
-#            tenant.vcpe.delete()
+    #    def handle_delete(self, tenant):
+    #        if tenant.vcpe:
+    #            tenant.vcpe.delete()
 
     def save_instance(self, instance):
         # Override this function to do custom pre-save or post-save processing,
@@ -111,7 +111,7 @@
         instance.save()
 
     def ip_to_mac(self, ip):
-        (a, b, c, d) = ip.split('.')
+        (a, b, c, d) = ip.split(".")
         return "02:42:%02x:%02x:%02x:%02x" % (int(a), int(b), int(c), int(d))
 
     def allocate_public_service_instance(self, **kwargs):
@@ -128,16 +128,15 @@
         am_service = am_service[0]
 
         ap = AddressPool.objects.filter(
-            name=address_pool_name, service_id=am_service.id)
+            name=address_pool_name, service_id=am_service.id
+        )
         if not ap:
-            raise Exception(
-                "Addressing service unable to find addresspool %s" % name)
+            raise Exception("Addressing service unable to find addresspool %s" % name)
         ap = ap[0]
 
         ip = ap.get_address()
         if not ip:
-            raise Exception(
-                "AddressPool '%s' has run out of addresses." % ap.name)
+            raise Exception("AddressPool '%s' has run out of addresses." % ap.name)
 
         ap.save()  # save the AddressPool to account for address being removed from it
 
@@ -149,15 +148,15 @@
         if "subscriber_tenant" in kwargs:
             subscriber_service_instance = kwargs.pop("subscriber_tenant")
         elif "subscriber_service_instance" in kwargs:
-            subscriber_service_instance = kwargs.pop(
-                "subscriber_service_instance")
+            subscriber_service_instance = kwargs.pop("subscriber_service_instance")
 
         # TODO: potential partial failure -- AddressPool address is allocated and saved before addressing tenant
 
         t = None
         try:
             t = AddressManagerServiceInstance(
-                owner=am_service, **kwargs)    # TODO: Hardcoded dependency
+                owner=am_service, **kwargs
+            )  # TODO: Hardcoded dependency
             t.public_ip = ip
             t.public_mac = self.ip_to_mac(ip)
             t.address_pool_id = ap.id
@@ -165,18 +164,21 @@
 
             if subscriber_service:
                 link = ServiceInstanceLink(
-                    subscriber_service=subscriber_service, provider_service_instance=t)
+                    subscriber_service=subscriber_service, provider_service_instance=t
+                )
                 link.save()
 
             if subscriber_service_instance:
                 link = ServiceInstanceLink(
-                    subscriber_service_instance=subscriber_service_instance, provider_service_instance=t)
+                    subscriber_service_instance=subscriber_service_instance,
+                    provider_service_instance=t,
+                )
                 link.save()
-        except:
+        except BaseException:
             # cleanup if anything went wrong
             ap.put_address(ip)
             ap.save()  # save the AddressPool to account for address being added to it
-            if (t and t.id):
+            if t and t.id:
                 t.delete()
             raise
 
@@ -193,7 +195,8 @@
             return slice.default_image
 
         raise SynchronizerProgrammingError(
-            "Please set a default image for %s" % self.slice.name)
+            "Please set a default image for %s" % self.slice.name
+        )
 
     """ get_legacy_tenant_attribute
         pick_least_loaded_instance_in_slice
@@ -227,7 +230,10 @@
     def count_of_tenants_of_an_instance(self, tenant, instance):
         tenant_count = 0
         for tenant in self.__class__.objects.all():
-            if self.get_legacy_tenant_attribute(tenant, "instance_id", None) == instance.id:
+            if (
+                self.get_legacy_tenant_attribute(tenant, "instance_id", None)
+                == instance.id
+            ):
                 tenant_count += 1
         return tenant_count
 
@@ -237,22 +243,26 @@
 
         desired_image = self.get_image(tenant)
 
-        if (tenant.instance is not None) and (tenant.instance.image.id != desired_image.id):
+        if (tenant.instance is not None) and (
+            tenant.instance.image.id != desired_image.id
+        ):
             tenant.instance.delete()
             tenant.instance = None
 
         if tenant.instance is None:
             if not tenant.owner.slices.count():
-                raise SynchronizerConfigurationError(
-                    "The service has no slices")
+                raise SynchronizerConfigurationError("The service has no slices")
 
             new_instance_created = False
             instance = None
-            if self.get_legacy_tenant_attribute(tenant, "use_same_instance_for_multiple_tenants", default=False):
+            if self.get_legacy_tenant_attribute(
+                tenant, "use_same_instance_for_multiple_tenants", default=False
+            ):
                 # Find if any existing instances can be used for this tenant
                 slices = tenant.owner.slices.all()
                 instance = self.pick_least_loaded_instance_in_slice(
-                    tenant, slices, desired_image)
+                    tenant, slices, desired_image
+                )
 
             if not instance:
                 slice = tenant.owner.slices.first()
@@ -261,43 +271,47 @@
                 if not flavor:
                     flavors = Flavor.objects.filter(name="m1.small")
                     if not flavors:
-                        raise SynchronizerConfigurationError(
-                            "No m1.small flavor")
+                        raise SynchronizerConfigurationError("No m1.small flavor")
                     flavor = flavors[0]
 
                 if slice.default_isolation == "container_vm":
                     raise Exception("Not implemented")
                 else:
-                    scheduler = getattr(self, "scheduler",
-                                        LeastLoadedNodeScheduler)
+                    scheduler = getattr(self, "scheduler", LeastLoadedNodeScheduler)
                     constrain_by_service_instance = getattr(
-                        self, 'constrain_by_service_instance', False)
+                        self, "constrain_by_service_instance", False
+                    )
                     tenant_node_label = getattr(tenant, "node_label", None)
-                    (node, parent) = scheduler(slice, label=tenant_node_label,
-                                               constrain_by_service_instance=constrain_by_service_instance).pick()
+                    (node, parent) = scheduler(
+                        slice,
+                        label=tenant_node_label,
+                        constrain_by_service_instance=constrain_by_service_instance,
+                    ).pick()
 
-                assert(slice is not None)
-                assert(node is not None)
-                assert(desired_image is not None)
-                assert(tenant.creator is not None)
-                assert(node.site_deployment.deployment is not None)
-                assert(flavor is not None)
+                assert slice is not None
+                assert node is not None
+                assert desired_image is not None
+                assert tenant.creator is not None
+                assert node.site_deployment.deployment is not None
+                assert flavor is not None
 
                 try:
-                    instance = Instance(slice=slice,
-                                        node=node,
-                                        image=desired_image,
-                                        creator=tenant.creator,
-                                        deployment=node.site_deployment.deployment,
-                                        flavor=flavor,
-                                        isolation=slice.default_isolation,
-                                        parent=parent)
+                    instance = Instance(
+                        slice=slice,
+                        node=node,
+                        image=desired_image,
+                        creator=tenant.creator,
+                        deployment=node.site_deployment.deployment,
+                        flavor=flavor,
+                        isolation=slice.default_isolation,
+                        parent=parent,
+                    )
                     self.save_instance(instance)
                     new_instance_created = True
 
                     tenant.instance = instance
                     tenant.save()
-                except:
+                except BaseException:
                     # NOTE: We don't have transactional support, so if the synchronizer crashes and exits after
                     #       creating the instance, but before adding it to the tenant, then we will leave an
                     #       orphaned instance.
diff --git a/xos/synchronizers/new_base/model_policies/test_model_policy_tenantwithcontainer.py b/xos/synchronizers/new_base/model_policies/test_model_policy_tenantwithcontainer.py
index 33b7914..ddce4a6 100644
--- a/xos/synchronizers/new_base/model_policies/test_model_policy_tenantwithcontainer.py
+++ b/xos/synchronizers/new_base/model_policies/test_model_policy_tenantwithcontainer.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,11 +18,13 @@
 import mock
 import pdb
 
-import os, sys
+import os
+import sys
 from xosconfig import Config
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
+
 
 class TestModelPolicyTenantWithContainer(unittest.TestCase):
     def setUp(self):
@@ -32,29 +33,42 @@
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'model_policies'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "model_policies")
+        )
 
-        config = basic_conf = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/test_config.yaml")
-        Config.clear() # in case left unclean by a previous test case
-        Config.init(config, 'synchronizer-config-schema.yaml')
+        config = basic_conf = os.path.abspath(
+            os.path.dirname(os.path.realpath(__file__)) + "/test_config.yaml"
+        )
+        Config.clear()  # in case left unclean by a previous test case
+        Config.init(config, "synchronizer-config-schema.yaml")
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
         import model_policy_tenantwithcontainer
-        from model_policy_tenantwithcontainer import TenantWithContainerPolicy, LeastLoadedNodeScheduler
+        from model_policy_tenantwithcontainer import (
+            TenantWithContainerPolicy,
+            LeastLoadedNodeScheduler,
+        )
 
         from mock_modelaccessor import MockObjectList
 
         # import all class names to globals
-        for (k, v) in model_policy_tenantwithcontainer.model_accessor.all_model_classes.items():
+        for (
+            k,
+            v,
+        ) in model_policy_tenantwithcontainer.model_accessor.all_model_classes.items():
             globals()[k] = v
 
         # TODO: Mock_model_accessor lacks save or delete methods
-        #Instance.save = mock.Mock
-        #Instance.delete = mock.Mock
-        #TenantWithContainer.save = mock.Mock
+        # Instance.save = mock.Mock
+        # Instance.delete = mock.Mock
+        # TenantWithContainer.save = mock.Mock
 
         self.policy = TenantWithContainerPolicy()
         self.user = User(email="testadmin@test.org")
@@ -74,41 +88,49 @@
             self.assertEqual(e.exception.message, "The service has no slices")
 
     def test_manage_container(self):
-      with patch.object(TenantWithContainer, "owner") as owner, \
-           patch.object(TenantWithContainer, "save") as tenant_save, \
-           patch.object(Node, "site_deployment") as site_deployment, \
-           patch.object(Instance, "save") as instance_save, \
-           patch.object(Instance, "delete") as instance_delete, \
-           patch.object(TenantWithContainerPolicy, "get_image") as get_image, \
-           patch.object(LeastLoadedNodeScheduler, "pick") as pick:
-        # setup mocks
-        node = Node(hostname="my.node.com")
-        slice = Slice(name="mysite_test1", default_flavor=self.flavor, default_isolation="vm")
-        image = Image(name="trusty-server-multi-nic")
-        deployment = Deployment(name="testdeployment")
-        owner.slices.count.return_value = 1
-        owner.slices.all.return_value = [slice]
-        owner.slices.first.return_value = slice
-        get_image.return_value = image
-        pick.return_value = (node, None)
-        site_deployment.deployment = deployment
-        # done setup mocks
+        with patch.object(TenantWithContainer, "owner") as owner, patch.object(
+            TenantWithContainer, "save"
+        ) as tenant_save, patch.object(
+            Node, "site_deployment"
+        ) as site_deployment, patch.object(
+            Instance, "save"
+        ) as instance_save, patch.object(
+            Instance, "delete"
+        ) as instance_delete, patch.object(
+            TenantWithContainerPolicy, "get_image"
+        ) as get_image, patch.object(
+            LeastLoadedNodeScheduler, "pick"
+        ) as pick:
+            # setup mocks
+            node = Node(hostname="my.node.com")
+            slice = Slice(
+                name="mysite_test1", default_flavor=self.flavor, default_isolation="vm"
+            )
+            image = Image(name="trusty-server-multi-nic")
+            deployment = Deployment(name="testdeployment")
+            owner.slices.count.return_value = 1
+            owner.slices.all.return_value = [slice]
+            owner.slices.first.return_value = slice
+            get_image.return_value = image
+            pick.return_value = (node, None)
+            site_deployment.deployment = deployment
+            # done setup mocks
 
-        # call manage_container
-        self.policy.manage_container(self.tenant)
+            # call manage_container
+            self.policy.manage_container(self.tenant)
 
-        # make sure manage_container did what it is supposed to do
-        self.assertNotEqual(self.tenant.instance, None)
-        self.assertEqual(self.tenant.instance.creator.email, "testadmin@test.org")
-        self.assertEqual(self.tenant.instance.image.name, "trusty-server-multi-nic")
-        self.assertEqual(self.tenant.instance.flavor.name, "m1.small")
-        self.assertEqual(self.tenant.instance.isolation, "vm")
-        self.assertEqual(self.tenant.instance.node.hostname, "my.node.com")
-        self.assertEqual(self.tenant.instance.slice.name, "mysite_test1")
-        self.assertEqual(self.tenant.instance.parent, None)
-        instance_save.assert_called()
-        instance_delete.assert_not_called()
-        tenant_save.assert_called()
+            # make sure manage_container did what it is supposed to do
+            self.assertNotEqual(self.tenant.instance, None)
+            self.assertEqual(self.tenant.instance.creator.email, "testadmin@test.org")
+            self.assertEqual(self.tenant.instance.image.name, "trusty-server-multi-nic")
+            self.assertEqual(self.tenant.instance.flavor.name, "m1.small")
+            self.assertEqual(self.tenant.instance.isolation, "vm")
+            self.assertEqual(self.tenant.instance.node.hostname, "my.node.com")
+            self.assertEqual(self.tenant.instance.slice.name, "mysite_test1")
+            self.assertEqual(self.tenant.instance.parent, None)
+            instance_save.assert_called()
+            instance_delete.assert_not_called()
+            tenant_save.assert_called()
 
     def test_manage_container_delete(self):
         self.tenant.deleted = True
@@ -120,32 +142,40 @@
         self.assertEqual(self.tenant.instance, None)
 
     def test_manage_container_no_m1_small(self):
-      with patch.object(TenantWithContainer, "owner") as owner, \
-           patch.object(Node, "site_deployment") as site_deployment, \
-           patch.object(Flavor, "objects") as flavor_objects, \
-           patch.object(TenantWithContainerPolicy, "get_image") as get_image, \
-                patch.object(LeastLoadedNodeScheduler, "pick") as pick:
-        # setup mocks
-        node = Node(hostname="my.node.com")
-        slice = Slice(name="mysite_test1", default_flavor=None, default_isolation="vm")
-        image = Image(name="trusty-server-multi-nic")
-        deployment = Deployment(name="testdeployment")
-        owner.slices.count.return_value = 1
-        owner.slices.all.return_value = [slice]
-        owner.slices.first.return_value = slice
-        get_image.return_value = image
-        pick.return_value = (node, None)
-        site_deployment.deployment = deployment
-        flavor_objects.filter.return_value = []
-        # done setup mocks
+        with patch.object(TenantWithContainer, "owner") as owner, patch.object(
+            Node, "site_deployment"
+        ) as site_deployment, patch.object(
+            Flavor, "objects"
+        ) as flavor_objects, patch.object(
+            TenantWithContainerPolicy, "get_image"
+        ) as get_image, patch.object(
+            LeastLoadedNodeScheduler, "pick"
+        ) as pick:
+            # setup mocks
+            node = Node(hostname="my.node.com")
+            slice = Slice(
+                name="mysite_test1", default_flavor=None, default_isolation="vm"
+            )
+            image = Image(name="trusty-server-multi-nic")
+            deployment = Deployment(name="testdeployment")
+            owner.slices.count.return_value = 1
+            owner.slices.all.return_value = [slice]
+            owner.slices.first.return_value = slice
+            get_image.return_value = image
+            pick.return_value = (node, None)
+            site_deployment.deployment = deployment
+            flavor_objects.filter.return_value = []
+            # done setup mocks
 
-        with self.assertRaises(Exception) as e:
-            self.policy.manage_container(self.tenant)
-        self.assertEqual(e.exception.message, "No m1.small flavor")
+            with self.assertRaises(Exception) as e:
+                self.policy.manage_container(self.tenant)
+            self.assertEqual(e.exception.message, "No m1.small flavor")
 
     def test_least_loaded_node_scheduler(self):
         with patch.object(Node.objects, "get_items") as node_objects:
-            slice = Slice(name="mysite_test1", default_flavor=None, default_isolation="vm")
+            slice = Slice(
+                name="mysite_test1", default_flavor=None, default_isolation="vm"
+            )
             node = Node(hostname="my.node.com", id=4567)
             node.instances = MockObjectList(initial=[])
             node_objects.return_value = [node]
@@ -158,7 +188,9 @@
 
     def test_least_loaded_node_scheduler_two_nodes(self):
         with patch.object(Node.objects, "get_items") as node_objects:
-            slice = Slice(name="mysite_test1", default_flavor=None, default_isolation="vm")
+            slice = Slice(
+                name="mysite_test1", default_flavor=None, default_isolation="vm"
+            )
             instance1 = Instance(id=1)
             node1 = Node(hostname="my.node.com", id=4567)
             node1.instances = MockObjectList(initial=[])
@@ -176,7 +208,9 @@
 
     def test_least_loaded_node_scheduler_two_nodes_multi(self):
         with patch.object(Node.objects, "get_items") as node_objects:
-            slice = Slice(name="mysite_test1", default_flavor=None, default_isolation="vm")
+            slice = Slice(
+                name="mysite_test1", default_flavor=None, default_isolation="vm"
+            )
             instance1 = Instance(id=1)
             instance2 = Instance(id=2)
             instance3 = Instance(id=3)
@@ -196,7 +230,9 @@
 
     def test_least_loaded_node_scheduler_with_label(self):
         with patch.object(Node.objects, "get_items") as node_objects:
-            slice = Slice(name="mysite_test1", default_flavor=None, default_isolation="vm")
+            slice = Slice(
+                name="mysite_test1", default_flavor=None, default_isolation="vm"
+            )
             instance1 = Instance(id=1)
             node1 = Node(hostname="my.node.com", id=4567)
             node1.instances = MockObjectList(initial=[])
@@ -217,10 +253,12 @@
             self.assertEqual(picked_node.id, node2.id)
 
     def test_least_loaded_node_scheduler_create_label(self):
-        with patch.object(Node.objects, "get_items") as node_objects, \
-             patch.object(NodeLabel, "save", autospec=True) as nodelabel_save, \
-             patch.object(NodeLabel, "node") as nodelabel_node_add:
-            slice = Slice(name="mysite_test1", default_flavor=None, default_isolation="vm")
+        with patch.object(Node.objects, "get_items") as node_objects, patch.object(
+            NodeLabel, "save", autospec=True
+        ) as nodelabel_save, patch.object(NodeLabel, "node") as nodelabel_node_add:
+            slice = Slice(
+                name="mysite_test1", default_flavor=None, default_isolation="vm"
+            )
             instance1 = Instance(id=1)
             node1 = Node(hostname="my.node.com", id=4567)
             node1.instances = MockObjectList(initial=[])
@@ -234,7 +272,9 @@
 
             # should pick the node with the least number of instances
 
-            sched = LeastLoadedNodeScheduler(slice, label="foo", constrain_by_service_instance = True)
+            sched = LeastLoadedNodeScheduler(
+                slice, label="foo", constrain_by_service_instance=True
+            )
             (picked_node, parent) = sched.pick()
 
             self.assertNotEqual(picked_node, None)
@@ -250,6 +290,5 @@
             NodeLabel.node.add.assert_called_with(node1)
 
 
-
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/model_policy_loop.py b/xos/synchronizers/new_base/model_policy_loop.py
index 25aaabf..1160386 100644
--- a/xos/synchronizers/new_base/model_policy_loop.py
+++ b/xos/synchronizers/new_base/model_policy_loop.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import print_function
 from synchronizers.new_base.modelaccessor import *
 from synchronizers.new_base.dependency_walker_new import *
 from synchronizers.new_base.policy import Policy
@@ -32,106 +32,135 @@
         self.log = log
 
         for policy in self.model_policies:
-            if not policy.model_name in self.policies_by_name:
+            if policy.model_name not in self.policies_by_name:
                 self.policies_by_name[policy.model_name] = []
             self.policies_by_name[policy.model_name].append(policy)
 
-            if not policy.model in self.policies_by_class:
+            if policy.model not in self.policies_by_class:
                 self.policies_by_class[policy.model] = []
             self.policies_by_class[policy.model].append(policy)
 
     def update_wp(self, d, o):
         try:
             save_fields = []
-            if (d.write_protect != o.write_protect):
+            if d.write_protect != o.write_protect:
                 d.write_protect = o.write_protect
-                save_fields.append('write_protect')
-            if (save_fields):
+                save_fields.append("write_protect")
+            if save_fields:
                 d.save(update_fields=save_fields)
-        except AttributeError,e:
+        except AttributeError as e:
             raise e
 
     def update_dep(self, d, o):
         try:
-            print 'Trying to update %s'%d
+            print("Trying to update %s" % d)
             save_fields = []
-            if (d.updated < o.updated):
-                save_fields = ['updated']
+            if d.updated < o.updated:
+                save_fields = ["updated"]
 
-            if (save_fields):
+            if save_fields:
                 d.save(update_fields=save_fields)
-        except AttributeError,e:
-            log.exception("AttributeError in update_dep", e = e)
+        except AttributeError as e:
+            log.exception("AttributeError in update_dep", e=e)
             raise e
-        except Exception,e:
-            log.exception("Exception in update_dep", e = e)
+        except Exception as e:
+            log.exception("Exception in update_dep", e=e)
 
     def delete_if_inactive(self, d, o):
         try:
             d.delete()
-            print "Deleted %s (%s)"%(d,d.__class__.__name__)
-        except:
+            print("Deleted %s (%s)" % (d, d.__class__.__name__))
+        except BaseException:
             pass
         return
 
     def load_model_policies(self, policies_dir):
-        policies=[]
+        policies = []
         for fn in os.listdir(policies_dir):
-                if fn.startswith("test"):
-                    # don't try to import unit tests!
-                    continue
-                pathname = os.path.join(policies_dir,fn)
-                if os.path.isfile(pathname) and fn.endswith(".py") and (fn!="__init__.py"):
-                    module = imp.load_source(fn[:-3], pathname)
-                    for classname in dir(module):
-                        c = getattr(module, classname, None)
+            if fn.startswith("test"):
+                # don't try to import unit tests!
+                continue
+            pathname = os.path.join(policies_dir, fn)
+            if (
+                os.path.isfile(pathname)
+                and fn.endswith(".py")
+                and (fn != "__init__.py")
+            ):
+                module = imp.load_source(fn[:-3], pathname)
+                for classname in dir(module):
+                    c = getattr(module, classname, None)
 
-                        # make sure 'c' is a descendent of Policy and has a
-                        # provides field (this eliminates the abstract base classes
-                        # since they don't have a provides)
+                    # make sure 'c' is a descendent of Policy and has a
+                    # provides field (this eliminates the abstract base classes
+                    # since they don't have a provides)
 
-                        if inspect.isclass(c) and issubclass(c, Policy) and hasattr(c, "model_name") and (
-                            c not in policies):
-                            if not c.model_name:
-                                log.info("load_model_policies: skipping model policy", classname =classname)
-                                continue
-                            if not model_accessor.has_model_class(c.model_name):
-                                log.error("load_model_policies: unable to find model policy", classname = classname, model = c.model_name)
-                            c.model = model_accessor.get_model_class(c.model_name)
-                            policies.append(c)
+                    if (
+                        inspect.isclass(c)
+                        and issubclass(c, Policy)
+                        and hasattr(c, "model_name")
+                        and (c not in policies)
+                    ):
+                        if not c.model_name:
+                            log.info(
+                                "load_model_policies: skipping model policy",
+                                classname=classname,
+                            )
+                            continue
+                        if not model_accessor.has_model_class(c.model_name):
+                            log.error(
+                                "load_model_policies: unable to find model policy",
+                                classname=classname,
+                                model=c.model_name,
+                            )
+                        c.model = model_accessor.get_model_class(c.model_name)
+                        policies.append(c)
 
-        log.info("Loaded model policies", policies = policies)
+        log.info("Loaded model policies", policies=policies)
         return policies
 
     def execute_model_policy(self, instance, action):
         # These are the models whose children get deleted when they are
-        delete_policy_models = ['Slice','Instance','Network']
+        delete_policy_models = ["Slice", "Instance", "Network"]
         sender_name = getattr(instance, "model_name", instance.__class__.__name__)
 
-        #if (action != "deleted"):
+        # if (action != "deleted"):
         #    walk_inv_deps(self.update_dep, instance)
         #    walk_deps(self.update_wp, instance)
-        #elif (sender_name in delete_policy_models):
+        # elif (sender_name in delete_policy_models):
         #    walk_inv_deps(self.delete_if_inactive, instance)
 
         policies_failed = False
         for policy in self.policies_by_name.get(sender_name, None):
-            method_name= "handle_%s" % action
+            method_name = "handle_%s" % action
             if hasattr(policy, method_name):
                 try:
-                    log.debug("MODEL POLICY: calling handler",sender_name = sender_name, instance = instance, policy = policy.__name__, method = method_name)
+                    log.debug(
+                        "MODEL POLICY: calling handler",
+                        sender_name=sender_name,
+                        instance=instance,
+                        policy=policy.__name__,
+                        method=method_name,
+                    )
                     getattr(policy(), method_name)(instance)
-                    log.debug("MODEL POLICY: completed handler",sender_name = sender_name, instance = instance, policy_name = policy.__name__, method = method_name)
-                except Exception,e:
-                    log.exception("MODEL POLICY: Exception when running handler", e = e)
+                    log.debug(
+                        "MODEL POLICY: completed handler",
+                        sender_name=sender_name,
+                        instance=instance,
+                        policy_name=policy.__name__,
+                        method=method_name,
+                    )
+                except Exception as e:
+                    log.exception("MODEL POLICY: Exception when running handler", e=e)
                     policies_failed = True
 
                     try:
                         instance.policy_status = "%s" % traceback.format_exc(limit=1)
                         instance.policy_code = 2
                         instance.save(update_fields=["policy_status", "policy_code"])
-                    except Exception,e:
-                        log.exception("MODEL_POLICY: Exception when storing policy_status", e = e)
+                    except Exception as e:
+                        log.exception(
+                            "MODEL_POLICY: Exception when storing policy_status", e=e
+                        )
 
         if not policies_failed:
             try:
@@ -139,53 +168,56 @@
                 instance.policy_status = "done"
                 instance.policy_code = 1
 
-                instance.save(update_fields=['policed', 'policy_status', 'policy_code'])
+                instance.save(update_fields=["policed", "policy_status", "policy_code"])
 
                 if hasattr(policy, "after_policy_save"):
                     policy().after_policy_save(instance)
 
                 log.info("MODEL_POLICY: Saved", o=instance)
-            except:
-                log.exception('MODEL POLICY: Object failed to update policed timestamp', instance =instance)
+            except BaseException:
+                log.exception(
+                    "MODEL POLICY: Object failed to update policed timestamp",
+                    instance=instance,
+                )
 
-    def noop(self, o,p):
-            pass
+    def noop(self, o, p):
+        pass
 
     def run(self):
-        while (True):
+        while True:
             start = time.time()
             try:
                 self.run_policy_once()
-            except Exception,e:
-                log.exception("MODEL_POLICY: Exception in run()", e = e)
-            if (time.time() - start < 5):
+            except Exception as e:
+                log.exception("MODEL_POLICY: Exception in run()", e=e)
+            if time.time() - start < 5:
                 time.sleep(5)
 
     # TODO: This loop is different from the synchronizer event_loop, but they both do mostly the same thing. Look for
     # ways to combine them.
 
     def run_policy_once(self):
-            models = self.policies_by_class.keys()
+        models = self.policies_by_class.keys()
 
-            model_accessor.check_db_connection_okay()
+        model_accessor.check_db_connection_okay()
 
-            objects = model_accessor.fetch_policies(models, False)
-            deleted_objects = model_accessor.fetch_policies(models, True)
+        objects = model_accessor.fetch_policies(models, False)
+        deleted_objects = model_accessor.fetch_policies(models, True)
 
-            for o in objects:
-                if o.deleted:
-                    # This shouldn't happen, but previous code was examining o.deleted. Verify.
-                    continue
-                if not o.policed:
-                    self.execute_model_policy(o, "create")
-                else:
-                    self.execute_model_policy(o, "update")
+        for o in objects:
+            if o.deleted:
+                # This shouldn't happen, but previous code was examining o.deleted. Verify.
+                continue
+            if not o.policed:
+                self.execute_model_policy(o, "create")
+            else:
+                self.execute_model_policy(o, "update")
 
-            for o in deleted_objects:
-                self.execute_model_policy(o, "delete")
+        for o in deleted_objects:
+            self.execute_model_policy(o, "delete")
 
-            try:
-                model_accessor.reset_queries()
-            except Exception,e:
-                # this shouldn't happen, but in case it does, catch it...
-                log.exception("MODEL POLICY: exception in reset_queries", e)
+        try:
+            model_accessor.reset_queries()
+        except Exception as e:
+            # this shouldn't happen, but in case it does, catch it...
+            log.exception("MODEL POLICY: exception in reset_queries", e)
diff --git a/xos/synchronizers/new_base/modelaccessor.py b/xos/synchronizers/new_base/modelaccessor.py
index 7dbd2d6..926a17b 100644
--- a/xos/synchronizers/new_base/modelaccessor.py
+++ b/xos/synchronizers/new_base/modelaccessor.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,7 +35,7 @@
 from multistructlog import create_logger
 from xosutil.autodiscover_version import autodiscover_version_of_main
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 orig_sigint = None
 model_accessor = None
@@ -120,6 +119,7 @@
     # xosbase doesn't exist from the synchronizer's perspective, so fake out
     # ModelLink.
     if "ModelLink" not in globals():
+
         class ModelLink:
             def __init__(self, dest, via, into=None):
                 self.dest = dest
@@ -139,14 +139,14 @@
 
     try:
         client.utility.NoOp(Empty())
-    except Exception,e:
+    except Exception as e:
         # If we caught an exception, then the API has become unavailable.
         # So reconnect.
 
         log.exception("exception in NoOp", e=e)
         log.info("restarting synchronizer")
 
-        os.execv(sys.executable, ['python'] + sys.argv)
+        os.execv(sys.executable, ["python"] + sys.argv)
         return
 
     reactor.callLater(1, functools.partial(keep_trying, client, reactor))
@@ -162,9 +162,16 @@
         version = autodiscover_version_of_main(max_parent_depth=0) or "unknown"
         log.info("Service version is %s" % version)
         try:
-            ModelLoadClient(client).upload_models(Config.get("name"), Config.get("models_dir"), version=version)
-        except Exception, e:  # TODO: narrow exception scope
-            if (hasattr(e, "code") and callable(e.code) and hasattr(e.code(), "name") and (e.code().name) == "UNAVAILABLE"):
+            ModelLoadClient(client).upload_models(
+                Config.get("name"), Config.get("models_dir"), version=version
+            )
+        except Exception as e:  # TODO: narrow exception scope
+            if (
+                hasattr(e, "code")
+                and callable(e.code)
+                and hasattr(e.code(), "name")
+                and (e.code().name) == "UNAVAILABLE"
+            ):
                 # We need to make sure we force a reconnection, as it's possible that we will end up downloading a
                 # new xos API.
                 log.info("grpc unavailable during loadmodels. Force a reconnect")
@@ -189,6 +196,7 @@
     client.xos_orm.restart_on_disconnect = True
 
     from apiaccessor import CoreApiModelAccessor
+
     model_accessor = CoreApiModelAccessor(orm=client.xos_orm)
 
     # If required_models is set, then check to make sure the required_models
@@ -207,9 +215,9 @@
             else:
                 missing.append(model)
 
-        log.info("required_models, found:", models =  ", ".join(found))
+        log.info("required_models, found:", models=", ".join(found))
         if missing:
-            log.warning("required_models: missing",models = ", ".join(missing))
+            log.warning("required_models: missing", models=", ".join(missing))
             # We're missing a required model. Give up and wait for the connection
             # to reconnect, and hope our missing model has shown up.
             reactor.callLater(1, functools.partial(keep_trying, client, reactor))
@@ -224,6 +232,7 @@
     # Restore the sigint handler
     signal.signal(signal.SIGINT, orig_sigint)
 
+
 def config_accessor_grpcapi():
     global orig_sigint
 
@@ -243,8 +252,12 @@
     from xosapi.xos_grpc_client import SecureClient
     from twisted.internet import reactor
 
-    grpcapi_client = SecureClient(endpoint=grpcapi_endpoint, username=grpcapi_username, password=grpcapi_password)
-    grpcapi_client.set_reconnect_callback(functools.partial(grpcapi_reconnect, grpcapi_client, reactor))
+    grpcapi_client = SecureClient(
+        endpoint=grpcapi_endpoint, username=grpcapi_username, password=grpcapi_password
+    )
+    grpcapi_client.set_reconnect_callback(
+        functools.partial(grpcapi_reconnect, grpcapi_client, reactor)
+    )
     grpcapi_client.start()
 
     # Start reactor. This will cause the client to connect and then execute
@@ -259,13 +272,16 @@
 
     reactor.run()
 
+
 def config_accessor_mock():
     global model_accessor
     from mock_modelaccessor import model_accessor as mock_model_accessor
+
     model_accessor = mock_model_accessor
 
     # mock_model_accessor doesn't have an all_model_classes field, so make one.
     import mock_modelaccessor as mma
+
     all_model_classes = {}
     for k in dir(mma):
         v = getattr(mma, k)
@@ -276,6 +292,7 @@
 
     import_models_to_globals()
 
+
 def config_accessor():
     accessor_kind = Config.get("accessor.kind")
 
@@ -291,4 +308,5 @@
         for wrapper_name in Config.get("wrappers"):
             importlib.import_module(wrapper_name)
 
+
 config_accessor()
diff --git a/xos/synchronizers/new_base/policy.py b/xos/synchronizers/new_base/policy.py
index fa01902..b455c79 100644
--- a/xos/synchronizers/new_base/policy.py
+++ b/xos/synchronizers/new_base/policy.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,7 +21,8 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
+
 
 class Policy(object):
     """ An XOS Model Policy
@@ -38,4 +38,3 @@
 
     def __init__(self):
         self.logger = log
-
diff --git a/xos/synchronizers/new_base/pull_step_engine.py b/xos/synchronizers/new_base/pull_step_engine.py
index 0d989a1..3f4732d 100644
--- a/xos/synchronizers/new_base/pull_step_engine.py
+++ b/xos/synchronizers/new_base/pull_step_engine.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,10 +20,10 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 
-class XOSPullStepScheduler():
+class XOSPullStepScheduler:
     """ XOSPullStepThread
 
         A Thread for servicing pull steps. There is one event_step associated with one XOSPullStepThread.
@@ -40,11 +39,11 @@
             self.run_once()
 
     def run_once(self):
-        log.trace('Starting pull steps', steps=self.steps)
+        log.trace("Starting pull steps", steps=self.steps)
 
         threads = []
         for step in self.steps:
-            thread = threading.Thread(target=step().pull_records, name='pull_step')
+            thread = threading.Thread(target=step().pull_records, name="pull_step")
             threads.append(thread)
 
         for t in threads:
@@ -53,7 +52,7 @@
         for t in threads:
             t.join()
 
-        log.trace('Done with pull steps', steps=self.steps)
+        log.trace("Done with pull steps", steps=self.steps)
 
 
 class XOSPullStepEngine:
@@ -78,7 +77,12 @@
         # NOTE we'll load all the classes that inherit from PullStep
         for fn in os.listdir(pull_step_dir):
             pathname = os.path.join(pull_step_dir, fn)
-            if os.path.isfile(pathname) and fn.endswith(".py") and (fn != "__init__.py") and ("test" not in fn):
+            if (
+                os.path.isfile(pathname)
+                and fn.endswith(".py")
+                and (fn != "__init__.py")
+                and ("test" not in fn)
+            ):
                 event_module = imp.load_source(fn[:-3], pathname)
 
                 for classname in dir(event_module):
@@ -86,7 +90,7 @@
 
                     if inspect.isclass(c):
                         base_names = [b.__name__ for b in c.__bases__]
-                        if 'PullStep' in base_names:
+                        if "PullStep" in base_names:
                             self.pull_steps.append(c)
         log.info("Loaded pull steps", steps=self.pull_steps)
 
diff --git a/xos/synchronizers/new_base/pullstep.py b/xos/synchronizers/new_base/pullstep.py
index 1300ea6..adbc0b1 100644
--- a/xos/synchronizers/new_base/pullstep.py
+++ b/xos/synchronizers/new_base/pullstep.py
@@ -12,17 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 class PullStep(object):
     """
     All the pull steps defined in each synchronizer needs to inherit from this class in order to be loaded
     """
+
     def __init__(self, **kwargs):
         """
         Initialize a pull step
         :param kwargs:
         -- observed_model: name of the model that is being polled
         """
-        self.observed_model = kwargs.get('observed_model')
+        self.observed_model = kwargs.get("observed_model")
 
     def pull_records(self):
-        self.log.debug("There is no default pull_records, please provide a pull_records method for %s" % self.observed_model)
\ No newline at end of file
+        self.log.debug(
+            "There is no default pull_records, please provide a pull_records method for %s"
+            % self.observed_model
+        )
diff --git a/xos/synchronizers/new_base/steps/__init__.py b/xos/synchronizers/new_base/steps/__init__.py
index 563b1bb..4ea5d64 100644
--- a/xos/synchronizers/new_base/steps/__init__.py
+++ b/xos/synchronizers/new_base/steps/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +13,9 @@
 # limitations under the License.
 
 
-#from .sync_controller_sites import SyncControllerSites
-#from .sync_controller_slices import SyncControllerSlices
-#from .sync_controller_users import SyncControllerUsers
-#from .sync_controller_site_privileges import SyncControllerSitePrivileges
-#from .sync_controller_slice_privileges import SyncControllerSlicePrivileges
-#from .sync_controller_networks import SyncControllerNetworks
+# from .sync_controller_sites import SyncControllerSites
+# from .sync_controller_slices import SyncControllerSlices
+# from .sync_controller_users import SyncControllerUsers
+# from .sync_controller_site_privileges import SyncControllerSitePrivileges
+# from .sync_controller_slice_privileges import SyncControllerSlicePrivileges
+# from .sync_controller_networks import SyncControllerNetworks
diff --git a/xos/synchronizers/new_base/steps/sync_object.py b/xos/synchronizers/new_base/steps/sync_object.py
index d26f706..1fb5894 100644
--- a/xos/synchronizers/new_base/steps/sync_object.py
+++ b/xos/synchronizers/new_base/steps/sync_object.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,10 +15,11 @@
 
 from synchronizers.new_base.syncstep import *
 
+
 class SyncObject(SyncStep):
-    provides=[] # Caller fills this in
-    requested_interval=0
-    observes=[] # Caller fills this in
+    provides = []  # Caller fills this in
+    requested_interval = 0
+    observes = []  # Caller fills this in
 
     def sync_record(self, r):
-        raise DeferredException('Waiting for Service dependency: %r'%r)
+        raise DeferredException("Waiting for Service dependency: %r" % r)
diff --git a/xos/synchronizers/new_base/syncstep.py b/xos/synchronizers/new_base/syncstep.py
index fcb1cb3..8c92f71 100644
--- a/xos/synchronizers/new_base/syncstep.py
+++ b/xos/synchronizers/new_base/syncstep.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,13 +19,16 @@
 from xosconfig import Config
 from synchronizers.new_base.modelaccessor import *
 from synchronizers.new_base.ansible_helper import run_template
-#from tests.steps.mock_modelaccessor import model_accessor
+
+# from tests.steps.mock_modelaccessor import model_accessor
 
 import json
 import time
 import pdb
 
 from xosconfig import Config
+from functools import reduce
+
 
 def f7(seq):
     seen = set()
@@ -35,13 +37,13 @@
 
 
 def elim_dups(backend_str):
-    strs = backend_str.split(' // ')
+    strs = backend_str.split(" // ")
     strs2 = f7(strs)
-    return ' // '.join(strs2)
+    return " // ".join(strs2)
 
 
 def deepgetattr(obj, attr):
-    return reduce(getattr, attr.split('.'), obj)
+    return reduce(getattr, attr.split("."), obj)
 
 
 def obj_class_name(obj):
@@ -78,7 +80,7 @@
     def get_prop(self, prop):
         # NOTE config_dir is never define, is this used?
         sync_config_dir = Config.get("config_dir")
-        prop_config_path = '/'.join(sync_config_dir, self.name, prop)
+        prop_config_path = "/".join(sync_config_dir, self.name, prop)
         return open(prop_config_path).read().rstrip()
 
     def __init__(self, **args):
@@ -88,16 +90,16 @@
                 provides -- XOS models sync'd by this step
         """
         dependencies = []
-        self.driver = args.get('driver')
-        self.error_map = args.get('error_map')
+        self.driver = args.get("driver")
+        self.error_map = args.get("error_map")
 
         try:
-            self.soft_deadline = int(self.get_prop('soft_deadline_seconds'))
-        except:
+            self.soft_deadline = int(self.get_prop("soft_deadline_seconds"))
+        except BaseException:
             self.soft_deadline = 5  # 5 seconds
 
-        if 'log' in args:
-            self.log = args.get('log')
+        if "log" in args:
+            self.log = args.get("log")
 
         return
 
@@ -116,10 +118,10 @@
             return
 
         main_objs = self.observes
-        if (type(main_objs) is list):
+        if isinstance(main_objs, list):
             main_objs = main_objs[0]
 
-        path = ''.join(main_objs.__name__).lower()
+        path = "".join(main_objs.__name__).lower()
         res = run_template(self.playbook, tenant_fields, path=path, object=o)
 
         if hasattr(self, "map_sync_outputs"):
@@ -137,12 +139,12 @@
         tenant_fields = self.map_delete_inputs(o)
 
         main_objs = self.observes
-        if (type(main_objs) is list):
+        if isinstance(main_objs, list):
             main_objs = main_objs[0]
 
-        path = ''.join(main_objs.__name__).lower()
+        path = "".join(main_objs.__name__).lower()
 
-        tenant_fields['delete'] = True
+        tenant_fields["delete"] = True
         res = run_template(self.playbook, tenant_fields, path=path)
 
         if hasattr(self, "map_delete_outputs"):
diff --git a/xos/synchronizers/new_base/tests/__init__.py b/xos/synchronizers/new_base/tests/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/synchronizers/new_base/tests/__init__.py
+++ b/xos/synchronizers/new_base/tests/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/synchronizers/new_base/tests/event_steps/event_step.py b/xos/synchronizers/new_base/tests/event_steps/event_step.py
index d83fbe7..1fa47e1 100644
--- a/xos/synchronizers/new_base/tests/event_steps/event_step.py
+++ b/xos/synchronizers/new_base/tests/event_steps/event_step.py
@@ -12,9 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 from synchronizers.new_base.eventstep import EventStep
 from mock_modelaccessor import *
 
+
 class TestEventStep(EventStep):
     technology = "kafka"
     topics = ["sometopic"]
@@ -24,5 +26,4 @@
         super(TestEventStep, self).__init__(log, *args, **kwargs)
 
     def process_event(self, event):
-        print "received an event", event
-
+        print("received an event", event)
diff --git a/xos/synchronizers/new_base/tests/pull_steps/__init__.py b/xos/synchronizers/new_base/tests/pull_steps/__init__.py
index eb28b96..b0fb0b2 100644
--- a/xos/synchronizers/new_base/tests/pull_steps/__init__.py
+++ b/xos/synchronizers/new_base/tests/pull_steps/__init__.py
@@ -10,4 +10,4 @@
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
-# limitations under the License.
\ No newline at end of file
+# limitations under the License.
diff --git a/xos/synchronizers/new_base/tests/pull_steps/pull_step.py b/xos/synchronizers/new_base/tests/pull_steps/pull_step.py
index 2209b99..0f29433 100644
--- a/xos/synchronizers/new_base/tests/pull_steps/pull_step.py
+++ b/xos/synchronizers/new_base/tests/pull_steps/pull_step.py
@@ -15,6 +15,7 @@
 from synchronizers.new_base.pullstep import PullStep
 from mock_modelaccessor import *
 
+
 class TestPullStep(PullStep):
     def __init__(self):
-        super(TestPullStep, self).__init__(observed_model=Instance)
\ No newline at end of file
+        super(TestPullStep, self).__init__(observed_model=Instance)
diff --git a/xos/synchronizers/new_base/tests/steps/__init__.py b/xos/synchronizers/new_base/tests/steps/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/synchronizers/new_base/tests/steps/__init__.py
+++ b/xos/synchronizers/new_base/tests/steps/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/synchronizers/new_base/tests/steps/sync_container.py b/xos/synchronizers/new_base/tests/steps/sync_container.py
index 6eeb975..51bf872 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_container.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_container.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -27,12 +26,13 @@
 from synchronizers.new_base.syncstep import SyncStep
 
 # hpclibrary will be in steps/..
-parentdir = os.path.join(os.path.dirname(__file__),"..")
+parentdir = os.path.join(os.path.dirname(__file__), "..")
 sys.path.insert(0, parentdir)
 
+
 class SyncContainer(SyncInstanceUsingAnsible):
-    provides=[Instance]
-    observes=Instance
+    provides = [Instance]
+    observes = Instance
     template_name = "sync_container.yaml"
 
     def __init__(self, *args, **kwargs):
@@ -46,7 +46,7 @@
         k = Instance()
         k.name = "Embarrassed Cat"
 
-        objs = [i,j,k]
+        objs = [i, j, k]
         return objs
 
     def sync_record(self, o):
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_images.py b/xos/synchronizers/new_base/tests/steps/sync_controller_images.py
index c47b389..5545c74 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_images.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_images.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,11 +19,12 @@
 from synchronizers.new_base.ansible_helper import *
 from mock_modelaccessor import *
 
+
 class SyncControllerImages(SyncStep):
-    provides=[ControllerImages]
+    provides = [ControllerImages]
     observes = ControllerImages
-    requested_interval=0
-    playbook='sync_controller_images.yaml'
+    requested_interval = 0
+    playbook = "sync_controller_images.yaml"
 
     def fetch_pending(self, deleted):
         ci = ControllerImages()
@@ -34,20 +34,23 @@
         return [ci]
 
     def map_sync_inputs(self, controller_image):
-        image_fields = {'endpoint':controller_image.controller.auth_url,
-                        'endpoint_v3': controller_image.controller.auth_url_v3,
-                        'admin_user':controller_image.controller.admin_user,
-                        'admin_password':controller_image.controller.admin_password,
-                        'domain': controller_image.controller.domain,
-                        'name':controller_image.image.name,
-                        'filepath':controller_image.image.path,
-                        'ansible_tag': '%s@%s'%(controller_image.image.name,controller_image.controller.name), # name of ansible playbook
-                        }
+        image_fields = {
+            "endpoint": controller_image.controller.auth_url,
+            "endpoint_v3": controller_image.controller.auth_url_v3,
+            "admin_user": controller_image.controller.admin_user,
+            "admin_password": controller_image.controller.admin_password,
+            "domain": controller_image.controller.domain,
+            "name": controller_image.image.name,
+            "filepath": controller_image.image.path,
+            # name of ansible playbook
+            "ansible_tag": "%s@%s"
+            % (controller_image.image.name, controller_image.controller.name),
+        }
 
-	return image_fields
+        return image_fields
 
     def map_sync_outputs(self, controller_image, res):
-        image_id = res[0]['id']
+        image_id = res[0]["id"]
         controller_image.glance_image_id = image_id
-	controller_image.backend_status = '1 - OK'
+        controller_image.backend_status = "1 - OK"
         controller_image.save()
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_networks.py b/xos/synchronizers/new_base/tests/steps/sync_controller_networks.py
index 793b87d..c81ab6a 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_networks.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_networks.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,41 +22,41 @@
 from synchronizers.new_base.ansible_helper import *
 from mock_modelaccessor import *
 
+
 class SyncControllerNetworks(SyncStep):
     requested_interval = 0
-    provides=[Network]
-    observes=ControllerNetwork	
+    provides = [Network]
+    observes = ControllerNetwork
     external_dependencies = [User]
-    playbook='sync_controller_networks.yaml'
+    playbook = "sync_controller_networks.yaml"
 
     def fetch_pending(self, deleted):
         ci = ControllerNetwork()
         i = Network()
         i.name = "Lush Loss"
-	s = Slice()
-	s.name = "Ghastly Notebook"
-	i.owner = s
+        s = Slice()
+        s.name = "Ghastly Notebook"
+        i.owner = s
         ci.i = i
         return [ci]
 
-    def map_sync_outputs(self, controller_network,res):
-        network_id = res[0]['network']['id']
-        subnet_id = res[1]['subnet']['id']
+    def map_sync_outputs(self, controller_network, res):
+        network_id = res[0]["network"]["id"]
+        subnet_id = res[1]["subnet"]["id"]
         controller_network.net_id = network_id
         controller_network.subnet = self.cidr
         controller_network.subnet_id = subnet_id
-	controller_network.backend_status = '1 - OK'
+        controller_network.backend_status = "1 - OK"
         if not controller_network.segmentation_id:
-            controller_network.segmentation_id = str(self.get_segmentation_id(controller_network))
+            controller_network.segmentation_id = str(
+                self.get_segmentation_id(controller_network)
+            )
         controller_network.save()
 
     def map_sync_inputs(self, controller_network):
         pass
 
     def map_delete_inputs(self, controller_network):
-	network_fields = {'endpoint':None,
-		    'delete':True	
-                    }
+        network_fields = {"endpoint": None, "delete": True}
 
         return network_fields
-
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_site_privileges.py b/xos/synchronizers/new_base/tests/steps/sync_controller_site_privileges.py
index 680dc79..5f4e50f 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_site_privileges.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_site_privileges.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,72 +20,90 @@
 from synchronizers.new_base.ansible_helper import *
 from mock_modelaccessor import *
 
+
 class SyncControllerSitePrivileges(SyncStep):
-    provides=[SitePrivilege]
-    requested_interval=0
-    observes=ControllerSitePrivilege
-    playbook='sync_controller_users.yaml'
+    provides = [SitePrivilege]
+    requested_interval = 0
+    observes = ControllerSitePrivilege
+    playbook = "sync_controller_users.yaml"
 
     def map_sync_inputs(self, controller_site_privilege):
-	controller_register = json.loads(controller_site_privilege.controller.backend_register)
+        controller_register = json.loads(
+            controller_site_privilege.controller.backend_register
+        )
         if not controller_site_privilege.controller.admin_user:
             return
 
         roles = [controller_site_privilege.site_privilege.role.role]
-	# setup user home site roles at controller 
+        # setup user home site roles at controller
         if not controller_site_privilege.site_privilege.user.site:
-            raise Exception('Siteless user %s'%controller_site_privilege.site_privilege.user.email)
+            raise Exception(
+                "Siteless user %s" % controller_site_privilege.site_privilege.user.email
+            )
         else:
             # look up tenant id for the user's site at the controller
-            #ctrl_site_deployments = SiteDeployment.objects.filter(
+            # ctrl_site_deployments = SiteDeployment.objects.filter(
             #  site_deployment__site=controller_site_privilege.user.site,
             #  controller=controller_site_privilege.controller)
 
-            #if ctrl_site_deployments:
+            # if ctrl_site_deployments:
             #    # need the correct tenant id for site at the controller
-            #    tenant_id = ctrl_site_deployments[0].tenant_id  
+            #    tenant_id = ctrl_site_deployments[0].tenant_id
             #    tenant_name = ctrl_site_deployments[0].site_deployment.site.login_base
             user_fields = {
-               'endpoint':controller_site_privilege.controller.auth_url,
-               'endpoint_v3': controller_site_privilege.controller.auth_url_v3,
-               'domain': controller_site_privilege.controller.domain,
-		       'name': controller_site_privilege.site_privilege.user.email,
-               'email': controller_site_privilege.site_privilege.user.email,
-               'password': controller_site_privilege.site_privilege.user.remote_password,
-               'admin_user': controller_site_privilege.controller.admin_user,
-		       'admin_password': controller_site_privilege.controller.admin_password,
-	           'ansible_tag':'%s@%s'%(controller_site_privilege.site_privilege.user.email.replace('@','-at-'),controller_site_privilege.controller.name),
-		       'admin_tenant': controller_site_privilege.controller.admin_tenant,
-		       'roles':roles,
-		       'tenant':controller_site_privilege.site_privilege.site.login_base}    
-	
-	    return user_fields
+                "endpoint": controller_site_privilege.controller.auth_url,
+                "endpoint_v3": controller_site_privilege.controller.auth_url_v3,
+                "domain": controller_site_privilege.controller.domain,
+                "name": controller_site_privilege.site_privilege.user.email,
+                "email": controller_site_privilege.site_privilege.user.email,
+                "password": controller_site_privilege.site_privilege.user.remote_password,
+                "admin_user": controller_site_privilege.controller.admin_user,
+                "admin_password": controller_site_privilege.controller.admin_password,
+                "ansible_tag": "%s@%s"
+                % (
+                    controller_site_privilege.site_privilege.user.email.replace(
+                        "@", "-at-"
+                    ),
+                    controller_site_privilege.controller.name,
+                ),
+                "admin_tenant": controller_site_privilege.controller.admin_tenant,
+                "roles": roles,
+                "tenant": controller_site_privilege.site_privilege.site.login_base,
+            }
+
+            return user_fields
 
     def map_sync_outputs(self, controller_site_privilege, res):
-	    # results is an array in which each element corresponds to an 
-	    # "ok" string received per operation. If we get as many oks as
-	    # the number of operations we issued, that means a grand success.
-	    # Otherwise, the number of oks tell us which operation failed.
-            controller_site_privilege.role_id = res[0]['id']
-            controller_site_privilege.save()
+        # results is an array in which each element corresponds to an
+        # "ok" string received per operation. If we get as many oks as
+        # the number of operations we issued, that means a grand success.
+        # Otherwise, the number of oks tell us which operation failed.
+        controller_site_privilege.role_id = res[0]["id"]
+        controller_site_privilege.save()
 
     def delete_record(self, controller_site_privilege):
-	controller_register = json.loads(controller_site_privilege.controller.backend_register)
-        if (controller_register.get('disabled',False)):
-                raise InnocuousException('Controller %s is disabled'%controller_site_privilege.controller.name)
+        controller_register = json.loads(
+            controller_site_privilege.controller.backend_register
+        )
+        if controller_register.get("disabled", False):
+            raise InnocuousException(
+                "Controller %s is disabled" % controller_site_privilege.controller.name
+            )
 
         if controller_site_privilege.role_id:
-            driver = self.driver.admin_driver(controller=controller_site_privilege.controller)
+            driver = self.driver.admin_driver(
+                controller=controller_site_privilege.controller
+            )
             user = ControllerUser.objects.get(
-                controller=controller_site_privilege.controller, 
-                user=controller_site_privilege.site_privilege.user
+                controller=controller_site_privilege.controller,
+                user=controller_site_privilege.site_privilege.user,
             )
             site = ControllerSite.objects.get(
-                controller=controller_site_privilege.controller, 
-                user=controller_site_privilege.site_privilege.user
+                controller=controller_site_privilege.controller,
+                user=controller_site_privilege.site_privilege.user,
             )
             driver.delete_user_role(
-                user.kuser_id, 
-                site.tenant_id, 
-                controller_site_privilege.site_prvilege.role.role
+                user.kuser_id,
+                site.tenant_id,
+                controller_site_privilege.site_privilege.role.role,
             )
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_sites.py b/xos/synchronizers/new_base/tests/steps/sync_controller_sites.py
index cc7e357..5da9ca7 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_sites.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_sites.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,11 +20,12 @@
 import json
 from mock_modelaccessor import *
 
+
 class SyncControllerSites(SyncStep):
-    requested_interval=0
-    provides=[Site]
-    observes=ControllerSite
-    playbook = 'sync_controller_sites.yaml'
+    requested_interval = 0
+    provides = [Site]
+    observes = ControllerSite
+    playbook = "sync_controller_sites.yaml"
 
     def fetch_pending(self, deleted=False):
         lobjs = super(SyncControllerSites, self).fetch_pending(deleted)
@@ -37,48 +37,54 @@
         return lobjs
 
     def map_sync_inputs(self, controller_site):
-	tenant_fields = {'endpoint':controller_site.controller.auth_url,
-                 'endpoint_v3': controller_site.controller.auth_url_v3,
-                 'domain': controller_site.controller.domain,
-		         'admin_user': controller_site.controller.admin_user,
-		         'admin_password': controller_site.controller.admin_password,
-		         'admin_tenant': controller_site.controller.admin_tenant,
-	             'ansible_tag': '%s@%s'%(controller_site.site.login_base,controller_site.controller.name), # name of ansible playbook
-		         'tenant': controller_site.site.login_base,
-		         'tenant_description': controller_site.site.name}
+        tenant_fields = {
+            "endpoint": controller_site.controller.auth_url,
+            "endpoint_v3": controller_site.controller.auth_url_v3,
+            "domain": controller_site.controller.domain,
+            "admin_user": controller_site.controller.admin_user,
+            "admin_password": controller_site.controller.admin_password,
+            "admin_tenant": controller_site.controller.admin_tenant,
+            # name of ansible playbook
+            "ansible_tag": "%s@%s"
+            % (controller_site.site.login_base, controller_site.controller.name),
+            "tenant": controller_site.site.login_base,
+            "tenant_description": controller_site.site.name,
+        }
         return tenant_fields
 
     def map_sync_outputs(self, controller_site, res):
-	controller_site.tenant_id = res[0]['id']
-	controller_site.backend_status = '1 - OK'
+        controller_site.tenant_id = res[0]["id"]
+        controller_site.backend_status = "1 - OK"
         controller_site.save()
-            
-    def delete_record(self, controller_site):
-	controller_register = json.loads(controller_site.controller.backend_register)
-        if (controller_register.get('disabled',False)):
-                raise InnocuousException('Controller %s is disabled'%controller_site.controller.name)
 
-	if controller_site.tenant_id:
+    def delete_record(self, controller_site):
+        controller_register = json.loads(controller_site.controller.backend_register)
+        if controller_register.get("disabled", False):
+            raise InnocuousException(
+                "Controller %s is disabled" % controller_site.controller.name
+            )
+
+        if controller_site.tenant_id:
             driver = self.driver.admin_driver(controller=controller_site.controller)
             driver.delete_tenant(controller_site.tenant_id)
 
-	"""
+        """
         Ansible does not support tenant deletion yet
 
-	import pdb
-	pdb.set_trace()
+        import pdb
+        pdb.set_trace()
         template = os_template_env.get_template('delete_controller_sites.yaml')
-	tenant_fields = {'endpoint':controller_site.controller.auth_url,
-		         'admin_user': controller_site.controller.admin_user,
-		         'admin_password': controller_site.controller.admin_password,
-		         'admin_tenant': 'admin',
-	                 'ansible_tag': 'controller_sites/%s@%s'%(controller_site.controller_site.site.login_base,controller_site.controller_site.deployment.name), # name of ansible playbook
-		         'tenant': controller_site.controller_site.site.login_base,
-		         'delete': True}
+        tenant_fields = {'endpoint':controller_site.controller.auth_url,
+                         'admin_user': controller_site.controller.admin_user,
+                         'admin_password': controller_site.controller.admin_password,
+                         'admin_tenant': 'admin',
+                         'ansible_tag': 'controller_sites/%s@%s'%(controller_site.controller_site.site.login_base,controller_site.controller_site.deployment.name), # name of ansible playbook
+                         'tenant': controller_site.controller_site.site.login_base,
+                         'delete': True}
 
-	rendered = template.render(tenant_fields)
-	res = run_template('sync_controller_sites.yaml', tenant_fields)
+        rendered = template.render(tenant_fields)
+        res = run_template('sync_controller_sites.yaml', tenant_fields)
 
-	if (len(res)!=1):
-		raise Exception('Could not assign roles for user %s'%tenant_fields['tenant'])
-	"""
+        if (len(res)!=1):
+                raise Exception('Could not assign roles for user %s'%tenant_fields['tenant'])
+        """
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_slice_privileges.py b/xos/synchronizers/new_base/tests/steps/sync_controller_slice_privileges.py
index e40e5c2..95a0aad 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_slice_privileges.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_slice_privileges.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,60 +20,78 @@
 from mock_modelaccessor import *
 import syncstep
 
+
 class SyncControllerSlicePrivileges(syncstep.SyncStep):
-    provides=[SlicePrivilege]
-    requested_interval=0
-    observes=ControllerSlicePrivilege
-    playbook = 'sync_controller_users.yaml'
+    provides = [SlicePrivilege]
+    requested_interval = 0
+    observes = ControllerSlicePrivilege
+    playbook = "sync_controller_users.yaml"
 
     def map_sync_inputs(self, controller_slice_privilege):
         if not controller_slice_privilege.controller.admin_user:
             return
 
-	template = os_template_env.get_template('sync_controller_users.yaml')
+        template = os_template_env.get_template("sync_controller_users.yaml")
         roles = [controller_slice_privilege.slice_privilege.role.role]
-	# setup user home slice roles at controller 
+        # setup user home slice roles at controller
         if not controller_slice_privilege.slice_privilege.user.site:
-            raise Exception('Sliceless user %s'%controller_slice_privilege.slice_privilege.user.email)
+            raise Exception(
+                "Sliceless user %s"
+                % controller_slice_privilege.slice_privilege.user.email
+            )
         else:
             user_fields = {
-               'endpoint':controller_slice_privilege.controller.auth_url,
-               'endpoint_v3': controller_slice_privilege.controller.auth_url_v3,
-               'domain': controller_slice_privilege.controller.domain,
-		       'name': controller_slice_privilege.slice_privilege.user.email,
-               'email': controller_slice_privilege.slice_privilege.user.email,
-               'password': controller_slice_privilege.slice_privilege.user.remote_password,
-               'admin_user': controller_slice_privilege.controller.admin_user,
-		       'admin_password': controller_slice_privilege.controller.admin_password,
-               'ansible_tag':'%s@%s@%s'%(controller_slice_privilege.slice_privilege.user.email.replace('@','-at-'),controller_slice_privilege.slice_privilege.slice.name,controller_slice_privilege.controller.name),
-		       'admin_tenant': controller_slice_privilege.controller.admin_tenant,
-		       'roles':roles,
-		       'tenant':controller_slice_privilege.slice_privilege.slice.name}    
+                "endpoint": controller_slice_privilege.controller.auth_url,
+                "endpoint_v3": controller_slice_privilege.controller.auth_url_v3,
+                "domain": controller_slice_privilege.controller.domain,
+                "name": controller_slice_privilege.slice_privilege.user.email,
+                "email": controller_slice_privilege.slice_privilege.user.email,
+                "password": controller_slice_privilege.slice_privilege.user.remote_password,
+                "admin_user": controller_slice_privilege.controller.admin_user,
+                "admin_password": controller_slice_privilege.controller.admin_password,
+                "ansible_tag": "%s@%s@%s"
+                % (
+                    controller_slice_privilege.slice_privilege.user.email.replace(
+                        "@", "-at-"
+                    ),
+                    controller_slice_privilege.slice_privilege.slice.name,
+                    controller_slice_privilege.controller.name,
+                ),
+                "admin_tenant": controller_slice_privilege.controller.admin_tenant,
+                "roles": roles,
+                "tenant": controller_slice_privilege.slice_privilege.slice.name,
+            }
             return user_fields
-	
+
     def map_sync_outputs(self, controller_slice_privilege, res):
-        controller_slice_privilege.role_id = res[0]['id']
+        controller_slice_privilege.role_id = res[0]["id"]
         controller_slice_privilege.save()
 
     def delete_record(self, controller_slice_privilege):
-	controller_register = json.loads(controller_slice_privilege.controller.backend_register)
-        if (controller_register.get('disabled',False)):
-                raise InnocuousException('Controller %s is disabled'%controller_slice_privilege.controller.name)
+        controller_register = json.loads(
+            controller_slice_privilege.controller.backend_register
+        )
+        if controller_register.get("disabled", False):
+            raise InnocuousException(
+                "Controller %s is disabled" % controller_slice_privilege.controller.name
+            )
 
         if controller_slice_privilege.role_id:
-            driver = self.driver.admin_driver(controller=controller_slice_privilege.controller)
+            driver = self.driver.admin_driver(
+                controller=controller_slice_privilege.controller
+            )
             user = ControllerUser.objects.filter(
                 controller_id=controller_slice_privilege.controller.id,
-                user_id=controller_slice_privilege.slice_privilege.user.id
+                user_id=controller_slice_privilege.slice_privilege.user.id,
             )
             user = user[0]
             slice = ControllerSlice.objects.filter(
                 controller_id=controller_slice_privilege.controller.id,
-                user_id=controller_slice_privilege.slice_privilege.user.id
+                user_id=controller_slice_privilege.slice_privilege.user.id,
             )
             slice = slice[0]
             driver.delete_user_role(
-                user.kuser_id, 
-                slice.tenant_id, 
-                controller_slice_privilege.slice_prvilege.role.role
+                user.kuser_id,
+                slice.tenant_id,
+                controller_slice_privilege.slice_privilege.role.role,
             )
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_slices.py b/xos/synchronizers/new_base/tests/steps/sync_controller_slices.py
index 31196ff..929dd1c 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_slices.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_slices.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,30 +19,30 @@
 from synchronizers.new_base.ansible_helper import *
 from mock_modelaccessor import *
 
+
 class SyncControllerSlices(syncstep.SyncStep):
-    provides=[Slice]
-    requested_interval=0
-    observes=ControllerSlice
-    playbook='sync_controller_slices.yaml'
+    provides = [Slice]
+    requested_interval = 0
+    observes = ControllerSlice
+    playbook = "sync_controller_slices.yaml"
 
     def map_sync_inputs(self, controller_slice):
-        if getattr(controller_slice, 'force_fail',None):
+        if getattr(controller_slice, "force_fail", None):
             raise Exception("Forced failure")
-        elif getattr(controller_slice, 'force_defer', None):
+        elif getattr(controller_slice, "force_defer", None):
             raise syncstep.DeferredException("Forced defer")
 
-        tenant_fields = {'endpoint': 'endpoint',
-                         'name':'Flagrant Haircut'
-                         }
+        tenant_fields = {"endpoint": "endpoint", "name": "Flagrant Haircut"}
 
         return tenant_fields
 
     def map_sync_outputs(self, controller_slice, res):
         controller_slice.save()
 
-
     def map_delete_inputs(self, controller_slice):
-        tenant_fields = {'endpoint': 'endpoint',
-                          'name':'Conscientious Plastic',
-                          'delete': True}
-	return tenant_fields
+        tenant_fields = {
+            "endpoint": "endpoint",
+            "name": "Conscientious Plastic",
+            "delete": True,
+        }
+        return tenant_fields
diff --git a/xos/synchronizers/new_base/tests/steps/sync_controller_users.py b/xos/synchronizers/new_base/tests/steps/sync_controller_users.py
index 39fcb92..1c722b5 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_controller_users.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_controller_users.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,11 +19,12 @@
 from synchronizers.new_base.ansible_helper import *
 from mock_modelaccessor import *
 
+
 class SyncControllerUsers(SyncStep):
-    provides=[User]
-    requested_interval=0
-    observes=ControllerUser
-    playbook='sync_controller_users.yaml'
+    provides = [User]
+    requested_interval = 0
+    observes = ControllerUser
+    playbook = "sync_controller_users.yaml"
 
     def map_sync_inputs(self, controller_user):
         if not controller_user.controller.admin_user:
@@ -33,34 +33,38 @@
         # All users will have at least the 'user' role at their home site/tenant.
         # We must also check if the user should have the admin role
 
-        roles = ['user']
+        roles = ["user"]
         if controller_user.user.is_admin:
             driver = self.driver.admin_driver(controller=controller_user.controller)
             roles.append(driver.get_admin_role().name)
 
         # setup user home site roles at controller
         if not controller_user.user.site:
-            raise Exception('Siteless user %s'%controller_user.user.email)
+            raise Exception("Siteless user %s" % controller_user.user.email)
         else:
             user_fields = {
-                'endpoint':controller_user.controller.auth_url,
-                'endpoint_v3': controller_user.controller.auth_url_v3,
-                'domain': controller_user.controller.domain,
-                'name': controller_user.user.email,
-                'email': controller_user.user.email,
-                'password': controller_user.user.remote_password,
-                'admin_user': controller_user.controller.admin_user,
-                'admin_password': controller_user.controller.admin_password,
-                'ansible_tag':'%s@%s'%(controller_user.user.email.replace('@','-at-'),controller_user.controller.name),
-                'admin_project': controller_user.controller.admin_tenant,
-                'roles':roles,
-                'project':controller_user.user.site.login_base
-                }
-	    return user_fields
+                "endpoint": controller_user.controller.auth_url,
+                "endpoint_v3": controller_user.controller.auth_url_v3,
+                "domain": controller_user.controller.domain,
+                "name": controller_user.user.email,
+                "email": controller_user.user.email,
+                "password": controller_user.user.remote_password,
+                "admin_user": controller_user.controller.admin_user,
+                "admin_password": controller_user.controller.admin_password,
+                "ansible_tag": "%s@%s"
+                % (
+                    controller_user.user.email.replace("@", "-at-"),
+                    controller_user.controller.name,
+                ),
+                "admin_project": controller_user.controller.admin_tenant,
+                "roles": roles,
+                "project": controller_user.user.site.login_base,
+            }
+            return user_fields
 
     def map_sync_outputs(self, controller_user, res):
-        controller_user.kuser_id = res[0]['user']['id']
-        controller_user.backend_status = '1 - OK'
+        controller_user.kuser_id = res[0]["user"]["id"]
+        controller_user.backend_status = "1 - OK"
         controller_user.save()
 
     def delete_record(self, controller_user):
diff --git a/xos/synchronizers/new_base/tests/steps/sync_images.py b/xos/synchronizers/new_base/tests/steps/sync_images.py
index ea12459..3a56f92 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_images.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_images.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,10 +18,11 @@
 from mock_modelaccessor import *
 from synchronizers.new_base.syncstep import SyncStep
 
+
 class SyncImages(SyncStep):
-    provides=[Image]
-    requested_interval=0
-    observes=[Image]
+    provides = [Image]
+    requested_interval = 0
+    observes = [Image]
 
     def sync_record(self, role):
         # do nothing
diff --git a/xos/synchronizers/new_base/tests/steps/sync_instances.py b/xos/synchronizers/new_base/tests/steps/sync_instances.py
index 49dccb9..74984ae 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_instances.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_instances.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,15 +23,17 @@
 RESTAPI_HOSTNAME = socket.gethostname()
 RESTAPI_PORT = "8000"
 
+
 def escape(s):
-    s = s.replace('\n', r'\n').replace('"', r'\"')
+    s = s.replace("\n", r"\n").replace('"', r"\"")
     return s
 
+
 class SyncInstances(syncstep.SyncStep):
     provides = [Instance]
     requested_interval = 0
     observes = Instance
-    playbook = 'sync_instances.yaml'
+    playbook = "sync_instances.yaml"
 
     def fetch_pending(self, deletion=False):
         objs = super(SyncInstances, self).fetch_pending(deletion)
@@ -42,25 +43,24 @@
     def map_sync_inputs(self, instance):
         inputs = {}
         metadata_update = {}
-        
-        fields = {
-                  'name': instance.name,
-                  'delete': False,
-                 }
+
+        fields = {"name": instance.name, "delete": False}
         return fields
 
     def map_sync_outputs(self, instance, res):
         instance.save()
 
     def map_delete_inputs(self, instance):
-        input = {'endpoint': 'endpoint',
-                 'admin_user': 'admin_user',
-                 'admin_password': 'admin_password',
-                 'project_name': 'project_name',
-                 'tenant': 'tenant',
-                 'tenant_description': 'tenant_description',
-                 'name': instance.name,
-                 'ansible_tag': 'ansible_tag',
-                 'delete': True}
+        input = {
+            "endpoint": "endpoint",
+            "admin_user": "admin_user",
+            "admin_password": "admin_password",
+            "project_name": "project_name",
+            "tenant": "tenant",
+            "tenant_description": "tenant_description",
+            "name": instance.name,
+            "ansible_tag": "ansible_tag",
+            "delete": True,
+        }
 
         return input
diff --git a/xos/synchronizers/new_base/tests/steps/sync_ports.py b/xos/synchronizers/new_base/tests/steps/sync_ports.py
index 3d68293..e301ea4 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_ports.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_ports.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,10 +18,11 @@
 from mock_modelaccessor import *
 from synchronizers.new_base.syncstep import SyncStep
 
+
 class SyncPort(SyncStep):
-    requested_interval = 0 # 3600
-    provides=[Port]
-    observes=Port
+    requested_interval = 0  # 3600
+    provides = [Port]
+    observes = Port
 
     def call(self, failed=[], deletion=False):
         if deletion:
@@ -31,8 +31,7 @@
             self.sync_ports()
 
     def sync_ports(self):
-        open('/tmp/sync_ports','w').write('Sync successful')
-        
+        open("/tmp/sync_ports", "w").write("Sync successful")
 
     def delete_ports(self):
-        open('/tmp/delete_ports','w').write('Delete successful')
+        open("/tmp/delete_ports", "w").write("Delete successful")
diff --git a/xos/synchronizers/new_base/tests/steps/sync_roles.py b/xos/synchronizers/new_base/tests/steps/sync_roles.py
index 0a91b33..ea0c77b 100644
--- a/xos/synchronizers/new_base/tests/steps/sync_roles.py
+++ b/xos/synchronizers/new_base/tests/steps/sync_roles.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,18 +16,18 @@
 import os
 import base64
 from mock_modelaccessor import *
-import syncstep 
+import syncstep
+
 
 class SyncRoles(syncstep.SyncStep):
-    provides=[Role]
-    requested_interval=0
-    observes=[SiteRole,SliceRole,ControllerRole]
+    provides = [Role]
+    requested_interval = 0
+    observes = [SiteRole, SliceRole, ControllerRole]
 
     def sync_record(self, role):
         if not role.enacted:
             controllers = Controller.objects.all()
-       	    for controller in controllers:
+            for controller in controllers:
                 driver = self.driver.admin_driver(controller=controller)
                 driver.create_role(role.role)
             role.save()
-    
diff --git a/xos/synchronizers/new_base/tests/test_controller_dependencies.py b/xos/synchronizers/new_base/tests/test_controller_dependencies.py
index 35bbdc8..47f17d2 100644
--- a/xos/synchronizers/new_base/tests/test_controller_dependencies.py
+++ b/xos/synchronizers/new_base/tests/test_controller_dependencies.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,10 +18,12 @@
 import pdb
 import networkx as nx
 
-import os, sys
+import os
+import sys
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
+
 
 class TestControllerDependencies(unittest.TestCase):
 
@@ -34,22 +35,30 @@
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         import event_loop
+
         reload(event_loop)
         import backend
+
         reload(backend)
         from mock_modelaccessor import mock_enumerator
         from modelaccessor import model_accessor
@@ -91,9 +100,9 @@
         s.site = t
         ct.site = t
         t.controllersite = mock_enumerator([ct])
-        cohorts = self.synchronizer.compute_dependent_cohorts([p,ct], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts([p, ct], False)
         self.assertEqual([ct, p], cohorts[0])
-        cohorts = self.synchronizer.compute_dependent_cohorts([ct,p], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts([ct, p], False)
         self.assertEqual([ct, p], cohorts[0])
 
     def test_controller_deletion_path(self):
@@ -106,10 +115,10 @@
         s.site = t
 
         t.controllersite = mock_enumerator([ct])
-        
-        cohorts = self.synchronizer.compute_dependent_cohorts([p,s,t,ct], False)
+
+        cohorts = self.synchronizer.compute_dependent_cohorts([p, s, t, ct], False)
         self.assertEqual([t, ct, s, p], cohorts[0])
-        cohorts = self.synchronizer.compute_dependent_cohorts([p,s,t,ct], True)
+        cohorts = self.synchronizer.compute_dependent_cohorts([p, s, t, ct], True)
         self.assertEqual([p, s, ct, t], cohorts[0])
 
     def test_multi_controller_schedule(self):
@@ -125,7 +134,9 @@
         i = Instance()
         i.slice = slice
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i, slice, site, csl, csi], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts(
+            [i, slice, site, csl, csi], False
+        )
         self.assertEqual([site, csi, slice, csl, i], cohorts[0])
 
     def test_multi_controller_path_negative(self):
@@ -152,7 +163,7 @@
         s.site = t
         ct.site = t
         t.controllersite = mock_enumerator([])
-        cohorts = self.synchronizer.compute_dependent_cohorts([p,ct], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts([p, ct], False)
         self.assertIn([ct], cohorts)
         self.assertIn([p], cohorts)
 
@@ -164,16 +175,15 @@
         s.site = t
 
         t.controllersite = mock_enumerator([])
-        
-        cohorts = self.synchronizer.compute_dependent_cohorts([p,s,t,ct], False)
-        self.assertIn([t,s], cohorts)
-        self.assertIn([p], cohorts)
-        self.assertIn([ct], cohorts)
-        cohorts = self.synchronizer.compute_dependent_cohorts([p,s,t,ct], True)
-        self.assertIn([s,t], cohorts)
-        self.assertIn([p], cohorts)
-        self.assertIn([ct], cohorts)
 
+        cohorts = self.synchronizer.compute_dependent_cohorts([p, s, t, ct], False)
+        self.assertIn([t, s], cohorts)
+        self.assertIn([p], cohorts)
+        self.assertIn([ct], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([p, s, t, ct], True)
+        self.assertIn([s, t], cohorts)
+        self.assertIn([p], cohorts)
+        self.assertIn([ct], cohorts)
 
     def test_multi_controller_deletion_schedule(self):
         csl = ControllerSlice()
@@ -186,7 +196,9 @@
         i = Instance()
         i.slice = slice
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i, slice, site, csl, csi], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts(
+            [i, slice, site, csl, csi], False
+        )
         self.assertIn([site, slice, i], cohorts)
         self.assertIn([csl], cohorts)
         self.assertIn([csi], cohorts)
@@ -202,10 +214,13 @@
         i = Instance()
         i.slice = slice
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i, slice, site, csl, csi], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts(
+            [i, slice, site, csl, csi], False
+        )
         self.assertIn([site, slice, i], cohorts)
         self.assertIn([csl], cohorts)
         self.assertIn([csi], cohorts)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_diffs.py b/xos/synchronizers/new_base/tests/test_diffs.py
index 6ae2b37..c2e53ad 100644
--- a/xos/synchronizers/new_base/tests/test_diffs.py
+++ b/xos/synchronizers/new_base/tests/test_diffs.py
@@ -16,18 +16,20 @@
 from mock import patch, call, Mock, PropertyMock
 import json
 
-import os, sys
+import os
+import sys
 
 # Hack to load synchronizer framework
-test_path=os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir=os.path.join(test_path, "../../..")
+test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+xos_dir = os.path.join(test_path, "../../..")
 if not os.path.exists(os.path.join(test_path, "new_base")):
-    xos_dir=os.path.join(test_path, "../../../../../../orchestration/xos/xos")
+    xos_dir = os.path.join(test_path, "../../../../../../orchestration/xos/xos")
     services_dir = os.path.join(xos_dir, "../../xos_services")
 sys.path.append(xos_dir)
-sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
+sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
 # END Hack to load synchronizer framework
 
+
 class TestDiffs(unittest.TestCase):
 
     """ These tests are for the mock modelaccessor, to make sure it behaves like the real one """
@@ -36,16 +38,19 @@
 
         self.sys_path_save = sys.path
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
 
         # Setting up the config module
         from xosconfig import Config
+
         config = os.path.join(test_path, "test_config.yaml")
         Config.clear()
         Config.init(config, "synchronizer-config-schema.yaml")
         # END Setting up the config module
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
 
         # FIXME this is to get jenkins to pass the tests, somehow it is running tests in a different order
         # and apparently it is not overriding the generated model accessor
@@ -53,16 +58,19 @@
         import synchronizers.new_base.modelaccessor
 
         # import all class names to globals
-        for (k, v) in synchronizers.new_base.modelaccessor.model_accessor.all_model_classes.items():
+        for (
+            k,
+            v,
+        ) in (
+            synchronizers.new_base.modelaccessor.model_accessor.all_model_classes.items()
+        ):
             globals()[k] = v
 
         self.log = Mock()
 
-
     def tearDown(self):
         sys.path = self.sys_path_save
 
-
     def test_new_diff(self):
         site = Site(name="mysite")
 
@@ -75,8 +83,8 @@
 
         site.login_base = "bar"
 
-        self.assertEqual(site._dict, {'login_base': 'bar', 'name': 'mysite'})
-        self.assertEqual(site.diff, {'login_base': (None, 'bar')})
+        self.assertEqual(site._dict, {"login_base": "bar", "name": "mysite"})
+        self.assertEqual(site.diff, {"login_base": (None, "bar")})
         self.assertIn("name", site.changed_fields)
         self.assertIn("login_base", site.changed_fields)
         self.assertEqual(site.has_field_changed("name"), False)
@@ -100,11 +108,12 @@
 
         site.login_base = "bar"
 
-        self.assertEqual(site._dict, {'id': 1, 'login_base': 'bar', 'name': 'mysite'})
-        self.assertEqual(site.diff, {'login_base': ("foo", 'bar')})
+        self.assertEqual(site._dict, {"id": 1, "login_base": "bar", "name": "mysite"})
+        self.assertEqual(site.diff, {"login_base": ("foo", "bar")})
         self.assertIn("login_base", site.changed_fields)
         self.assertEqual(site.has_field_changed("name"), False)
         self.assertEqual(site.has_field_changed("login_base"), True)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_event_engine.py b/xos/synchronizers/new_base/tests/test_event_engine.py
index 94ac2d7..b5b6921 100644
--- a/xos/synchronizers/new_base/tests/test_event_engine.py
+++ b/xos/synchronizers/new_base/tests/test_event_engine.py
@@ -18,13 +18,15 @@
 
 from mock import patch, PropertyMock, ANY
 
-import os, sys
+import os
+import sys
 import time
 
 log = None
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
+
 
 def config_get_mock(orig, overrides, key):
     if key in overrides:
@@ -33,7 +35,7 @@
         return orig(key)
 
 
-class FakeKafkaConsumer():
+class FakeKafkaConsumer:
     def __init__(self, values=[]):
         self.values = values
 
@@ -47,13 +49,19 @@
         time.sleep(1000)
 
 
-class FakeKafkaMessage():
-    ''' Works like Message in confluent_kafka
+class FakeKafkaMessage:
+    """ Works like Message in confluent_kafka
         https://docs.confluent.io/current/clients/confluent-kafka-python/#message
-    '''
+    """
 
-    def __init__(self, timestamp=None, topic='faketopic',
-                 key='fakekey', value='fakevalue', error=False):
+    def __init__(
+        self,
+        timestamp=None,
+        topic="faketopic",
+        key="fakekey",
+        value="fakevalue",
+        error=False,
+    ):
 
         if timestamp is None:
             self.fake_ts_type = confluent_kafka.TIMESTAMP_NOT_AVAILABLE
@@ -84,7 +92,6 @@
 
 
 class TestEventEngine(unittest.TestCase):
-
     @classmethod
     def setUpClass(cls):
 
@@ -92,13 +99,14 @@
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
+
         Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
+        Config.init(config, "synchronizer-config-schema.yaml")
 
         if not log:
             from multistructlog import create_logger
-            log = create_logger(Config().get('logging'))
 
+            log = create_logger(Config().get("logging"))
 
     def setUp(self):
         global XOSKafkaThread, Config, log
@@ -106,18 +114,24 @@
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'event_steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "event_steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         from event_engine import XOSKafkaThread, XOSEventEngine
 
@@ -135,11 +149,17 @@
     def test_start(self):
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-             patch.object(FakeKafkaConsumer, "subscribe") as fake_subscribe, \
-             patch.object(self.event_engine.event_steps[0], "process_event") as process_event:
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            FakeKafkaConsumer, "subscribe"
+        ) as fake_subscribe, patch.object(
+            self.event_engine.event_steps[0], "process_event"
+        ) as process_event:
 
-            create_kafka_consumer.return_value = FakeKafkaConsumer(values=["sampleevent"])
+            create_kafka_consumer.return_value = FakeKafkaConsumer(
+                values=["sampleevent"]
+            )
             self.event_engine.start()
 
             self.assertEqual(len(self.event_engine.threads), 1)
@@ -153,20 +173,27 @@
             # The fake consumer will have returned one event
             process_event.assert_called_once()
 
-
     def test_start_with_pattern(self):
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-             patch.object(FakeKafkaConsumer, "subscribe") as fake_subscribe, \
-             patch.object(self.event_engine.event_steps[0], "process_event") as process_event, \
-             patch.object(self.event_engine.event_steps[0], "pattern", new_callable=PropertyMock) as pattern, \
-             patch.object(self.event_engine.event_steps[0], "topics", new_callable=PropertyMock) as topics:
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            FakeKafkaConsumer, "subscribe"
+        ) as fake_subscribe, patch.object(
+            self.event_engine.event_steps[0], "process_event"
+        ) as process_event, patch.object(
+            self.event_engine.event_steps[0], "pattern", new_callable=PropertyMock
+        ) as pattern, patch.object(
+            self.event_engine.event_steps[0], "topics", new_callable=PropertyMock
+        ) as topics:
 
             pattern.return_value = "somepattern"
             topics.return_value = []
 
-            create_kafka_consumer.return_value = FakeKafkaConsumer(values=["sampleevent"])
+            create_kafka_consumer.return_value = FakeKafkaConsumer(
+                values=["sampleevent"]
+            )
             self.event_engine.start()
 
             self.assertEqual(len(self.event_engine.threads), 1)
@@ -180,7 +207,6 @@
             # The fake consumer will have returned one event
             process_event.assert_called_once()
 
-
     def test_start_bad_tech(self):
         """ Set an unknown Technology in the event_step. XOSEventEngine.start() should print an error message and
             not create any threads.
@@ -188,17 +214,24 @@
 
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-                patch.object(log, "error") as log_error, \
-                patch.object(self.event_engine.event_steps[0], "technology") as technology:
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            log, "error"
+        ) as log_error, patch.object(
+            self.event_engine.event_steps[0], "technology"
+        ) as technology:
             technology.return_value = "not_kafka"
             create_kafka_consumer.return_value = FakeKafkaConsumer()
             self.event_engine.start()
 
             self.assertEqual(len(self.event_engine.threads), 0)
 
-            log_error.assert_called_with('Unknown technology. Skipping step', step="TestEventStep",
-                                         technology=ANY)
+            log_error.assert_called_with(
+                "Unknown technology. Skipping step",
+                step="TestEventStep",
+                technology=ANY,
+            )
 
     def test_start_bad_no_topics(self):
         """ Set no topics in the event_step. XOSEventEngine.start() will launch a thread, but the thread will fail
@@ -207,9 +240,13 @@
 
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-             patch.object(FakeKafkaConsumer, "subscribe") as fake_subscribe, \
-             patch.object(self.event_engine.event_steps[0], "topics", new_callable=PropertyMock) as topics:
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            FakeKafkaConsumer, "subscribe"
+        ) as fake_subscribe, patch.object(
+            self.event_engine.event_steps[0], "topics", new_callable=PropertyMock
+        ) as topics:
             topics.return_value = []
             create_kafka_consumer.return_value = FakeKafkaConsumer()
             self.event_engine.start()
@@ -228,9 +265,13 @@
 
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-             patch.object(FakeKafkaConsumer, "subscribe") as fake_subscribe, \
-             patch.object(self.event_engine.event_steps[0], "pattern", new_callable=PropertyMock) as pattern:
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            FakeKafkaConsumer, "subscribe"
+        ) as fake_subscribe, patch.object(
+            self.event_engine.event_steps[0], "pattern", new_callable=PropertyMock
+        ) as pattern:
             pattern.return_value = "foo"
             create_kafka_consumer.return_value = FakeKafkaConsumer()
             self.event_engine.start()
@@ -250,16 +291,26 @@
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
         config_get_orig = Config.get
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-                patch.object(log, "error") as log_error, \
-                patch.object(Config, "get", new=functools.partial(config_get_mock, config_get_orig, {"event_bus.kind": None})):
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            log, "error"
+        ) as log_error, patch.object(
+            Config,
+            "get",
+            new=functools.partial(
+                config_get_mock, config_get_orig, {"event_bus.kind": None}
+            ),
+        ):
 
             create_kafka_consumer.return_value = FakeKafkaConsumer()
             self.event_engine.start()
 
             self.assertEqual(len(self.event_engine.threads), 0)
 
-            log_error.assert_called_with('Eventbus kind is not configured in synchronizer config file.')
+            log_error.assert_called_with(
+                "Eventbus kind is not configured in synchronizer config file."
+            )
 
     def test_start_config_bad_eventbus_kind(self):
         """ Set an unknown event_bus.kind in Config. XOSEventEngine.start() should print an error message and
@@ -269,17 +320,27 @@
         self.event_engine.load_event_step_modules(self.event_steps_dir)
 
         config_get_orig = Config.get
-        with patch.object(XOSKafkaThread, "create_kafka_consumer") as create_kafka_consumer, \
-                patch.object(log, "error") as log_error, \
-                patch.object(Config, "get",
-                             new=functools.partial(config_get_mock, config_get_orig, {"event_bus.kind": "not_kafka"})):
+        with patch.object(
+            XOSKafkaThread, "create_kafka_consumer"
+        ) as create_kafka_consumer, patch.object(
+            log, "error"
+        ) as log_error, patch.object(
+            Config,
+            "get",
+            new=functools.partial(
+                config_get_mock, config_get_orig, {"event_bus.kind": "not_kafka"}
+            ),
+        ):
             create_kafka_consumer.return_value = FakeKafkaConsumer()
             self.event_engine.start()
 
             self.assertEqual(len(self.event_engine.threads), 0)
 
-            log_error.assert_called_with('Eventbus kind is set to a technology we do not implement.',
-                                         eventbus_kind='not_kafka')
+            log_error.assert_called_with(
+                "Eventbus kind is set to a technology we do not implement.",
+                eventbus_kind="not_kafka",
+            )
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_load.py b/xos/synchronizers/new_base/tests/test_load.py
index c42b8af..06baa03 100644
--- a/xos/synchronizers/new_base/tests/test_load.py
+++ b/xos/synchronizers/new_base/tests/test_load.py
@@ -18,32 +18,42 @@
 import pdb
 import networkx as nx
 
-import os, sys
+import os
+import sys
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
+
 
 class TestScheduling(unittest.TestCase):
     def setUp(self):
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         import event_loop
+
         reload(event_loop)
         import backend
+
         reload(backend)
 
         # self.policy = TenantWithContainerPolicy()
@@ -64,28 +74,36 @@
 
     def test_load_steps(self):
         step_names = [s.__name__ for s in self.steps]
-        self.assertIn('SyncControllerSlices', step_names)
-    
+        self.assertIn("SyncControllerSlices", step_names)
+
     def test_load_deps(self):
         self.synchronizer.load_dependency_graph()
         graph = self.synchronizer.model_dependency_graph
-        self.assertTrue(graph[False].has_edge('Instance','Slice'))
-        self.assertTrue(graph[True].has_edge('Slice','Instance'))
-        self.assertTrue(graph[False].has_edge('Slice','ControllerSlice'))
-        self.assertTrue(graph[True].has_edge('ControllerSlice','Slice'))
+        self.assertTrue(graph[False].has_edge("Instance", "Slice"))
+        self.assertTrue(graph[True].has_edge("Slice", "Instance"))
+        self.assertTrue(graph[False].has_edge("Slice", "ControllerSlice"))
+        self.assertTrue(graph[True].has_edge("ControllerSlice", "Slice"))
 
     def test_load_dep_accessors(self):
         self.synchronizer.load_dependency_graph()
         graph = self.synchronizer.model_dependency_graph
-        self.assertDictContainsSubset({'src_accessor': 'controllerslices'}, graph[False]['Slice']['ControllerSlice'])
-        self.assertDictContainsSubset({'src_accessor': 'slice', 'dst_accessor': 'controllerslices'}, graph[True]['Slice']['ControllerSlice'])
+        self.assertDictContainsSubset(
+            {"src_accessor": "controllerslices"},
+            graph[False]["Slice"]["ControllerSlice"],
+        )
+        self.assertDictContainsSubset(
+            {"src_accessor": "slice", "dst_accessor": "controllerslices"},
+            graph[True]["Slice"]["ControllerSlice"],
+        )
 
     def test_load_sync_steps(self):
         self.synchronizer.load_sync_steps()
         model_to_step = self.synchronizer.model_to_step
         step_lookup = self.synchronizer.step_lookup
-        self.assertIn(('ControllerSlice', ['SyncControllerSlices']), model_to_step.items())
-        self.assertIn(('SiteRole', ['SyncRoles']), model_to_step.items())
+        self.assertIn(
+            ("ControllerSlice", ["SyncControllerSlices"]), model_to_step.items()
+        )
+        self.assertIn(("SiteRole", ["SyncRoles"]), model_to_step.items())
 
         for k, v in model_to_step.items():
             val = v[0]
@@ -97,5 +115,5 @@
             self.assertIn(k, observed_names)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_payload.py b/xos/synchronizers/new_base/tests/test_payload.py
index d398612..308133b 100644
--- a/xos/synchronizers/new_base/tests/test_payload.py
+++ b/xos/synchronizers/new_base/tests/test_payload.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,31 +19,35 @@
 import pdb
 import networkx as nx
 
-import os, sys
+import os
+import sys
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
 
-ANSIBLE_FILE='/tmp/payload_test'
+ANSIBLE_FILE = "/tmp/payload_test"
 
 log = None
 
-def run_fake_ansible_template(*args,**kwargs):
+
+def run_fake_ansible_template(*args, **kwargs):
     opts = args[1]
-    open(ANSIBLE_FILE,'w').write(json.dumps(opts))
+    open(ANSIBLE_FILE, "w").write(json.dumps(opts))
     return [{"rc": 0}]
 
-def run_fake_ansible_template_fail(*args,**kwargs):
+
+def run_fake_ansible_template_fail(*args, **kwargs):
     opts = args[1]
-    open(ANSIBLE_FILE,'w').write(json.dumps(opts))
+    open(ANSIBLE_FILE, "w").write(json.dumps(opts))
     return [{"rc": 1}]
 
+
 def get_ansible_output():
     ansible_str = open(ANSIBLE_FILE).read()
     return json.loads(ansible_str)
 
-class TestPayload(unittest.TestCase):
 
+class TestPayload(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
 
@@ -52,13 +55,14 @@
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
+
         Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
+        Config.init(config, "synchronizer-config-schema.yaml")
 
         if not log:
             from multistructlog import create_logger
-            log = create_logger(Config().get('logging'))
 
+            log = create_logger(Config().get("logging"))
 
     def setUp(self):
 
@@ -67,22 +71,30 @@
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         import event_loop
+
         reload(event_loop)
         import backend
+
         reload(backend)
         import steps.sync_instances
         import steps.sync_controller_slices
@@ -100,7 +112,10 @@
         sys.path = self.sys_path_save
         os.chdir(self.cwd_save)
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_delete_record(self, mock_run_template, mock_modelaccessor):
         with mock.patch.object(Instance, "save") as instance_save:
@@ -111,10 +126,13 @@
             self.synchronizer.delete_record(o, log)
 
             a = get_ansible_output()
-            self.assertDictContainsSubset({'delete':True, 'name':o.name}, a)
-            o.save.assert_called_with(update_fields=['backend_need_reap'])
+            self.assertDictContainsSubset({"delete": True, "name": o.name}, a)
+            o.save.assert_called_with(update_fields=["backend_need_reap"])
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template_fail)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template_fail,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_delete_record_fail(self, mock_run_template, mock_modelaccessor):
         with mock.patch.object(Instance, "save") as instance_save:
@@ -126,9 +144,14 @@
             with self.assertRaises(Exception) as e:
                 self.synchronizer.delete_record(o, log)
 
-            self.assertEqual(e.exception.message, "Nonzero rc from Ansible during delete_record")
+            self.assertEqual(
+                e.exception.message, "Nonzero rc from Ansible during delete_record"
+            )
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_sync_record(self, mock_run_template, mock_modelaccessor):
         with mock.patch.object(Instance, "save") as instance_save:
@@ -139,16 +162,27 @@
             self.synchronizer.sync_record(o, log)
 
             a = get_ansible_output()
-            self.assertDictContainsSubset({'delete':False, 'name':o.name}, a)
-            o.save.assert_called_with(update_fields=['enacted', 'backend_status', 'backend_register', 'backend_code'])
+            self.assertDictContainsSubset({"delete": False, "name": o.name}, a)
+            o.save.assert_called_with(
+                update_fields=[
+                    "enacted",
+                    "backend_status",
+                    "backend_register",
+                    "backend_code",
+                ]
+            )
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_sync_cohort(self, mock_run_template, mock_modelaccessor):
-        with mock.patch.object(Instance, "save") as instance_save, \
-             mock.patch.object(ControllerSlice, "save") as controllerslice_save:
+        with mock.patch.object(Instance, "save") as instance_save, mock.patch.object(
+            ControllerSlice, "save"
+        ) as controllerslice_save:
             cs = ControllerSlice()
-            s = Slice(name = 'SP SP')
+            s = Slice(name="SP SP")
             cs.slice = s
 
             o = Instance()
@@ -162,16 +196,33 @@
             self.synchronizer.sync_cohort(cohort, False)
 
             a = get_ansible_output()
-            self.assertDictContainsSubset({'delete':False, 'name':o.name}, a)
-            o.save.assert_called_with(update_fields=['enacted', 'backend_status', 'backend_register', 'backend_code'])
-            cs.save.assert_called_with(update_fields=['enacted', 'backend_status', 'backend_register', 'backend_code'])
+            self.assertDictContainsSubset({"delete": False, "name": o.name}, a)
+            o.save.assert_called_with(
+                update_fields=[
+                    "enacted",
+                    "backend_status",
+                    "backend_register",
+                    "backend_code",
+                ]
+            )
+            cs.save.assert_called_with(
+                update_fields=[
+                    "enacted",
+                    "backend_status",
+                    "backend_register",
+                    "backend_code",
+                ]
+            )
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_deferred_exception(self, mock_run_template, mock_modelaccessor):
         with mock.patch.object(Instance, "save") as instance_save:
             cs = ControllerSlice()
-            s = Slice(name = 'SP SP')
+            s = Slice(name="SP SP")
             cs.slice = s
             cs.force_defer = True
 
@@ -184,18 +235,24 @@
             cs.synchronizer_step = steps.sync_controller_slices.SyncControllerSlices()
 
             self.synchronizer.sync_cohort(cohort, False)
-            o.save.assert_called_with(always_update_timestamp=True, update_fields=['backend_status', 'backend_register'])
+            o.save.assert_called_with(
+                always_update_timestamp=True,
+                update_fields=["backend_status", "backend_register"],
+            )
             self.assertEqual(cs.backend_code, 0)
 
-            self.assertIn('Force', cs.backend_status)
-            self.assertIn('Failed due to', o.backend_status)
+            self.assertIn("Force", cs.backend_status)
+            self.assertIn("Failed due to", o.backend_status)
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_backend_status(self, mock_run_template, mock_modelaccessor):
         with mock.patch.object(Instance, "save") as instance_save:
             cs = ControllerSlice()
-            s = Slice(name = 'SP SP')
+            s = Slice(name="SP SP")
             cs.slice = s
             cs.force_fail = True
 
@@ -208,18 +265,28 @@
             cs.synchronizer_step = steps.sync_controller_slices.SyncControllerSlices()
 
             self.synchronizer.sync_cohort(cohort, False)
-            o.save.assert_called_with(always_update_timestamp=True, update_fields=['backend_status', 'backend_register'])
-            self.assertIn('Force', cs.backend_status)
-            self.assertIn('Failed due to', o.backend_status)
+            o.save.assert_called_with(
+                always_update_timestamp=True,
+                update_fields=["backend_status", "backend_register"],
+            )
+            self.assertIn("Force", cs.backend_status)
+            self.assertIn("Failed due to", o.backend_status)
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_fetch_pending(self, mock_run_template, mock_accessor, *_other_accessors):
         pending_objects, pending_steps = self.synchronizer.fetch_pending()
         pending_objects2 = list(pending_objects)
 
-        any_cs = next(obj for obj in pending_objects if obj.leaf_model_name == 'ControllerSlice')
-        any_instance = next(obj for obj in pending_objects2 if obj.leaf_model_name == 'Instance')
+        any_cs = next(
+            obj for obj in pending_objects if obj.leaf_model_name == "ControllerSlice"
+        )
+        any_instance = next(
+            obj for obj in pending_objects2 if obj.leaf_model_name == "Instance"
+        )
 
         slice = Slice()
         any_instance.slice = slice
@@ -228,22 +295,31 @@
         self.synchronizer.external_dependencies = []
         cohorts = self.synchronizer.compute_dependent_cohorts(pending_objects, False)
         flat_objects = [item for cohort in cohorts for item in cohort]
-       
+
         self.assertEqual(set(flat_objects), set(pending_objects))
-    
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
-    def test_fetch_pending_with_external_dependencies(self, mock_run_template, mock_accessor, *_other_accessors):
+    def test_fetch_pending_with_external_dependencies(
+        self, mock_run_template, mock_accessor, *_other_accessors
+    ):
         pending_objects, pending_steps = self.synchronizer.fetch_pending()
         pending_objects2 = list(pending_objects)
 
         self.synchronizer = event_loop.XOSObserver(self.steps)
 
-        any_cn = next(obj for obj in pending_objects if obj.leaf_model_name == 'ControllerNetwork')
-        any_user = next(obj for obj in pending_objects2 if obj.leaf_model_name == 'User')
+        any_cn = next(
+            obj for obj in pending_objects if obj.leaf_model_name == "ControllerNetwork"
+        )
+        any_user = next(
+            obj for obj in pending_objects2 if obj.leaf_model_name == "User"
+        )
 
         cohorts = self.synchronizer.compute_dependent_cohorts(pending_objects, False)
-       
+
         flat_objects = [item for cohort in cohorts for item in cohort]
         self.assertEqual(set(flat_objects), set(pending_objects))
 
@@ -251,11 +327,14 @@
         self.assertIsNotNone(any_cn)
         self.assertIsNotNone(any_user)
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_external_dependency_exception(self, mock_run_template, mock_modelaccessor):
         cs = ControllerSlice()
-        s = Slice(name = 'SP SP')
+        s = Slice(name="SP SP")
         cs.slice = s
 
         o = Instance()
@@ -268,5 +347,6 @@
 
         self.synchronizer.sync_cohort(cohort, False)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_run.py b/xos/synchronizers/new_base/tests/test_run.py
index 86db27d..863bc0f 100644
--- a/xos/synchronizers/new_base/tests/test_run.py
+++ b/xos/synchronizers/new_base/tests/test_run.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,42 +19,54 @@
 import pdb
 import networkx as nx
 
-import os, sys
+import os
+import sys
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
 
-ANSIBLE_FILE='/tmp/payload_test'
+ANSIBLE_FILE = "/tmp/payload_test"
 
-def run_fake_ansible_template(*args,**kwargs):
+
+def run_fake_ansible_template(*args, **kwargs):
     opts = args[1]
-    open(ANSIBLE_FILE,'w').write(json.dumps(opts))
+    open(ANSIBLE_FILE, "w").write(json.dumps(opts))
+
 
 def get_ansible_output():
     ansible_str = open(ANSIBLE_FILE).read()
     return json.loads(ansible_str)
 
+
 class TestRun(unittest.TestCase):
     def setUp(self):
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         import event_loop
+
         reload(event_loop)
         import backend
+
         reload(backend)
         from modelaccessor import model_accessor
 
@@ -68,11 +79,11 @@
         self.steps = b.load_sync_step_modules(steps_dir)
         self.synchronizer = event_loop.XOSObserver(self.steps)
         try:
-            os.remove('/tmp/sync_ports')
+            os.remove("/tmp/sync_ports")
         except OSError:
             pass
         try:
-            os.remove('/tmp/delete_ports')
+            os.remove("/tmp/delete_ports")
         except OSError:
             pass
 
@@ -80,16 +91,22 @@
         sys.path = self.sys_path_save
         os.chdir(self.cwd_save)
 
-    @mock.patch("steps.sync_instances.syncstep.run_template",side_effect=run_fake_ansible_template)
+    @mock.patch(
+        "steps.sync_instances.syncstep.run_template",
+        side_effect=run_fake_ansible_template,
+    )
     @mock.patch("event_loop.model_accessor")
     def test_run_once(self, mock_run_template, mock_accessor, *_other_accessors):
 
-
         pending_objects, pending_steps = self.synchronizer.fetch_pending()
         pending_objects2 = list(pending_objects)
 
-        any_cs = next(obj for obj in pending_objects if obj.leaf_model_name == 'ControllerSlice')
-        any_instance = next(obj for obj in pending_objects2 if obj.leaf_model_name == 'Instance')
+        any_cs = next(
+            obj for obj in pending_objects if obj.leaf_model_name == "ControllerSlice"
+        )
+        any_instance = next(
+            obj for obj in pending_objects2 if obj.leaf_model_name == "Instance"
+        )
 
         slice = Slice()
         any_instance.slice = slice
@@ -97,11 +114,12 @@
 
         self.synchronizer.run_once()
 
-        sync_ports = open('/tmp/sync_ports').read()
-        delete_ports = open('/tmp/delete_ports').read()
+        sync_ports = open("/tmp/sync_ports").read()
+        delete_ports = open("/tmp/delete_ports").read()
 
         self.assertIn("successful", sync_ports)
         self.assertIn("successful", delete_ports)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_scheduler.py b/xos/synchronizers/new_base/tests/test_scheduler.py
index d8a7a7f..fa2d493 100644
--- a/xos/synchronizers/new_base/tests/test_scheduler.py
+++ b/xos/synchronizers/new_base/tests/test_scheduler.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,10 +18,12 @@
 import pdb
 import networkx as nx
 
-import os, sys
+import os
+import sys
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
+
 
 class TestScheduling(unittest.TestCase):
 
@@ -34,22 +35,30 @@
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         import event_loop
+
         reload(event_loop)
         import backend
+
         reload(backend)
         from mock_modelaccessor import mock_enumerator
         from modelaccessor import model_accessor
@@ -92,7 +101,7 @@
         t = ControllerSlice(slice=s)
         u = ControllerSlice(slice=s)
 
-        s.controllerslices = mock_enumerator([t,u])
+        s.controllerslices = mock_enumerator([t, u])
 
         same, et = self.synchronizer.same_object(s.controllerslices, u)
         self.assertTrue(same)
@@ -120,7 +129,7 @@
     def test_concrete_path_no_model_path(self):
         p = Port()
         n = NetworkParameter()
-        verdict,_ = self.synchronizer.concrete_path_exists(p, n)
+        verdict, _ = self.synchronizer.concrete_path_exists(p, n)
         self.assertFalse(verdict)
 
     def test_concrete_no_object_path_adjacent(self):
@@ -128,16 +137,16 @@
         s1 = Slice()
         s2 = Slice()
         p.slice = s2
-        verdict,_ = self.synchronizer.concrete_path_exists(p, s1)
-        
+        verdict, _ = self.synchronizer.concrete_path_exists(p, s1)
+
         self.assertFalse(verdict)
-    
+
     def test_concrete_object_path_adjacent(self):
         p = Instance()
         s = Slice()
         p.slice = s
         verdict, edge_type = self.synchronizer.concrete_path_exists(p, s)
-        
+
         self.assertTrue(verdict)
         self.assertEqual(edge_type, event_loop.DIRECT_EDGE)
 
@@ -154,11 +163,10 @@
         s1.controllerslices = mock_enumerator([cs])
         s2.controllerslices = mock_enumerator([])
 
-
         verdict1, edge_type1 = self.synchronizer.concrete_path_exists(p, cs)
         verdict2, _ = self.synchronizer.concrete_path_exists(q, cs)
         verdict3, _ = self.synchronizer.concrete_path_exists(p, cs2)
-        
+
         self.assertTrue(verdict1)
         self.assertFalse(verdict2)
         self.assertFalse(verdict3)
@@ -188,10 +196,10 @@
     def test_concrete_no_object_path_distant(self):
         p = Instance()
         s = Slice()
-        s.controllerslice=mock_enumerator([])
+        s.controllerslice = mock_enumerator([])
 
         t = Site()
-        t.controllersite=mock_enumerator([])
+        t.controllersite = mock_enumerator([])
 
         ct = ControllerSite()
         ct.site = Site()
@@ -209,9 +217,9 @@
         c.slice = None
         c.image = None
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i,p,c], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts([i, p, c], False)
         self.assertEqual(len(cohorts), 3)
-    
+
     def test_cohorting_related(self):
         i = Image()
         p = Port()
@@ -219,8 +227,8 @@
         c.image = i
         s = ControllerSlice()
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i,p,c,s], False)
-        self.assertIn([i,c], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([i, p, c, s], False)
+        self.assertIn([i, c], cohorts)
         self.assertIn([p], cohorts)
         self.assertIn([s], cohorts)
 
@@ -235,7 +243,7 @@
         s.controllerslices = mock_enumerator([cs])
         c.slice = s
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i,p,c,s,cs], False)
+        cohorts = self.synchronizer.compute_dependent_cohorts([i, p, c, s, cs], False)
 
         big_cohort = max(cohorts, key=len)
         self.assertGreater(big_cohort.index(c), big_cohort.index(i))
@@ -252,7 +260,7 @@
         cs.slice = s
         c.slice = s
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i,p,c,s,cs], True)
+        cohorts = self.synchronizer.compute_dependent_cohorts([i, p, c, s, cs], True)
 
         big_cohort = max(cohorts, key=len)
         self.assertGreater(big_cohort.index(i), big_cohort.index(c))
@@ -266,10 +274,11 @@
         c.image = i
         s = ControllerSlice()
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([i,p,c,s], True)
-        self.assertIn([c,i], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([i, p, c, s], True)
+        self.assertIn([c, i], cohorts)
         self.assertIn([p], cohorts)
         self.assertIn([s], cohorts)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/tests/test_services.py b/xos/synchronizers/new_base/tests/test_services.py
index fcfed20..e5e2d49 100644
--- a/xos/synchronizers/new_base/tests/test_services.py
+++ b/xos/synchronizers/new_base/tests/test_services.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,32 +18,42 @@
 import pdb
 import networkx as nx
 
-import os, sys
+import os
+import sys
 
 test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-xos_dir = os.path.join(test_path, '..', '..', '..')
+xos_dir = os.path.join(test_path, "..", "..", "..")
+
 
 class TestServices(unittest.TestCase):
     def setUp(self):
         self.sys_path_save = sys.path
         self.cwd_save = os.getcwd()
         sys.path.append(xos_dir)
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base'))
-        sys.path.append(os.path.join(xos_dir, 'synchronizers', 'new_base', 'tests', 'steps'))
+        sys.path.append(os.path.join(xos_dir, "synchronizers", "new_base"))
+        sys.path.append(
+            os.path.join(xos_dir, "synchronizers", "new_base", "tests", "steps")
+        )
 
         config = os.path.join(test_path, "test_config.yaml")
         from xosconfig import Config
-        Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
 
-        from synchronizers.new_base.mock_modelaccessor_build import build_mock_modelaccessor
+        Config.clear()
+        Config.init(config, "synchronizer-config-schema.yaml")
+
+        from synchronizers.new_base.mock_modelaccessor_build import (
+            build_mock_modelaccessor,
+        )
+
         build_mock_modelaccessor(xos_dir, services_dir=None, service_xprotos=[])
 
-        os.chdir(os.path.join(test_path, '..'))  # config references tests/model-deps
+        os.chdir(os.path.join(test_path, ".."))  # config references tests/model-deps
 
         import event_loop
+
         reload(event_loop)
         import backend
+
         reload(backend)
         from modelaccessor import model_accessor
 
@@ -65,17 +74,18 @@
         s = Service()
         a = ServiceInstance(owner=s)
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([a,s], False)
-        self.assertIn([s,a], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([a, s], False)
+        self.assertIn([s, a], cohorts)
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([s,a], False)
-        self.assertIn([s,a], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([s, a], False)
+        self.assertIn([s, a], cohorts)
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([a,s], True)
-        self.assertIn([a,s], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([a, s], True)
+        self.assertIn([a, s], cohorts)
 
-        cohorts = self.synchronizer.compute_dependent_cohorts([s,a], True)
-        self.assertIn([a,s], cohorts)
+        cohorts = self.synchronizer.compute_dependent_cohorts([s, a], True)
+        self.assertIn([a, s], cohorts)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/synchronizers/new_base/xos-policy.py b/xos/synchronizers/new_base/xos-policy.py
index 33935e8..cfb05d6 100644
--- a/xos/synchronizers/new_base/xos-policy.py
+++ b/xos/synchronizers/new_base/xos-policy.py
@@ -24,15 +24,13 @@
 
 import os
 import sys
-
-sys.path.append('/opt/xos')
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
-from xosconfig import Config
-
 import time
-from synchronizers.new_base.model_policy_loop import XOSPolicyEngine
+
+sys.path.append("/opt/xos")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
+
 from synchronizers.new_base.modelaccessor import *
+from synchronizers.new_base.model_policy_loop import XOSPolicyEngine
 
 from xosconfig import Config
 from multistructlog import create_logger
@@ -40,7 +38,7 @@
 
 def main():
 
-    log = create_logger(Config().get('logging'))
+    log = create_logger(Config().get("logging"))
 
     models_active = False
     wait = False
@@ -49,20 +47,22 @@
             _ = Instance.objects.first()
             _ = NetworkTemplate.objects.first()
             models_active = True
-        except Exception,e:
-            log.exception("Exception", e = e)
-            log.info('Waiting for data model to come up before starting...')
+        except Exception as e:
+            log.exception("Exception", e=e)
+            log.info("Waiting for data model to come up before starting...")
             time.sleep(10)
             wait = True
 
-    if (wait):
-        time.sleep(60) # Safety factor, seeing that we stumbled waiting for the data model to come up.
+    if wait:
+        time.sleep(
+            60
+        )  # Safety factor, seeing that we stumbled waiting for the data model to come up.
 
     # start model policies thread
     policies_dir = Config.get("model_policies_dir")
 
-
     XOSPolicyEngine(policies_dir=policies_dir, log=log).run()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/xos/synchronizers/new_base/xos-synchronizer.py b/xos/synchronizers/new_base/xos-synchronizer.py
index 22426a3..145c46e 100644
--- a/xos/synchronizers/new_base/xos-synchronizer.py
+++ b/xos/synchronizers/new_base/xos-synchronizer.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 
 # Copyright 2017-present Open Networking Foundation
 #
@@ -13,43 +14,45 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-#!/usr/bin/env python
 import os
-import argparse
 import sys
-
-sys.path.append('/opt/xos')
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 import time
+
+sys.path.append("/opt/xos")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
+
+from synchronizers.new_base.backend import Backend
 from synchronizers.new_base.modelaccessor import *
+
 from xosconfig import Config
 from multistructlog import create_logger
-from synchronizers.new_base.backend import Backend
+log = create_logger(Config().get("logging"))
 
-log = create_logger(Config().get('logging'))
+
 def main():
 
     models_active = False
     wait = False
     while not models_active:
         try:
-            _ = Instance.objects.first()
-            _ = NetworkTemplate.objects.first()
+            _i = Instance.objects.first()
+            _n = NetworkTemplate.objects.first()
             models_active = True
-        except Exception,e:
-            log.info("Exception", e = e)
-            log.info('Waiting for data model to come up before starting...')
+        except Exception as e:
+            log.info("Exception", e=e)
+            log.info("Waiting for data model to come up before starting...")
             time.sleep(10)
             wait = True
 
-    if (wait):
-        time.sleep(60) # Safety factor, seeing that we stumbled waiting for the data model to come up.
+    if wait:
+        time.sleep(
+            60
+        )  # Safety factor, seeing that we stumbled waiting for the data model to come up.
 
-    log_closure = log.bind(synchronizer_name = Config().get('name'))
-    backend = Backend(log = log_closure)
+    log_closure = log.bind(synchronizer_name=Config().get("name"))
+    backend = Backend(log=log_closure)
     backend.run()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/xos/tests/permissiontest.py b/xos/tests/permissiontest.py
index 15671ad..110171d 100644
--- a/xos/tests/permissiontest.py
+++ b/xos/tests/permissiontest.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,170 +16,209 @@
 import unittest
 from core.models import *
 
+
 class TestPermission(unittest.TestCase):
-    
     def setUp(self):
         self.test_objects = []
         # deployment
-        self.deployment = Deployment(name='TestDeployment')
+        self.deployment = Deployment(name="TestDeployment")
         self.deployment.save()
         self.test_objects.append(self.deployment)
         # site
-        self.site = Site(name='TestSite')
+        self.site = Site(name="TestSite")
         self.site.save()
         self.test_objects.append(self.site)
         # site deployment
-        self.site_deployment = SiteDeployment(site=self.site, deployment=self.deployment)
+        self.site_deployment = SiteDeployment(
+            site=self.site, deployment=self.deployment
+        )
         self.site_deployment.save()
         self.test_objects.append(self.site_deployment)
         # node
-        self.node = Node(name='TestNode', site_deployment=self.site_deployment)
+        self.node = Node(name="TestNode", site_deployment=self.site_deployment)
         self.node.save()
         self.test_objects.append(self.node)
         # slice
-        self.slice = Slice(name='TestSlice', site=self.site)
+        self.slice = Slice(name="TestSlice", site=self.site)
         self.slice.save()
         self.test_objects.appen(slice.slice)
         # admin user
-        self.user_admin = User(email='user_admin@test.com', first_name='Test', last_name='Test', is_admin=True)
+        self.user_admin = User(
+            email="user_admin@test.com",
+            first_name="Test",
+            last_name="Test",
+            is_admin=True,
+        )
         self.user_admin.site = self.site
         self.user_admin.save()
         self.test_objects.append(self.user_admin)
         # read only user
-        self.user_read_only = User(email='user_read_only@test.com', first_name='Test', last_name='Test')
+        self.user_read_only = User(
+            email="user_read_only@test.com", first_name="Test", last_name="Test"
+        )
         self.user_read_only.site = self.site
         self.user_read_only.save()
         self.test_objects.append(self.user_read_only)
         # default user
-        self.user_default = User(email='user_default@test.com', first_name='Test', last_name='Test')
-        self.user_default.site = self.site 
+        self.user_default = User(
+            email="user_default@test.com", first_name="Test", last_name="Test"
+        )
+        self.user_default.site = self.site
         self.user_default.save()
         self.test_objects.append(self.user_default)
 
-        # deployment admin 
-        self.user_deployment_admin = User(email='user_deployment_admin@test.com', first_name='Test', last_name='Test')
+        # deployment admin
+        self.user_deployment_admin = User(
+            email="user_deployment_admin@test.com", first_name="Test", last_name="Test"
+        )
         self.user_deployment_admin.site = self.site
         self.user_deployment_admin.save()
         self.test_objects.append(self.user_deployment_admin)
         deployment_privilege = DeploymentPrivilege(
-            user=self.user_deployment_admin,
-            deployment=self.deployment,
-            role='admin')
+            user=self.user_deployment_admin, deployment=self.deployment, role="admin"
+        )
         deployment_privilege.save()
         self.test_objects.append(deployment_privilege)
         # site admin
-        self.user_site_admin = User(email='user_site_admin@test.com', first_name='Test', last_name='Test')
+        self.user_site_admin = User(
+            email="user_site_admin@test.com", first_name="Test", last_name="Test"
+        )
         self.user_site_admin = self.site
         self.user_site_admin.save()
         self.test_objects.append(self.user_site_admin)
         site_admin_privilege = SitePrivilege(
-            user = self.user_site_admin,
-            site=self.site,
-            role='admin')
+            user=self.user_site_admin, site=self.site, role="admin"
+        )
         site_admin_privilege.save()
         self.test_objects.append(site_admin_privilege)
         # site pi
-        self.user_site_pi = User(email='user_site_pi@test.com', first_name='Test', last_name='Test')
+        self.user_site_pi = User(
+            email="user_site_pi@test.com", first_name="Test", last_name="Test"
+        )
         self.user_site_pi = self.site
         self.user_site_pi.save()
         self.test_objects.append(self.user_site_pi)
         site_pi_privilege = SitePrivilege(
-            user = self.user_site_pi,
-            site=self.site,
-            role='pi')
+            user=self.user_site_pi, site=self.site, role="pi"
+        )
         site_pi_privilege.save()
         self.test_objects.append(site_pi_privilege)
         # site tech
-        self.user_site_tech = User(email='user_site_tech@test.com', first_name='Test', last_name='Test')
+        self.user_site_tech = User(
+            email="user_site_tech@test.com", first_name="Test", last_name="Test"
+        )
         self.user_site_tech = self.site
         self.user_site_tech.save()
         self.test_objects.append(self.user_site_tech)
         site_tech_privilege = SitePrivilege(
-            user = self.user_site_tech,
-            site=self.site,
-            role='tech')
+            user=self.user_site_tech, site=self.site, role="tech"
+        )
         site_tech_privilege.save()
         self.test_objects.append(site_tech_privilege)
         # slice admin
-        self.user_slice_admin = User(email='user_slice_admin@test.com', first_name='Test', last_name='Test')
+        self.user_slice_admin = User(
+            email="user_slice_admin@test.com", first_name="Test", last_name="Test"
+        )
         self.user_slice_admin = self.site
         self.user_slice_admin.save()
         self.test_objects.append(self.user_slice_admin)
         slice_admin_privilege = SlicePrivilege(
-            user = self.user_slice_admin,
-            slice = self.slice,
-            role='admin')
+            user=self.user_slice_admin, slice=self.slice, role="admin"
+        )
         slice_admin_privilege.save()
         self.test_objects.append(slice_admin_privilege)
-        # slice access 
-        self.user_slice_access = User(email='user_slice_access@test.com', first_name='Test', last_name='Test')
-        self.user_slice_access = self.site 
+        # slice access
+        self.user_slice_access = User(
+            email="user_slice_access@test.com", first_name="Test", last_name="Test"
+        )
+        self.user_slice_access = self.site
         self.user_slice_access.save()
         self.test_objects.append(self.user_slice_access)
         slice_access_privilege = SlicePrivilege(
-            user = self.user_slice_access,
-            slice = self.slice,
-            role='access')
+            user=self.user_slice_access, slice=self.slice, role="access"
+        )
         slice_access_privilege.save()
         self.test_objects.append(slice_access_privilege)
 
-
     def test_deployment(self):
         for user in [self.user_admin, self.user_deployment_admin]:
-            self.assertEqual(
-                self.deployment.save(), None)
-        for user in [self.user_read_only, self.user_default, self.user_site_admin,
-                     self.user_site_pi, self.user_site_tech, self.user_slice_admin,
-                     self.user_slice_access]:
-            self.assertRaises(
-                PermissionDenied, 
-                self.deployment.save_by_user(user,))
+            self.assertEqual(self.deployment.save(), None)
+        for user in [
+            self.user_read_only,
+            self.user_default,
+            self.user_site_admin,
+            self.user_site_pi,
+            self.user_site_tech,
+            self.user_slice_admin,
+            self.user_slice_access,
+        ]:
+            self.assertRaises(PermissionDenied, self.deployment.save_by_user, user)
 
     def test_site(self):
         for user in [self.user_admin, self.user_site_admin, self.user_site_pi]:
-            self.assertEqual(
-                self.site.save_by_user(user), None)
-        for user in [self.user_read_only, self.user_default, self.user_deployment_admin,
-                     self.user_site_tech, self.user_slice_admin, self.user_slice_access]:
-            self.assertRaises(
-                PermissionDenied,
-                self.site.save_by_user(user,))
-    
+            self.assertEqual(self.site.save_by_user(user), None)
+        for user in [
+            self.user_read_only,
+            self.user_default,
+            self.user_deployment_admin,
+            self.user_site_tech,
+            self.user_slice_admin,
+            self.user_slice_access,
+        ]:
+            self.assertRaises(PermissionDenied, self.site.save_by_user, user)
+
     def test_node(self):
         for user in [self.user_admin, self.user_site_admin, self.user_site_tech]:
             self.assertEqual(self.node.save_by_user(user), None)
-        for user in [self.user_read_only, self.user_default, self.user_deployment_admin,
-                     self.user_site_pi, self.user_slice_admin, self.user_slice_access]:
-            self.assertRaises(
-                PermissionDenied,
-                self.node.save_by_user(user,))                 
-                                       
+        for user in [
+            self.user_read_only,
+            self.user_default,
+            self.user_deployment_admin,
+            self.user_site_pi,
+            self.user_slice_admin,
+            self.user_slice_access,
+        ]:
+            self.assertRaises(PermissionDenied, self.node.save_by_user, user)
+
     def test_slice(self):
-        for user in [self.user_admin, self.user_site_admin, self.user_site_pi, 
-                     self.user_slice_admin]:
-            self.assertEqual(
-                self.slice.save_by_user(user), None)
-        for user in [self.user_read_only, self.user_default, self.user_deployment_admin,
-                     self.user_site_tech, self.user_slice_access]:
-            self.assertRaises(
-                PermissionDenied,
-                self.slice.save_by_user(user,))
-            
+        for user in [
+            self.user_admin,
+            self.user_site_admin,
+            self.user_site_pi,
+            self.user_slice_admin,
+        ]:
+            self.assertEqual(self.slice.save_by_user(user), None)
+        for user in [
+            self.user_read_only,
+            self.user_default,
+            self.user_deployment_admin,
+            self.user_site_tech,
+            self.user_slice_access,
+        ]:
+            self.assertRaises(PermissionDenied, self.slice.save_by_user, user)
+
     def test_user(self):
-        for user in [self.user_admin, self.user_site_admin, self.user_deployment_admin,
-                     self.user_site_pi, self.user_default]:
-            self.assertEqual(
-                self.user_default.save_by_user(user), None)
-        for user in [self.user_read_only, self.user_deployment_admin, 
-                     self.user_site_tech, self.user_slice_admin, self.user_slice_access]:
-            self.assertRaises(
-                PermissionDenied,
-                self.user_default.save_by_user(user,))                    
-                                 
-         
+        for user in [
+            self.user_admin,
+            self.user_site_admin,
+            self.user_deployment_admin,
+            self.user_site_pi,
+            self.user_default,
+        ]:
+            self.assertEqual(self.user_default.save_by_user(user), None)
+        for user in [
+            self.user_read_only,
+            self.user_deployment_admin,
+            self.user_site_tech,
+            self.user_slice_admin,
+            self.user_slice_access,
+        ]:
+            self.assertRaises(PermissionDenied, self.user_default.save_by_user, user)
+
     def tearDown(self):
         for obj in self.test_objects:
-            obj.delete()       
-    
-if __name__ == '__main__':
-    unittest.main()     
+            obj.delete()
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/xos/tests/rest_useraccesstest.py b/xos/tests/rest_useraccesstest.py
index 64db088..c83648a 100644
--- a/xos/tests/rest_useraccesstest.py
+++ b/xos/tests/rest_useraccesstest.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,28 +23,31 @@
 """
 
 
+from __future__ import print_function
 import inspect
 import json
 import os
 import requests
 import sys
 import time
-from urllib  import urlencode
+from urllib import urlencode
 
 from operator import itemgetter, attrgetter
 
-if (len(sys.argv)!=6):
-    print "syntax: usertest <hostname> <username> <password> <admin_username> <admin_password>"
+if len(sys.argv) != 6:
+    print(
+        "syntax: usertest <hostname> <username> <password> <admin_username> <admin_password>"
+    )
     sys.exit(-1)
 
 hostname = sys.argv[1]
 username = sys.argv[2]
 password = sys.argv[3]
 
-opencloud_auth=(username, password)
-admin_auth=(sys.argv[4], sys.argv[5])
+opencloud_auth = (username, password)
+admin_auth = (sys.argv[4], sys.argv[5])
 
-REST_API="http://%s:8000/xos/" % hostname
+REST_API = "http://%s:8000/xos/" % hostname
 USERS_API = REST_API + "users/"
 SLICES_API = REST_API + "slices/"
 SITES_API = REST_API + "sites/"
@@ -54,19 +56,30 @@
 SITEROLE_API = REST_API + "site_roles/"
 SLICEROLE_API = REST_API + "slice_roles/"
 
-TEST_USER_EMAIL = "test" + str(time.time()) + "@test.com" # in case observer not running, objects won't be purged, so use unique email
+TEST_USER_EMAIL = (
+    "test" + str(time.time()) + "@test.com"
+)  # in case observer not running, objects won't be purged, so use unique email
+
 
 def fail_unless(x, msg):
     if not x:
-        (frame, filename, line_number, function_name, lines, index) = inspect.getouterframes(inspect.currentframe())[1]
-        print "FAIL (%s:%d)" % (function_name, line_number), msg
+        (
+            frame,
+            filename,
+            line_number,
+            function_name,
+            lines,
+            index,
+        ) = inspect.getouterframes(inspect.currentframe())[1]
+        print("FAIL (%s:%d)" % (function_name, line_number), msg)
 
-print "downloading objects using admin"
+
+print("downloading objects using admin")
 r = requests.get(USERS_API + "?no_hyperlinks=1", auth=admin_auth)
-fail_unless(r.status_code==200, "failed to get users")
+fail_unless(r.status_code == 200, "failed to get users")
 allUsers = r.json()
 r = requests.get(SLICES_API + "?no_hyperlinks=1", auth=admin_auth)
-fail_unless(r.status_code==200, "failed to get slices")
+fail_unless(r.status_code == 200, "failed to get slices")
 allSlices = r.json()
 r = requests.get(SITES_API + "?no_hyperlinks=1", auth=admin_auth)
 allSites = r.json()
@@ -79,62 +92,82 @@
 r = requests.get(SLICEROLE_API + "?no_hyperlinks=1", auth=admin_auth)
 allSliceRole = r.json()
 
+
 def should_see_user(myself, otherUser):
     if myself["is_admin"]:
         return True
     if myself["id"] == otherUser["id"]:
         return True
     for sitePriv in allSitePriv:
-        if (sitePriv["user"] == myself["id"]) and (sitePriv["site"] == otherUser["site"]):
+        if (sitePriv["user"] == myself["id"]) and (
+            sitePriv["site"] == otherUser["site"]
+        ):
             for role in allSiteRole:
-                if role["role"]=="pi" and role["id"] == sitePriv["role"]:
+                if role["role"] == "pi" and role["id"] == sitePriv["role"]:
                     return True
     return False
 
+
 def should_see_slice(myself, slice):
     if myself["is_admin"]:
         return True
     for sitePriv in allSitePriv:
         if (sitePriv["user"] == myself["id"]) and (sitePriv["site"] == slice["site"]):
             for role in allSiteRole:
-                if role["role"]=="pi" and role["id"] == sitePriv["role"]:
+                if role["role"] == "pi" and role["id"] == sitePriv["role"]:
                     return True
     for slicePriv in allSlicePriv:
         if (slicePriv["user"] == myself["id"]) and (sitePriv["slice"] == slice["id"]):
             for role in allSliceRole:
-                if role["role"]=="pi" and role["id"] == slicePriv["role"]:
+                if role["role"] == "pi" and role["id"] == slicePriv["role"]:
                     return True
     return False
 
+
 def flip_phone(user):
     if user["phone"] == "123":
         user["phone"] = "456"
     else:
         user["phone"] = "123"
 
+
 def flip_desc(slice):
     if slice["description"] == "some_description":
         slice["description"] = "some_other_description"
     else:
         slice["description"] = "some_description"
 
-def delete_user_if_exists(email):
-    r = requests.get(USERS_API +"?email=%s" % email, auth=admin_auth)
-    if r.status_code==200:
-        user = r.json()
-        if len(user)>0:
-            user=user[0]
-            r = requests.delete(USERS_API + str(user["id"]) + "/", auth=admin_auth)
-            fail_unless(r.status_code==200, "failed to delete the test user")
 
-print "  loaded user:%d slice:%d, site:%d, site_priv:%d slice_priv:%d" % (len(allUsers), len(allSlices), len(allSites), len(allSitePriv), len(allSlicePriv))
+def delete_user_if_exists(email):
+    r = requests.get(USERS_API + "?email=%s" % email, auth=admin_auth)
+    if r.status_code == 200:
+        user = r.json()
+        if len(user) > 0:
+            user = user[0]
+            r = requests.delete(USERS_API + str(user["id"]) + "/", auth=admin_auth)
+            fail_unless(r.status_code == 200, "failed to delete the test user")
+
+
+print(
+    "  loaded user:%d slice:%d, site:%d, site_priv:%d slice_priv:%d"
+    % (
+        len(allUsers),
+        len(allSlices),
+        len(allSites),
+        len(allSitePriv),
+        len(allSlicePriv),
+    )
+)
 
 # get our own user record
 
-r = requests.get(USERS_API + "?" + urlencode({"email": username, "no_hyperlinks": "1"}), auth=opencloud_auth)
-fail_unless(r.status_code==200, "failed to get user %s" % username)
+r = requests.get(
+    USERS_API + "?" + urlencode({"email": username, "no_hyperlinks": "1"}),
+    auth=opencloud_auth,
+)
+fail_unless(r.status_code == 200, "failed to get user %s" % username)
 myself = r.json()
-fail_unless(len(myself)==1, "wrong number of results when getting user %s" % username)
+fail_unless(len(myself) == 1, "wrong number of results when getting user %s" % username)
 myself = myself[0]
 
 # check to see that we see the users we should be able to
@@ -142,11 +175,17 @@
 r = requests.get(USERS_API, auth=opencloud_auth)
 myUsers = r.json()
 for user in myUsers:
-    fail_unless(should_see_user(myself, user), "saw user %s but we shouldn't have" % user["email"])
+    fail_unless(
+        should_see_user(myself, user),
+        "saw user %s but we shouldn't have" % user["email"],
+    )
 myUsersIds = [r["id"] for r in myUsers]
 for user in allUsers:
     if should_see_user(myself, user):
-        fail_unless(user["id"] in myUsersIds, "should have seen user %s but didnt" % user["email"])
+        fail_unless(
+            user["id"] in myUsersIds,
+            "should have seen user %s but didnt" % user["email"],
+        )
 
 # toggle the phone number on the users we should be able to
 
@@ -184,29 +223,45 @@
 
 # XXX - enacted and policed should not be required
 
-newUser = {"firstname": "test", "lastname": "1234", "email": TEST_USER_EMAIL, "password": "letmein", "site": allSites[0]["id"], "enacted": "2015-01-01T00:00", "policed": "2015-01-01T00:00"}
+newUser = {
+    "firstname": "test",
+    "lastname": "1234",
+    "email": TEST_USER_EMAIL,
+    "password": "letmein",
+    "site": allSites[0]["id"],
+    "enacted": "2015-01-01T00:00",
+    "policed": "2015-01-01T00:00",
+}
 r = requests.post(USERS_API + "?no_hyperlinks=1", data=newUser, auth=opencloud_auth)
 if myself["is_admin"]:
-    fail_unless(r.status_code==200, "failed to create %s" % TEST_USER_EMAIL)
+    fail_unless(r.status_code == 201, "failed to create %s" % TEST_USER_EMAIL)
 else:
-    fail_unless(r.status_code!=200, "created %s but we shouldn't have been able to" % TEST_USER_EMAIL)
+    fail_unless(
+        r.status_code != 200,
+        "created %s but we shouldn't have been able to" % TEST_USER_EMAIL,
+    )
 
 delete_user_if_exists(TEST_USER_EMAIL)
 
 # now create it as admin
 r = requests.post(USERS_API + "?no_hyperlinks=1", data=newUser, auth=admin_auth)
-if (r.status_code!=201):
-    print r.text
-fail_unless(r.status_code==201, "failed to create %s as admin" % TEST_USER_EMAIL)
+if r.status_code != 201:
+    print(r.text)
+fail_unless(r.status_code == 201, "failed to create %s as admin" % TEST_USER_EMAIL)
 
-r = requests.get(USERS_API +"?" + urlencode({"email": TEST_USER_EMAIL}), auth=admin_auth)
-fail_unless(r.status_code==200, "failed to get user %s" % TEST_USER_EMAIL)
-user=r.json()[0]
+r = requests.get(
+    USERS_API + "?" + urlencode({"email": TEST_USER_EMAIL}), auth=admin_auth
+)
+fail_unless(r.status_code == 200, "failed to get user %s" % TEST_USER_EMAIL)
+user = r.json()[0]
 r = requests.delete(USERS_API + str(user["id"]) + "/", auth=opencloud_auth)
 if myself["is_admin"]:
-    fail_unless(r.status_code==200, "failed to delete %s" % TEST_USER_EMAIL)
+    fail_unless(r.status_code == 200, "failed to delete %s" % TEST_USER_EMAIL)
 else:
-    fail_unless(r.status_code!=200, "deleted %s but we shouldn't have been able to" % TEST_USER_EMAIL)
+    fail_unless(
+        r.status_code != 200,
+        "deleted %s but we shouldn't have been able to" % TEST_USER_EMAIL,
+    )
 
 # slice tests
 
@@ -214,19 +269,30 @@
 mySlices = r.json()
 
 for slice in mySlices:
-    fail_unless(should_see_slice(myself, slice), "saw slice %s but we shouldn't have" % slice["name"])
+    fail_unless(
+        should_see_slice(myself, slice),
+        "saw slice %s but we shouldn't have" % slice["name"],
+    )
 mySlicesIds = [r["id"] for r in mySlices]
 for slice in allSlices:
     if should_see_slice(myself, slice):
-        fail_unless(slice["id"] in mySliceIds, "should have seen slice %s but didnt" % slice["name"])
+        fail_unless(
+            slice["id"] in mySlicesIds,
+            "should have seen slice %s but didnt" % slice["name"],
+        )
 
 for slice in allSlices:
     slice = requests.get(SLICES_API + str(slice["id"]) + "/", auth=admin_auth).json()
     flip_desc(slice)
-    r = requests.put(SLICES_API + str(slice["id"]) +"/", data=slice, auth=opencloud_auth)
+    r = requests.put(
+        SLICES_API + str(slice["id"]) + "/", data=slice, auth=opencloud_auth
+    )
     if should_see_slice(myself, slice):
-        fail_unless(r.status_code==200, "failed to change desc on %s" % slice["name"])
+        fail_unless(r.status_code == 200, "failed to change desc on %s" % slice["name"])
     else:
-        fail_unless(r.status_code!=200, "was able to change desc on %s but shouldn't have" % slice["name"])
+        fail_unless(
+            r.status_code != 200,
+            "was able to change desc on %s but shouldn't have" % slice["name"],
+        )
 
-print "Done."
+print("Done.")
diff --git a/xos/tests/rest_usertest.py b/xos/tests/rest_usertest.py
index a37805f..894111b 100644
--- a/xos/tests/rest_usertest.py
+++ b/xos/tests/rest_usertest.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,6 +21,7 @@
                 Only admins should be able to set their is_admin bit
 """
 
+from __future__ import print_function
 import json
 import os
 import requests
@@ -30,24 +30,26 @@
 
 from operator import itemgetter, attrgetter
 
-if (len(sys.argv)!=6):
-    print "syntax: usertest <hostname> <username> <password> <admin_username> <admin_password>"
+if len(sys.argv) != 6:
+    print(
+        "syntax: usertest <hostname> <username> <password> <admin_username> <admin_password>"
+    )
     sys.exit(-1)
 
 hostname = sys.argv[1]
 username = sys.argv[2]
 password = sys.argv[3]
 
-opencloud_auth=(username, password)
-admin_auth=(sys.argv[4], sys.argv[5])
+opencloud_auth = (username, password)
+admin_auth = (sys.argv[4], sys.argv[5])
 
-REST_API="http://%s:8000/xos/" % hostname
+REST_API = "http://%s:8000/xos/" % hostname
 USERS_API = REST_API + "users/"
 
-print "fetching user record for %s:" % username
+print("fetching user record for %s:" % username)
 r = requests.get(USERS_API + "?" + urlencode({"email": username}), auth=opencloud_auth)
 for user in r.json():
-    print "  ", user["email"]
+    print("  ", user["email"])
 
 myself = r.json()[0]
 
@@ -56,42 +58,42 @@
 else:
     myself["phone"] = "123"
 
-r = requests.put(USERS_API + str(myself["id"]) +"/", data=myself, auth=opencloud_auth)
+r = requests.put(USERS_API + str(myself["id"]) + "/", data=myself, auth=opencloud_auth)
 if r.status_code == 200:
-    print "I updated my phone to", myself["phone"]
+    print("I updated my phone to", myself["phone"])
 else:
-    print "I failed to update my phone"
+    print("I failed to update my phone")
 
-if myself["is_admin"] == True:
+if myself["is_admin"]:
     myself["is_admin"] = False
 else:
     myself["is_admin"] = True
 
-r = requests.put(USERS_API + str(myself["id"]) +"/", data=myself, auth=opencloud_auth)
+r = requests.put(USERS_API + str(myself["id"]) + "/", data=myself, auth=opencloud_auth)
 if r.status_code == 200:
-    print "I updated my is_admin to", myself["is_admin"]
+    print("I updated my is_admin to", myself["is_admin"])
 else:
-    print "I failed to update my is_admin"
+    print("I failed to update my is_admin")
 
 r = requests.get(USERS_API + "?email=jhh@cs.arizona.edu", auth=opencloud_auth)
-if len(r.json())>0:
-    print "I was able to read jhh@cs.arizona.edu"
+if len(r.json()) > 0:
+    print("I was able to read jhh@cs.arizona.edu")
 else:
-    print "I was not able to read jhh@cs.arizona.edu"
+    print("I was not able to read jhh@cs.arizona.edu")
 
 # get john's record using admin, so we can try to update it
 r = requests.get(USERS_API + "?email=jhh@cs.arizona.edu", auth=admin_auth)
-if len(r.json())>0:
-    print "Admin was able to read jhh@cs.arizona.edu"
+if len(r.json()) > 0:
+    print("Admin was able to read jhh@cs.arizona.edu")
     jhh = r.json()[0]
 else:
-    print "Admin was not able to read jhh@cs.arizona.edu"
+    print("Admin was not able to read jhh@cs.arizona.edu")
     jhh = None
 
 if jhh:
     # try to update john's user record
     r = requests.put(USERS_API + str(jhh["id"]) + "/", data=jhh, auth=opencloud_auth)
     if r.status_code == 200:
-        print "I was able to update user", str(jhh["id"])
+        print("I was able to update user", str(jhh["id"]))
     else:
-        print "I was not able to update user", str(jhh["id"])
+        print("I was not able to update user", str(jhh["id"]))
diff --git a/xos/tests/ui-e2e/xos-e2e-test.py b/xos/tests/ui-e2e/xos-e2e-test.py
index 4da52f0..aa0830c 100644
--- a/xos/tests/ui-e2e/xos-e2e-test.py
+++ b/xos/tests/ui-e2e/xos-e2e-test.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -27,12 +26,12 @@
 class XosUI(unittest.TestCase):
     """Test cases for XOS"""
 
-    url = 'http://127.0.0.1:8000/'
+    url = "http://127.0.0.1:8000/"
 
     def setUp(self):
-        if(hasattr(self, 'browser') and self.browser == 'firefox'):
+        if hasattr(self, "browser") and self.browser == "firefox":
             self.driver = webdriver.Firefox()
-        elif(hasattr(self, 'browser') and self.browser == 'chrome'):
+        elif hasattr(self, "browser") and self.browser == "chrome":
             self.driver = webdriver.Chrome()
         else:
             self.driver = webdriver.PhantomJS()
@@ -45,72 +44,88 @@
         # self.driver.close()
 
     def doLogin(self):
-        username = self.driver.find_element_by_css_selector('#id_username')
-        password = self.driver.find_element_by_css_selector('#id_password')
-        sign_in = self.driver.find_element_by_css_selector('.btn.btn-primary')
+        username = self.driver.find_element_by_css_selector("#id_username")
+        password = self.driver.find_element_by_css_selector("#id_password")
+        sign_in = self.driver.find_element_by_css_selector(".btn.btn-primary")
 
-        username.send_keys('padmin@vicci.org')
-        password.send_keys('letmein')
+        username.send_keys("padmin@vicci.org")
+        password.send_keys("letmein")
         sign_in.click()
 
     def goToPage(self, page):
-        self.driver.get(self.url + '/admin/core/' + page)
+        self.driver.get(self.url + "/admin/core/" + page)
 
     def listPageAssertion(self, page):
         self.doLogin()
         self.goToPage(page)
-        title = self.driver.find_element_by_css_selector('#content > h2').text
-        assert (title == 'Select %s to change' % page), 'Title is wrong!'
+        title = self.driver.find_element_by_css_selector("#content > h2").text
+        assert title == "Select %s to change" % page, "Title is wrong!"
 
-        add_button = self.driver.find_element_by_css_selector('.addlink.btn.btn-success')
+        add_button = self.driver.find_element_by_css_selector(
+            ".addlink.btn.btn-success"
+        )
         assert add_button
 
-        table = self.driver.find_element_by_css_selector('table')
-        rows = table.find_elements_by_css_selector('tbody tr')
-        assert (len(rows) == 1), 'Elements are not printed in table!'
+        table = self.driver.find_element_by_css_selector("table")
+        rows = table.find_elements_by_css_selector("tbody tr")
+        assert len(rows) == 1, "Elements are not printed in table!"
 
     def detailPageAssertion(self, page, expectedTabs):
         self.doLogin()
         self.goToPage(page)
         try:
-            detail_link = self.driver.find_element_by_css_selector('table tbody tr td > a')
-        except:
+            detail_link = self.driver.find_element_by_css_selector(
+                "table tbody tr td > a"
+            )
+        except BaseException:
             # the user template is different, it has just one th (with the link) in the table,
             # not sure why or how to fix it
-            detail_link = self.driver.find_element_by_css_selector('table tbody tr th > a')
+            detail_link = self.driver.find_element_by_css_selector(
+                "table tbody tr th > a"
+            )
         detail_link.click()
 
-        title = self.driver.find_element_by_css_selector('#content > h2').text
-        assert (title == 'Change %s' % page), 'Expected "%s" to be "%s"!' % (title, 'Change %s' % page)
+        title = self.driver.find_element_by_css_selector("#content > h2").text
+        assert title == "Change %s" % page, 'Expected "%s" to be "%s"!' % (
+            title,
+            "Change %s" % page,
+        )
 
-        tabs = self.driver.find_elements_by_css_selector('#suit_form_tabs > li')
-        assert (len(tabs) == expectedTabs), 'Found %s of %s expected tabs' % (len(tabs), expectedTabs)
+        tabs = self.driver.find_elements_by_css_selector("#suit_form_tabs > li")
+        assert len(tabs) == expectedTabs, "Found %s of %s expected tabs" % (
+            len(tabs),
+            expectedTabs,
+        )
 
-        activeTab = self.driver.find_element_by_css_selector('#suit_form_tabs > li.active')
-        assert (activeTab), 'No tab is active!'
+        activeTab = self.driver.find_element_by_css_selector(
+            "#suit_form_tabs > li.active"
+        )
+        assert activeTab, "No tab is active!"
 
-        saveBtn = self.driver.find_element_by_css_selector('.btn.btn-success')
-        assert saveBtn, 'Save button is missing'
+        saveBtn = self.driver.find_element_by_css_selector(".btn.btn-success")
+        assert saveBtn, "Save button is missing"
 
         continueBtn = self.driver.find_element_by_css_selector('[name="_continue"]')
-        assert continueBtn, 'Save and continue button is missing'
+        assert continueBtn, "Save and continue button is missing"
 
         addanotherBtn = self.driver.find_element_by_css_selector('[name="_addanother"]')
-        assert addanotherBtn, 'Save and continue button is missing'
+        assert addanotherBtn, "Save and continue button is missing"
 
-        deleteBtn = self.driver.find_element_by_css_selector('.btn.btn-danger')
-        assert deleteBtn, 'Delete button is missing'
+        deleteBtn = self.driver.find_element_by_css_selector(".btn.btn-danger")
+        assert deleteBtn, "Delete button is missing"
 
     def test_login_page(self):
         """
         Test that the login page has the login form
         """
 
-        login_container = self.driver.find_element_by_css_selector('body.login #content-main')
+        login_container = self.driver.find_element_by_css_selector(
+            "body.login #content-main"
+        )
         assert login_container
-        username = login_container.find_element_by_css_selector('#id_username')
-        password = login_container.find_element_by_css_selector('#id_password')
-        sign_in = login_container.find_element_by_css_selector('.btn.btn-primary')
+        username = login_container.find_element_by_css_selector("#id_username")
+        password = login_container.find_element_by_css_selector("#id_password")
+        sign_in = login_container.find_element_by_css_selector(".btn.btn-primary")
         assert username
         assert password
         assert sign_in
@@ -122,53 +137,54 @@
         self.doLogin()
 
         # if we have a sidebar the login has worked
-        sidebar = self.driver.find_element_by_css_selector('#sidebar-wrapper')
+        sidebar = self.driver.find_element_by_css_selector("#sidebar-wrapper")
         assert sidebar
 
     def test_deployment_list(self):
-        self.listPageAssertion('deployment')
+        self.listPageAssertion("deployment")
 
     def test_deployment_detail(self):
-        self.detailPageAssertion('deployment', 3)
+        self.detailPageAssertion("deployment", 3)
 
     def test_site_list(self):
-        self.listPageAssertion('site')
+        self.listPageAssertion("site")
 
     def test_site_detail(self):
-        self.detailPageAssertion('site', 6)
+        self.detailPageAssertion("site", 6)
 
     def test_slice_list(self):
-        self.listPageAssertion('slice')
+        self.listPageAssertion("slice")
 
     def test_slice_detail(self):
-        self.detailPageAssertion('slice', 6)
+        self.detailPageAssertion("slice", 6)
 
     def test_user_list(self):
-        self.listPageAssertion('user')
+        self.listPageAssertion("user")
 
     def test_user_detail(self):
-        self.detailPageAssertion('user', 5)
+        self.detailPageAssertion("user", 5)
 
     def test_service_list(self):
         self.doLogin()
-        self.driver.get(self.url + '/serviceGrid/')
+        self.driver.get(self.url + "/serviceGrid/")
         element = WebDriverWait(self.driver, 10).until(
-            EC.visibility_of_element_located((By.CSS_SELECTOR, 'xos-table table'))
+            EC.visibility_of_element_located((By.CSS_SELECTOR, "xos-table table"))
         )
-        serviceGrid = self.driver.find_element_by_css_selector('service-grid')
+        serviceGrid = self.driver.find_element_by_css_selector("service-grid")
         assert serviceGrid, "Service Grid not found"
 
-        serviceList = serviceGrid.find_elements_by_css_selector('tbody')
-        services = serviceList[1].find_elements_by_css_selector('tr')
-        assert (len(services) == 4), 'Found %s of %s expected tabs' % (len(services), 4)
+        serviceList = serviceGrid.find_elements_by_css_selector("tbody")
+        services = serviceList[1].find_elements_by_css_selector("tr")
+        assert len(services) == 4, "Found %s of %s expected tabs" % (len(services), 4)
 
-        addBtn = self.driver.find_element_by_css_selector('.btn.btn-success')
-        assert addBtn, 'Add button is missing'
+        addBtn = self.driver.find_element_by_css_selector(".btn.btn-success")
+        assert addBtn, "Add button is missing"
+
 
 if __name__ == "__main__":
     if len(sys.argv) > 1:
         XosUI.browser = sys.argv[1]
-        if(sys.argv[2]):
+        if sys.argv[2]:
             XosUI.url = sys.argv[2]
             del sys.argv[2]
         del sys.argv[1]
diff --git a/xos/tools/ansible_hosts.py b/xos/tools/ansible_hosts.py
index b0f297a..a9a18d6 100644
--- a/xos/tools/ansible_hosts.py
+++ b/xos/tools/ansible_hosts.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 
 #! /usr/bin/env python
 
+from __future__ import print_function
 import json
 import os
 import requests
@@ -25,13 +25,14 @@
 
 opencloud_auth = None
 
-REST_API="http://portal.opencloud.us/xos/"
+REST_API = "http://portal.opencloud.us/xos/"
 
 NODES_API = REST_API + "nodes/"
 SITES_API = REST_API + "sites/"
 SLICES_API = REST_API + "slices/"
 SLIVERS_API = REST_API + "instance/"
 
+
 def get_nodes_by_site():
     r = requests.get(SITES_API + "?no_hyperlinks=1", auth=opencloud_auth)
     sites_list = r.json()
@@ -49,6 +50,7 @@
 
     return sites
 
+
 """
    WIP
 
@@ -81,17 +83,18 @@
     return slices
 """
 
+
 def main():
     global opencloud_auth
 
-    if len(sys.argv)!=3:
-        print >> sys.stderr, "syntax: get_instance_name.py <username>, <password>"
+    if len(sys.argv) != 3:
+        print("syntax: get_instance_name.py <username>, <password>", file=sys.stderr)
         sys.exit(-1)
 
     username = sys.argv[1]
     password = sys.argv[2]
 
-    opencloud_auth=(username, password)
+    opencloud_auth = (username, password)
 
     sites = get_nodes_by_site()
 
@@ -99,17 +102,17 @@
         if not site["hostnames"]:
             continue
 
-        print "[%s]" % site["name"]
+        print("[%s]" % site["name"])
         for hostname in site["hostnames"]:
-            print hostname
-        print ""
+            print(hostname)
+        print("")
 
-    print "[all-opencloud:children]"
+    print("[all-opencloud:children]")
     for site in sites.values():
         if not site["hostnames"]:
             continue
-        print site["name"]
+        print(site["name"])
+
 
 if __name__ == "__main__":
     main()
-
diff --git a/xos/tools/cleanup_unique.py b/xos/tools/cleanup_unique.py
index 3f6d167..b4a887f 100644
--- a/xos/tools/cleanup_unique.py
+++ b/xos/tools/cleanup_unique.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,111 +13,127 @@
 # limitations under the License.
 
 
+from __future__ import print_function
 import os
 import sys
+
 sys.path.append("/opt/xos")
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 import django
 from core.models import *
+
 django.setup()
 
 for obj in ControllerNetwork.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
 for obj in ControllerSite.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
 for obj in ControllerSlice.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
 for obj in NetworkSlice.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
 for obj in Port.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
 for obj in DeploymentPrivilege.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
 for obj in SiteDeployment.deleted_objects.all():
-    print "Purging deleted object", obj
+    print("Purging deleted object", obj)
     obj.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in ControllerNetwork.objects.all():
-     seen.append(obj.id)
-     conflicts = ControllerNetwork.objects.filter(network=obj.network, controller=obj.controller)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = ControllerNetwork.objects.filter(
+        network=obj.network, controller=obj.controller
+    )
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in NetworkSlice.objects.all():
-     seen.append(obj.id)
-     conflicts = NetworkSlice.objects.filter(network=obj.network, slice=obj.slice)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = NetworkSlice.objects.filter(network=obj.network, slice=obj.slice)
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in Port.objects.all():
-     seen.append(obj.id)
-     conflicts = Port.objects.filter(network=obj.network, instance=obj.instanc)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = Port.objects.filter(network=obj.network, instance=obj.instance)
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in DeploymentPrivilege.objects.all():
-     seen.append(obj.id)
-     conflicts = DeploymentPrivilege.objects.filter(user=obj.user, deployment=obj.deployment, role=obj.role)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = DeploymentPrivilege.objects.filter(
+        user=obj.user, deployment=obj.deployment, role=obj.role
+    )
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in Privilege.objects.all():
-     seen.append(obj.id)
-     conflicts = Privilege.objects.filter(accessor_id=obj.accessor_id, object_id=obj.object_id, permission=obj.permission, accessor_type=obj.accessor_type, object_type=obj.object_type)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = Privilege.objects.filter(
+        accessor_id=obj.accessor_id,
+        object_id=obj.object_id,
+        permission=obj.permission,
+        accessor_type=obj.accessor_type,
+        object_type=obj.object_type,
+    )
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in SiteDeployment.objects.all():
-     seen.append(obj.id)
-     conflicts = SiteDeployment.objects.filter(site=obj.site, deployment=obj.deployment, controller=obj.controller)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = SiteDeployment.objects.filter(
+        site=obj.site, deployment=obj.deployment, controller=obj.controller
+    )
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in ControllerSite.objects.all():
-     seen.append(obj.id)
-     conflicts = ControllerSite.objects.filter(site=obj.site, controller=obj.controller)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
+    seen.append(obj.id)
+    conflicts = ControllerSite.objects.filter(site=obj.site, controller=obj.controller)
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
 
-seen=[]
+seen = []
 for obj in ControllerSlice.objects.all():
-     seen.append(obj.id)
-     conflicts = ControllerSlice.objects.filter(slice=obj.slice, controller=obj.controller)
-     for conflict in conflicts:
-         if conflict.id not in seen:
-             print "Purging", conflict, conflict.id, "due to duplicate of", obj.id
-             conflict.delete(purge=True)
-
+    seen.append(obj.id)
+    conflicts = ControllerSlice.objects.filter(
+        slice=obj.slice, controller=obj.controller
+    )
+    for conflict in conflicts:
+        if conflict.id not in seen:
+            print("Purging", conflict, conflict.id, "due to duplicate of", obj.id)
+            conflict.delete(purge=True)
diff --git a/xos/tools/get_instance_ip.py b/xos/tools/get_instance_ip.py
index 0e91fc3..4cb49d2 100644
--- a/xos/tools/get_instance_ip.py
+++ b/xos/tools/get_instance_ip.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 
 #! /usr/bin/env python
 
+from __future__ import print_function
 import json
 import os
 import requests
@@ -24,24 +24,27 @@
 
 from operator import itemgetter, attrgetter
 
+
 def get_slice_id(slice_name):
     r = requests.get(SLICES_API + "?name=%s" % slice_name, auth=OPENCLOUD_AUTH)
-    if (r.status_code!=200):
-        print >> sys.stderr, "Error: Slice REST API failed"
+    if r.status_code != 200:
+        print("Error: Slice REST API failed", file=sys.stderr)
         sys.exit(-1)
     return r.json()[0]["id"]
 
+
 def get_node_id(host_name):
-     r = requests.get(NODES_API, auth=OPENCLOUD_AUTH)
-     if (r.status_code!=200):
-        print >> sys.stderr, "Error: Node REST API failed"
+    r = requests.get(NODES_API, auth=OPENCLOUD_AUTH)
+    if r.status_code != 200:
+        print("Error: Node REST API failed", file=sys.stderr)
         sys.exit(-1)
-     nodes = r.json()
-     for node in nodes:
-         if node["name"].lower() == host_name.lower():
-             return node["id"]
-     print >> sys.stderr, "Error: failed to find node %s" % host_name
-     sys.exit(-1)
+    nodes = r.json()
+    for node in nodes:
+        if node["name"].lower() == host_name.lower():
+            return node["id"]
+    print("Error: failed to find node %s" % host_name, file=sys.stderr)
+    sys.exit(-1)
+
 
 def get_instances(slice_id=None, node_id=None):
     queries = []
@@ -56,40 +59,89 @@
         query_string = ""
 
     r = requests.get(INSTANCES_API + query_string, auth=OPENCLOUD_AUTH)
-    if (r.status_code!=200):
-        print >> sys.stderr, "Error: Instance REST API failed"
+    if r.status_code != 200:
+        print("Error: Instance REST API failed", file=sys.stderr)
         sys.exit(-1)
     return r.json()
 
+
 def get_networks():
     r = requests.get(NETWORKS_API, auth=OPENCLOUD_AUTH)
-    if (r.status_code!=200):
-        print >> sys.stderr, "Error: Network REST API failed"
+    if r.status_code != 200:
+        print("Error: Network REST API failed", file=sys.stderr)
         sys.exit(-1)
     return r.json()
 
+
 def main():
     global OPENCLOUD_AUTH, REST_API, NODES_API, SLICES_API, INSTANCES_API, PORTS_API, NETWORKS_API
 
-    parser = OptionParser(usage="get_instance_ip.py [options] <rest_hostname> <rest_port>", )
+    parser = OptionParser(
+        usage="get_instance_ip.py [options] <rest_hostname> <rest_port>"
+    )
 
-    parser.add_option("-u", "--username", dest="username", help="XOS admin username", metavar="NAME", default="padmin@vicci.org")
-    parser.add_option("-p", "--password", dest="password", help="XOS admin password", metavar="PASSWORD", default="letmein")
-    parser.add_option("-n", "--node", dest="node", help="show instances on node", metavar="HOSTNAME", default=None)
-    parser.add_option("-s", "--slice", dest="slice", help="show instances in slice", metavar="SLICENAME", default=None)
-    parser.add_option("-N", "--network", dest="filter_network_name", help="filter network name", metavar="NAME", default=None)
-    parser.add_option("-b", "--brief", dest="brief", help="only display the IP, nothing else", action="store_true", default=False)
+    parser.add_option(
+        "-u",
+        "--username",
+        dest="username",
+        help="XOS admin username",
+        metavar="NAME",
+        default="padmin@vicci.org",
+    )
+    parser.add_option(
+        "-p",
+        "--password",
+        dest="password",
+        help="XOS admin password",
+        metavar="PASSWORD",
+        default="letmein",
+    )
+    parser.add_option(
+        "-n",
+        "--node",
+        dest="node",
+        help="show instances on node",
+        metavar="HOSTNAME",
+        default=None,
+    )
+    parser.add_option(
+        "-s",
+        "--slice",
+        dest="slice",
+        help="show instances in slice",
+        metavar="SLICENAME",
+        default=None,
+    )
+    parser.add_option(
+        "-N",
+        "--network",
+        dest="filter_network_name",
+        help="filter network name",
+        metavar="NAME",
+        default=None,
+    )
+    parser.add_option(
+        "-b",
+        "--brief",
+        dest="brief",
+        help="only display the IP, nothing else",
+        action="store_true",
+        default=False,
+    )
 
     (options, args) = parser.parse_args(sys.argv[1:])
 
-    if len(args)!=2:
-        print >> sys.stderr, "syntax: get_instance_name.py [options] <rest_hostname> <rest_port>"
+    if len(args) != 2:
+        print(
+            "syntax: get_instance_name.py [options] <rest_hostname> <rest_port>",
+            file=sys.stderr,
+        )
         sys.exit(-1)
 
     rest_hostname = args[0]
     rest_port = args[1]
 
-    REST_API="http://%s:%s/api/core/" % (rest_hostname, rest_port)
+    REST_API = "http://%s:%s/api/core/" % (rest_hostname, rest_port)
 
     NODES_API = REST_API + "nodes/"
     SLICES_API = REST_API + "slices/"
@@ -97,7 +149,7 @@
     PORTS_API = REST_API + "ports/"
     NETWORKS_API = REST_API + "networks/"
 
-    OPENCLOUD_AUTH=(options.username, options.password)
+    OPENCLOUD_AUTH = (options.username, options.password)
 
     if options.slice:
         slice_id = get_slice_id(options.slice)
@@ -118,21 +170,26 @@
     # get (instance_name, ip) pairs for instances with names and ips
 
     instances = [x for x in instances if x["instance_name"]]
-    instances = sorted(instances, key = lambda instance: instance["instance_name"])
+    instances = sorted(instances, key=lambda instance: instance["instance_name"])
 
     for instance in instances:
-        r = requests.get(PORTS_API + "?instance=%s&no_hyperlinks=1" % instance["id"], auth=OPENCLOUD_AUTH)
+        r = requests.get(
+            PORTS_API + "?instance=%s&no_hyperlinks=1" % instance["id"],
+            auth=OPENCLOUD_AUTH,
+        )
         ports = r.json()
 
         for x in ports:
-           net_name = networks_by_id.get(x["network"],{"name": "unknown"})["name"]
-           if (options.filter_network_name) and (net_name!=options.filter_network_name):
-              continue
-           if options.brief:
-                print x["ip"]
-           else:
-               print instance["instance_name"], net_name, x["ip"]
+            net_name = networks_by_id.get(x["network"], {"name": "unknown"})["name"]
+            if (options.filter_network_name) and (
+                net_name != options.filter_network_name
+            ):
+                continue
+            if options.brief:
+                print(x["ip"])
+            else:
+                print(instance["instance_name"], net_name, x["ip"])
+
 
 if __name__ == "__main__":
     main()
-
diff --git a/xos/tools/get_instance_name.py b/xos/tools/get_instance_name.py
index 8b0cf5c..3ae9850 100644
--- a/xos/tools/get_instance_name.py
+++ b/xos/tools/get_instance_name.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,31 +15,35 @@
 
 #! /usr/bin/env python
 
+from __future__ import print_function
 import json
 import os
 import requests
 import sys
 
-REST_API="http://alpha.opencloud.us:8000/xos/"
+REST_API = "http://alpha.opencloud.us:8000/xos/"
 
 NODES_API = REST_API + "nodes/"
 SLICES_API = REST_API + "slices/"
 SLIVERS_API = REST_API + "instances/"
 
-opencloud_auth=("demo@onlab.us", "demo")
+opencloud_auth = ("demo@onlab.us", "demo")
+
 
 def get_slice_id(slice_name):
     r = requests.get(SLICES_API + "?name=%s" % slice_name, auth=opencloud_auth)
     return r.json()[0]["id"]
 
+
 def get_node_id(host_name):
-     r = requests.get(NODES_API)
-     nodes = r.json()
-     for node in nodes:
-         if node["name"].lower() == host_name.lower():
-             return node["id"]
-     print >> sys.stderr, "Error: failed to find node %s" % host_name
-     sys.exit(-1)
+    r = requests.get(NODES_API)
+    nodes = r.json()
+    for node in nodes:
+        if node["name"].lower() == host_name.lower():
+            return node["id"]
+    print("Error: failed to find node %s" % host_name, file=sys.stderr)
+    sys.exit(-1)
+
 
 def get_instances(slice_id=None, node_id=None):
     queries = []
@@ -57,11 +60,15 @@
     r = requests.get(SLIVERS_API + query_string, auth=opencloud_auth)
     return r.json()
 
+
 def main():
     global opencloud_auth
 
-    if len(sys.argv)!=5:
-        print >> sys.stderr, "syntax: get_instance_name.py <username>, <password>, <hostname> <slicename>"
+    if len(sys.argv) != 5:
+        print(
+            "syntax: get_instance_name.py <username>, <password>, <hostname> <slicename>",
+            file=sys.stderr,
+        )
         sys.exit(-1)
 
     username = sys.argv[1]
@@ -69,7 +76,7 @@
     hostname = sys.argv[3]
     slice_name = sys.argv[4]
 
-    opencloud_auth=(username, password)
+    opencloud_auth = (username, password)
 
     slice_id = get_slice_id(slice_name)
     node_id = get_node_id(hostname)
@@ -79,8 +86,8 @@
 
     # return the last one in the list (i.e. the newest one)
 
-    print sorted(instance_names)[-1]
+    print(sorted(instance_names)[-1])
+
 
 if __name__ == "__main__":
     main()
-
diff --git a/xos/tools/openstack-healthcheck.py b/xos/tools/openstack-healthcheck.py
index dd7e40a..f4180d6 100644
--- a/xos/tools/openstack-healthcheck.py
+++ b/xos/tools/openstack-healthcheck.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,41 +21,60 @@
     then restart them.
 """
 
+from __future__ import print_function
 import os
 import sys
 import subprocess
 import time
 
+
 def get_systemd_status(service):
-    p=subprocess.Popen(["/bin/systemctl", "is-active", service], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    p = subprocess.Popen(
+        ["/bin/systemctl", "is-active", service],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
     (out, err) = p.communicate()
     out = out.strip()
     return out
 
-libvirt_enabled = os.system("systemctl -q is-enabled libvirtd.service")==0
-nova_compute_enabled = os.system("systemctl -q is-enabled openstack-nova-compute.service")==0
-openvswitch_agent_enabled = os.system("systemctl -q is-enabled quantum-openvswitch-agent.service")==0
 
-print "enabled:"
-print "  libvirtd=", libvirt_enabled
-print "  openstack-nova-compute=", nova_compute_enabled
-print "  quantum-openvswitch-agent=", openvswitch_agent_enabled
+libvirt_enabled = os.system("systemctl -q is-enabled libvirtd.service") == 0
+nova_compute_enabled = (
+    os.system("systemctl -q is-enabled openstack-nova-compute.service") == 0
+)
+openvswitch_agent_enabled = (
+    os.system("systemctl -q is-enabled quantum-openvswitch-agent.service") == 0
+)
 
-if (not libvirt_enabled) or (not nova_compute_enabled) or (not openvswitch_agent_enabled):
-    print "services are not enabled. exiting"
+print("enabled:")
+print("  libvirtd=", libvirt_enabled)
+print("  openstack-nova-compute=", nova_compute_enabled)
+print("  quantum-openvswitch-agent=", openvswitch_agent_enabled)
+
+if (
+    (not libvirt_enabled)
+    or (not nova_compute_enabled)
+    or (not openvswitch_agent_enabled)
+):
+    print("services are not enabled. exiting")
     sys.exit(0)
 
 libvirt_status = get_systemd_status("libvirtd.service")
 nova_compute_status = get_systemd_status("openstack-nova-compute.service")
 openvswitch_agent_status = get_systemd_status("quantum-openvswitch-agent.service")
 
-print "status:"
-print "  libvirtd=", libvirt_status
-print "  openstack-nova-compute=", nova_compute_status
-print "  quantum-openvswitch-agent=", openvswitch_agent_status
+print("status:")
+print("  libvirtd=", libvirt_status)
+print("  openstack-nova-compute=", nova_compute_status)
+print("  quantum-openvswitch-agent=", openvswitch_agent_status)
 
-if (libvirt_status=="failed") or (nova_compute_status=="failed") or (openvswitch_agent_status=="failed"):
-    print "services have failed. doing the big restart"
+if (
+    (libvirt_status == "failed")
+    or (nova_compute_status == "failed")
+    or (openvswitch_agent_status == "failed")
+):
+    print("services have failed. doing the big restart")
     os.system("systemctl stop openstack-nova-compute.service")
     os.system("systemctl stop quantum-openvswitch-agent.service")
     os.system("systemctl stop libvirtd.service")
@@ -66,8 +84,4 @@
     os.system("systemctl start quantum-openvswitch-agent.service")
     time.sleep(5)
     os.system("systemctl start openstack-nova-compute.service")
-    print "done"
-
-
-
-
+    print("done")
diff --git a/xos/tools/purge.py b/xos/tools/purge.py
index 797187d..333b7c6 100644
--- a/xos/tools/purge.py
+++ b/xos/tools/purge.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,17 +13,21 @@
 # limitations under the License.
 
 
 import os
 import sys
+
 sys.path.append("/opt/xos")
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 import django
 from core.models import *
+
 django.setup()
 
+
 def purge(cls):
     for obj in cls.deleted_objects.all():
         obj.delete(purge=True)
 
+
 for model in [Instance, Slice, Site, Service, User, Image, ImageDeployments, Port]:
     purge(model)
diff --git a/xos/tools/rebuild.py b/xos/tools/rebuild.py
index 4377cd8..bb0939a 100755
--- a/xos/tools/rebuild.py
+++ b/xos/tools/rebuild.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,20 +13,22 @@
 # limitations under the License.
 
 
-#!/usr/bin/env python                                                                                                               
+#!/usr/bin/env python
 
+from __future__ import print_function
 import os
 import sys
+
 sys.path.append("/opt/xos")
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
 import django
 from core.models import XOS
+
 django.setup()
 
 xoses = XOS.objects.all()
 if not xoses:
-    print "There is no XOS model"
+    print("There is no XOS model")
 
 for xos in xoses:
     xos.rebuild()
-
diff --git a/xos/tools/xossh b/xos/tools/xossh
deleted file mode 100755
index afa033f..0000000
--- a/xos/tools/xossh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# This is a stub for launching xossh in the xosproject/xos-client container.
-# It's assumed that this script is being invoked on the head node. 
-
-docker pull docker-registry:5000/xosproject/xos-client:candidate
-docker run --rm -it -v /opt/cord_profile/im_cert_chain.pem:/usr/local/share/ca-certificates/local_certs.crt docker-registry:5000/xosproject/xos-client:candidate -u xosadmin@opencord.org -p `cat /opt/credentials/xosadmin@opencord.org`
\ No newline at end of file
diff --git a/xos/tosca/__init__.py b/xos/tosca/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/tosca/__init__.py
+++ b/xos/tosca/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/xos/__init__.py b/xos/xos/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/xos/__init__.py
+++ b/xos/xos/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/xos/admin_customize/__init__.py b/xos/xos/admin_customize/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/xos/admin_customize/__init__.py
+++ b/xos/xos/admin_customize/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/xos/admin_customize/templatetags/__init__.py b/xos/xos/admin_customize/templatetags/__init__.py
index 65fdf5b..b0fb0b2 100644
--- a/xos/xos/admin_customize/templatetags/__init__.py
+++ b/xos/xos/admin_customize/templatetags/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-
diff --git a/xos/xos/admin_customize/templatetags/admin_modify.py b/xos/xos/admin_customize/templatetags/admin_modify.py
index 45f54c5..f685cc6 100644
--- a/xos/xos/admin_customize/templatetags/admin_modify.py
+++ b/xos/xos/admin_customize/templatetags/admin_modify.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,27 +14,42 @@
 
 
 from django.contrib.admin.templatetags.admin_modify import *
-from django.contrib.admin.templatetags.admin_modify import submit_row as original_submit_row
+from django.contrib.admin.templatetags.admin_modify import (
+    submit_row as original_submit_row,
+)
 from django.conf import settings
 import random
-@register.inclusion_tag('admin/submit_line.html', takes_context=True)
+
+
+@register.inclusion_tag("admin/submit_line.html", takes_context=True)
 def submit_row(context):
     ctx = original_submit_row(context)
-    ctx.update({
-        'show_save': context.get('show_save', ctx['show_save']),
-        'show_save_and_add_another': context.get('show_save_and_add_another', ctx['show_save_and_add_another']),
-        'show_save_and_continue': context.get('show_save_and_continue', ctx['show_save_and_continue']),
-        'custom_delete_url': context.get("custom_delete_url",None),
-        })                                                                  
-    return ctx 
-
+    ctx.update(
+        {
+            "show_save": context.get("show_save", ctx["show_save"]),
+            "show_save_and_add_another": context.get(
+                "show_save_and_add_another", ctx["show_save_and_add_another"]
+            ),
+            "show_save_and_continue": context.get(
+                "show_save_and_continue", ctx["show_save_and_continue"]
+            ),
+            "custom_delete_url": context.get("custom_delete_url", None),
+        }
+    )
+    return ctx
 
 
 @register.simple_tag
 def random_str(a):
-    a = ["You are now signed out. Thank you and have a great day",
-         "Thanks for spending some quality time with the Web site today.",
-         "Thanks for using " +settings.XOS_BRANDING_NAME + " to manage your network today.",
-         "You have successfully logged out, Thanks for spending some quality time",
-         "The "+settings.XOS_BRANDING_NAME +" team is glad that you used our product to get your work done."]
-    return a[random.randint(0,4)]
+    a = [
+        "You are now signed out. Thank you and have a great day",
+        "Thanks for spending some quality time with the Web site today.",
+        "Thanks for using "
+        + settings.XOS_BRANDING_NAME
+        + " to manage your network today.",
+        "You have successfully logged out, Thanks for spending some quality time",
+        "The "
+        + settings.XOS_BRANDING_NAME
+        + " team is glad that you used our product to get your work done.",
+    ]
+    return a[random.randint(0, 4)]
diff --git a/xos/xos/apibase.py b/xos/xos/apibase.py
index 44fb6bd..c5344e4 100644
--- a/xos/xos/apibase.py
+++ b/xos/xos/apibase.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,26 +23,29 @@
 from django.core.exceptions import ValidationError as DjangoValidationError
 from xos.exceptions import *
 
+
 class XOSRetrieveUpdateDestroyAPIView(generics.RetrieveUpdateDestroyAPIView):
 
     # To handle fine-grained field permissions, we have to check can_update
     # after the object has been updated but before it has been saved.
 
     def update(self, request, *args, **kwargs):
-        partial = kwargs.pop('partial', False)
+        partial = kwargs.pop("partial", False)
         self.object = self.get_object()
 
         if self.object is None:
             raise XOSProgrammingError("Use the List API for creating objects")
 
-        serializer = self.get_serializer(self.object, data=request.data, partial=partial)
+        serializer = self.get_serializer(
+            self.object, data=request.data, partial=partial
+        )
 
         if not serializer.is_valid():
-            raise XOSValidationError('Invalid serializer', fields=serializer._errors)
+            raise XOSValidationError("Invalid serializer", fields=serializer._errors)
 
         # Do the XOS perm check
 
-        assert(serializer.instance is not None)
+        assert serializer.instance is not None
         obj = serializer.instance
         for attr, value in serializer.validated_data.items():
             setattr(obj, attr, value)
@@ -63,12 +65,22 @@
         # REST API drops the string attached to Django's PermissionDenied
         # exception, and replaces it with a generic "Permission Denied"
         if isinstance(exc, DjangoPermissionDenied):
-            response=Response({'detail': {"error": "PermissionDenied", "specific_error": str(exc), "fields": {}}}, status=status.HTTP_403_FORBIDDEN)
-            response.exception=True
+            response = Response(
+                {
+                    "detail": {
+                        "error": "PermissionDenied",
+                        "specific_error": str(exc),
+                        "fields": {},
+                    }
+                },
+                status=status.HTTP_403_FORBIDDEN,
+            )
+            response.exception = True
             return response
         else:
             return super(XOSRetrieveUpdateDestroyAPIView, self).handle_exception(exc)
 
+
 class XOSListCreateAPIView(generics.ListCreateAPIView):
     def create(self, request, *args, **kwargs):
         serializer = self.get_serializer(data=request.data)
@@ -76,7 +88,7 @@
         # In rest_framework 3.x: we can pass raise_exception=True instead of
         # raising the exception ourselves
         if not serializer.is_valid():
-            raise XOSValidationError('Invalid serializer', fields=serializer._errors)
+            raise XOSValidationError("Invalid serializer", fields=serializer._errors)
 
         # now do XOS can_update permission checking
         obj = serializer.Meta.model(**serializer.validated_data)
@@ -84,16 +96,25 @@
         self.perform_create(serializer)
 
         headers = self.get_success_headers(serializer.data)
-        return Response(serializer.data, status=status.HTTP_201_CREATED,
-                        headers=headers)
+        return Response(
+            serializer.data, status=status.HTTP_201_CREATED, headers=headers
+        )
 
     def handle_exception(self, exc):
         # REST API drops the string attached to Django's PermissionDenied
         # exception, and replaces it with a generic "Permission Denied"
         if isinstance(exc, DjangoPermissionDenied):
-            response=Response({'detail': {"error": "PermissionDenied", "specific_error": str(exc), "fields": {}}}, status=status.HTTP_403_FORBIDDEN)
-            response.exception=True
+            response = Response(
+                {
+                    "detail": {
+                        "error": "PermissionDenied",
+                        "specific_error": str(exc),
+                        "fields": {},
+                    }
+                },
+                status=status.HTTP_403_FORBIDDEN,
+            )
+            response.exception = True
             return response
         else:
             return super(XOSListCreateAPIView, self).handle_exception(exc)
-
diff --git a/xos/xos/exceptions.py b/xos/xos/exceptions.py
index aac2f4e..1033138 100644
--- a/xos/xos/exceptions.py
+++ b/xos/xos/exceptions.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,136 +17,149 @@
 from rest_framework.exceptions import APIException
 from rest_framework.exceptions import PermissionDenied as RestFrameworkPermissionDenied
 
+
 def _get_json_error_details(data):
     """
     Convert error details to JSON
     """
     if isinstance(data, dict):
-        ret = {
-            key: value for key, value in data.items()
-        }
+        ret = {key: value for key, value in data.items()}
     elif isinstance(data, list):
-        ret = [
-            item for item in data
-        ]
+        ret = [item for item in data]
 
     return json.dumps(ret)
 
 
 class XOSProgrammingError(APIException):
-    status_code=400
+    status_code = 400
+
     def __init__(self, why="programming error", fields={}):
         raw_detail = {
             "error": "XOSProgrammingError",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSPermissionDenied(RestFrameworkPermissionDenied):
     def __init__(self, why="permission error", fields={}):
         raw_detail = {
             "error": "XOSPermissionDenied",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSNotAuthenticated(RestFrameworkPermissionDenied):
-    status_code=401
+    status_code = 401
+
     def __init__(self, why="you must be authenticated to use this api", fields={}):
         raw_detail = {
             "error": "XOSNotAuthenticated",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSNotFound(RestFrameworkPermissionDenied):
-    status_code=404
+    status_code = 404
+
     def __init__(self, why="object not found", fields={}):
-        raw_detail = {
-            "error": "XOSNotFound",
-            "specific_error": why,
-            "fields": fields
-        }
+        raw_detail = {"error": "XOSNotFound", "specific_error": why, "fields": fields}
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSValidationError(APIException):
-    status_code=403
+    status_code = 403
+
     def __init__(self, why="validation error", fields={}):
         raw_detail = {
             "error": "XOSValidationError",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSDuplicateKey(APIException):
-    status_code=400
+    status_code = 400
+
     def __init__(self, why="duplicate key", fields={}):
         raw_detail = {
             "error": "XOSDuplicateKey",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSMissingField(APIException):
-    status_code=400
+    status_code = 400
+
     def __init__(self, why="missing field", fields={}):
         raw_detail = {
             "error": "XOSMissingField",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSConfigurationError(APIException):
-    status_code=400
+    status_code = 400
+
     def __init__(self, why="configuration error", fields={}):
         raw_detail = {
             "error": "XOSConfigurationError",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSConflictingField(APIException):
-    status_code=400
+    status_code = 400
+
     def __init__(self, why="conflicting field", fields={}):
         raw_detail = {
             "error": "XOSMissingField",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
         self.json_detail = _get_json_error_details(raw_detail)
 
+
 class XOSServiceUnavailable(APIException):
-    status_code=503
-    def __init__(self, why="Service temporarily unavailable, try again later", fields={}):
+    status_code = 503
+
+    def __init__(
+        self, why="Service temporarily unavailable, try again later", fields={}
+    ):
         raw_detail = {
             "error": "XOSServiceUnavailable",
             "specific_error": why,
-            "fields": fields
+            "fields": fields,
         }
         APIException.__init__(self, raw_detail)
         self.raw_detail = raw_detail
diff --git a/xos/xos/exceptions_test.py b/xos/xos/exceptions_test.py
index ec00646..c52aa7d 100644
--- a/xos/xos/exceptions_test.py
+++ b/xos/xos/exceptions_test.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,23 +13,26 @@
 # limitations under the License.
 
 
+from xos.exceptions import *
+import xos.exceptions
 import unittest
 import sys
 import os
 import inspect
 import json
-sys.path.append(os.path.abspath('..'))
+
+sys.path.append(os.path.abspath(".."))
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
-import xos.exceptions
-from xos.exceptions import *
+
 
 class TestXosExceptions(unittest.TestCase):
     """
     Test the conversion from exception to JSON
     """
+
     def test_get_json_error_details(self):
-        res = xos.exceptions._get_json_error_details({'foo': 'bar'})
-        assert res == json.dumps({"foo":"bar"})
+        res = xos.exceptions._get_json_error_details({"foo": "bar"})
+        assert res == json.dumps({"foo": "bar"})
 
     def test_exceptions(self):
         """
@@ -38,11 +40,17 @@
          validate the json_detail output
         """
         for name, item in inspect.getmembers(xos.exceptions):
-            if inspect.isclass(item) and name.startswith('XOS'):
-                e = item('test error', {'foo': 'bar'})
+            if inspect.isclass(item) and name.startswith("XOS"):
+                e = item("test error", {"foo": "bar"})
                 res = e.json_detail
                 assert res == json.dumps(
-                    {"fields": {"foo": "bar"}, "specific_error": "test error", "error": name})
+                    {
+                        "fields": {"foo": "bar"},
+                        "specific_error": "test error",
+                        "error": name,
+                    }
+                )
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/xos/xos/logger.py b/xos/xos/logger.py
index e7cf2cf..8fc96fc 100644
--- a/xos/xos/logger.py
+++ b/xos/xos/logger.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -39,6 +38,7 @@
 # IN THE WORK.
 # ----------------------------------------------------------------------
 
+from __future__ import print_function
 import os
 import sys
 import traceback
@@ -62,22 +62,23 @@
 
         # Logstash config
         try:
-            logstash_host, logstash_port = 'cordloghost', '5617'
+            logstash_host, logstash_port = "cordloghost", "5617"
             logstash_handler = logstash.LogstashHandler(
-                logstash_host, int(logstash_port), version=1)
+                logstash_host, int(logstash_port), version=1
+            )
             # always log at DEBUG level to logstash
             logstash_handler.setLevel(logging.DEBUG)
             raise Exception("Disabled")
-        except:
+        except BaseException:
             # if connection fails (eg: logstash is not there) just move on
             logstash_handler = None
 
         # default is to locate loggername from the logfile if avail.
         if not logfile:
-            logfile = '/var/log/xos_legacy.log'
+            logfile = "/var/log/xos_legacy.log"
 
         # allow config-file override of console/logfile level
-        level_str = 'info'
+        level_str = "info"
         if level_str:
             level_str = level_str.lower()
 
@@ -90,7 +91,7 @@
         elif level_str == "error":
             level = logging.ERROR
 
-        if (logfile == "console"):
+        if logfile == "console":
             loggername = "console"
             handler = logging.StreamHandler()
 
@@ -99,27 +100,32 @@
                 loggername = os.path.basename(logfile)
             try:
                 handler = logging.handlers.RotatingFileHandler(
-                    logfile, maxBytes=1000000, backupCount=5)
+                    logfile, maxBytes=1000000, backupCount=5
+                )
             except IOError:
                 # This is usually a permissions error because the file is
                 # owned by root, but httpd is trying to access it.
-                tmplogfile = os.getenv(
-                    "TMPDIR", "/tmp") + os.path.sep + os.path.basename(logfile)
+                tmplogfile = (
+                    os.getenv("TMPDIR", "/tmp")
+                    + os.path.sep
+                    + os.path.basename(logfile)
+                )
                 # In strange uses, 2 users on same machine might use same code,
                 # meaning they would clobber each others files
                 # We could (a) rename the tmplogfile, or (b)
                 # just log to the console in that case.
                 # Here we default to the console.
-                if os.path.exists(tmplogfile) and not os.access(
-                        tmplogfile, os.W_OK):
+                if os.path.exists(tmplogfile) and not os.access(tmplogfile, os.W_OK):
                     loggername = loggername + "-console"
                     handler = logging.StreamHandler()
                 else:
                     handler = logging.handlers.RotatingFileHandler(
-                        tmplogfile, maxBytes=1000000, backupCount=5)
+                        tmplogfile, maxBytes=1000000, backupCount=5
+                    )
 
-        handler.setFormatter(logging.Formatter(
-            "%(asctime)s - %(levelname)s - %(message)s"))
+        handler.setFormatter(
+            logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
+        )
         self.logger = logging.getLogger(loggername)
         self.logger.setLevel(level)
 
@@ -127,7 +133,7 @@
         if not len(self.logger.handlers):
             self.logger.addHandler(handler)
 
-            if (logstash_handler):
+            if logstash_handler:
                 self.logger.addHandler(logstash_handler)
 
         self.loggername = loggername
@@ -153,6 +159,7 @@
             self.logger.setLevel(logging.INFO)
         elif verbose >= 2:
             self.logger.setLevel(logging.DEBUG)
+
     # in case some other code needs a boolean
 
     def getBoolVerboseFromOpt(self, verbose):
@@ -166,8 +173,8 @@
     def extract_context(self, cur):
         try:
             observer_name = Config.get("name")
-            cur['synchronizer_name'] = observer_name
-        except:
+            cur["synchronizer_name"] = observer_name
+        except BaseException:
             pass
 
         self.sanitize_extra_args(cur)
@@ -175,12 +182,12 @@
 
     def sanitize_extra_args(self, extra):
         illegal_keys = logging.LogRecord(
-            None, None, None, None, None, None, None, None).__dict__.keys()
+            None, None, None, None, None, None, None, None
+        ).__dict__.keys()
         for k in illegal_keys:
             try:
                 del extra[k]
-                self.logger.warn(
-                    "*** WARNING: Dropped field %s from extra args ***")
+                self.logger.warn("*** WARNING: Dropped field %s from extra args ***")
             except KeyError:
                 pass
 
@@ -213,21 +220,17 @@
     def log_exc(self, message, extra={}):
         extra = self.extract_context(extra)
         self.error(
-            "%s BEG TRACEBACK" %
-            message +
-            "\n" +
-            traceback.format_exc().strip("\n"),
-            extra=extra)
+            "%s BEG TRACEBACK" % message + "\n" + traceback.format_exc().strip("\n"),
+            extra=extra,
+        )
         self.error("%s END TRACEBACK" % message, extra=extra)
 
     def log_exc_critical(self, message, extra={}):
         extra = self.extract_context(extra)
         self.critical(
-            "%s BEG TRACEBACK" %
-            message +
-            "\n" +
-            traceback.format_exc().strip("\n"),
-            extra=extra)
+            "%s BEG TRACEBACK" % message + "\n" + traceback.format_exc().strip("\n"),
+            extra=extra,
+        )
         self.critical("%s END TRACEBACK" % message, extra=extra)
 
     # for investigation purposes, can be placed anywhere
@@ -244,16 +247,15 @@
         self.logger.addHandler(handler)
 
 
-info_logger = Logger(loggername='info', level=logging.INFO)
-debug_logger = Logger(loggername='debug', level=logging.DEBUG)
-warn_logger = Logger(loggername='warning', level=logging.WARNING)
-error_logger = Logger(loggername='error', level=logging.ERROR)
-critical_logger = Logger(loggername='critical', level=logging.CRITICAL)
+info_logger = Logger(loggername="info", level=logging.INFO)
+debug_logger = Logger(loggername="debug", level=logging.DEBUG)
+warn_logger = Logger(loggername="warning", level=logging.WARNING)
+error_logger = Logger(loggername="error", level=logging.ERROR)
+critical_logger = Logger(loggername="critical", level=logging.CRITICAL)
 logger = info_logger
 observer_logger = Logger(
-    logfile='/var/log/observer.log',
-    loggername='observer',
-    level=logging.DEBUG)
+    logfile="/var/log/observer.log", loggername="observer", level=logging.DEBUG
+)
 ########################################
 
 
@@ -265,32 +267,41 @@
     def foo(...):
         ...
     """
+
     def logger_profile(callable):
         def wrapper(*args, **kwds):
             start = time.time()
             result = callable(*args, **kwds)
             end = time.time()
             args = map(str, args)
-            args += ["%s = %s" % (name, str(value))
-                     for (name, value) in kwds.iteritems()]
+            args += [
+                "%s = %s" % (name, str(value)) for (name, value) in kwds.iteritems()
+            ]
             # should probably use debug, but then debug is not always enabled
-            logger.info("PROFILED %s (%s): %.02f s" %
-                        (callable.__name__, ", ".join(args), end - start))
+            logger.info(
+                "PROFILED %s (%s): %.02f s"
+                % (callable.__name__, ", ".join(args), end - start)
+            )
             return result
+
         return wrapper
+
     return logger_profile
 
 
-if __name__ == '__main__':
-    print 'testing logging into logger.log'
-    logger1 = Logger('logger.log', loggername='std(info)')
-    logger2 = Logger('logger.log', loggername='error', level=logging.ERROR)
-    logger3 = Logger('logger.log', loggername='debug', level=logging.DEBUG)
+if __name__ == "__main__":
+    print("testing logging into logger.log")
+    logger1 = Logger("logger.log", loggername="std(info)")
+    logger2 = Logger("logger.log", loggername="error", level=logging.ERROR)
+    logger3 = Logger("logger.log", loggername="debug", level=logging.DEBUG)
 
-    for (logger, msg) in [(logger1, "std(info)"),
-                          (logger2, "error"), (logger3, "debug")]:
+    for (logger, msg) in [
+        (logger1, "std(info)"),
+        (logger2, "error"),
+        (logger3, "debug"),
+    ]:
 
-        print "====================", msg, logger.logger.handlers
+        print("====================", msg, logger.logger.handlers)
 
         logger.enable_console()
         logger.critical("logger.critical")
@@ -305,7 +316,7 @@
         def sleep(seconds=1):
             time.sleep(seconds)
 
-        logger.info('console.info')
+        logger.info("console.info")
         sleep(0.5)
         logger.setLevel(logging.DEBUG)
         sleep(0.25)
diff --git a/xos/xos/settings.py b/xos/xos/settings.py
index 57ffec1..d8e4314 100644
--- a/xos/xos/settings.py
+++ b/xos/xos/settings.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -29,7 +28,7 @@
     Config.init()
 
 GEOIP_PATH = "/usr/share/GeoIP"
-XOS_DIR = Config.get('xos_dir')
+XOS_DIR = Config.get("xos_dir")
 
 DEBUG = True
 TEMPLATE_DEBUG = DEBUG
@@ -42,22 +41,22 @@
 )
 
 # LOGIN_REDIRECT_URL = '/admin/core/user'
-LOGIN_REDIRECT_URL = '/admin/loggedin/'
+LOGIN_REDIRECT_URL = "/admin/loggedin/"
 
 MANAGERS = ADMINS
 
 DATABASES = {
-    'default': {
-        'ENGINE': 'django.db.backends.postgresql_psycopg2',
-        'NAME': Config.get('database.name'),
-        'USER': Config.get('database.username'),
-        'PASSWORD': Config.get('database.password'),
-        'HOST': "xos-db",
-        'PORT': 5432,
+    "default": {
+        "ENGINE": "django.db.backends.postgresql_psycopg2",
+        "NAME": Config.get("database.name"),
+        "USER": Config.get("database.username"),
+        "PASSWORD": Config.get("database.password"),
+        "HOST": "xos-db",
+        "PORT": 5432,
     }
 }
 
-AUTH_USER_MODEL = 'core.User'
+AUTH_USER_MODEL = "core.User"
 
 # Hosts/domain names that are valid for this site; required if DEBUG is False
 # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
@@ -67,17 +66,20 @@
 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
 # although not all choices may be available on all operating systems.
 # In a Windows environment this must be set to your system time zone.
-TIME_ZONE = 'America/New_York'
+TIME_ZONE = "America/New_York"
 
 # Verbose warnings when a naive datetime is used, gives a traceback
 # from: https://docs.djangoproject.com/en/1.9/topics/i18n/timezones/#code
 warnings.filterwarnings(
-        'error', r"DateTimeField .* received a naive datetime",
-        RuntimeWarning, r'django\.db\.models\.fields')
+    "error",
+    r"DateTimeField .* received a naive datetime",
+    RuntimeWarning,
+    r"django\.db\.models\.fields",
+)
 
 # Language code for this installation. All choices can be found here:
 # http://www.i18nguy.com/unicode/language-identifiers.html
-LANGUAGE_CODE = 'en-us'
+LANGUAGE_CODE = "en-us"
 
 SITE_ID = 1
 
@@ -94,83 +96,81 @@
 
 # Absolute filesystem path to the directory that will hold user-uploaded files.
 # Example: "/var/www/example.com/media/"
-MEDIA_ROOT = '/var/www/html/files/'
+MEDIA_ROOT = "/var/www/html/files/"
 
 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
 # trailing slash.
 # Examples: "http://example.com/media/", "http://media.example.com/"
-MEDIA_URL = '/files/'
+MEDIA_URL = "/files/"
 
 # Absolute path to the directory static files should be collected to.
 # Don't put anything in this directory yourself; store your static files
 # in apps' "static/" subdirectories and in STATICFILES_DIRS.
 # Example: "/var/www/example.com/static/"
-STATIC_ROOT = ''
+STATIC_ROOT = ""
 
 # URL prefix for static files.
 # Example: "http://example.com/static/", "http://static.example.com/"
-STATIC_URL = '/static/'
+STATIC_URL = "/static/"
 
 # Additional locations of static files
-STATICFILES_DIRS = ( XOS_DIR + "/core/static/",
-                     XOS_DIR + "/core/xoslib/static/",
-)
+STATICFILES_DIRS = (XOS_DIR + "/core/static/", XOS_DIR + "/core/xoslib/static/")
 
 # List of finder classes that know how to find static files in
 # various locations.
 STATICFILES_FINDERS = (
-    'django.contrib.staticfiles.finders.FileSystemFinder',
-    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
-#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
+    "django.contrib.staticfiles.finders.FileSystemFinder",
+    "django.contrib.staticfiles.finders.AppDirectoriesFinder",
+    #    'django.contrib.staticfiles.finders.DefaultStorageFinder',
 )
 
 # Make this unique, and don't share it with anybody.
-SECRET_KEY = 'i0=a)c7_#2)5m%k_fu#%53xap$tlqc+#&z5as+bl7&)(@be_f9'
+SECRET_KEY = "i0=a)c7_#2)5m%k_fu#%53xap$tlqc+#&z5as+bl7&)(@be_f9"
 
 # List of callables that know how to import templates from various sources.
 TEMPLATE_LOADERS = (
-    'django.template.loaders.filesystem.Loader',
-    'django.template.loaders.app_directories.Loader',
-#     'django.template.loaders.eggs.Loader',
+    "django.template.loaders.filesystem.Loader",
+    "django.template.loaders.app_directories.Loader",
+    #     'django.template.loaders.eggs.Loader',
 )
 
 MIDDLEWARE_CLASSES = (
-    'django.middleware.common.CommonMiddleware',
-    'django.contrib.sessions.middleware.SessionMiddleware',
-    'django.middleware.csrf.CsrfViewMiddleware',
-    'django.contrib.auth.middleware.AuthenticationMiddleware',
-    'django.contrib.messages.middleware.MessageMiddleware',
-    'core.middleware.GlobalRequestMiddleware',
+    "django.middleware.common.CommonMiddleware",
+    "django.contrib.sessions.middleware.SessionMiddleware",
+    "django.middleware.csrf.CsrfViewMiddleware",
+    "django.contrib.auth.middleware.AuthenticationMiddleware",
+    "django.contrib.messages.middleware.MessageMiddleware",
+    "core.middleware.GlobalRequestMiddleware",
     # Uncomment the next line for simple clickjacking protection:
     # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
 )
 
-ROOT_URLCONF = 'xos.urls'
+ROOT_URLCONF = "xos.urls"
 
 # Python dotted path to the WSGI application used by Django's runserver.
-WSGI_APPLICATION = 'xos.wsgi.application'
+WSGI_APPLICATION = "xos.wsgi.application"
 # Default: 'csrftoken'
-CSRF_COOKIE_NAME = 'xoscsrftoken'
+CSRF_COOKIE_NAME = "xoscsrftoken"
 # Default: 'django_language'
-LANGUAGE_COOKIE_NAME = 'xos_django_language'
+LANGUAGE_COOKIE_NAME = "xos_django_language"
 # Default: 'sessionid'
-SESSION_COOKIE_NAME = 'xossessionid'
+SESSION_COOKIE_NAME = "xossessionid"
 
 TEMPLATE_DIRS = (
     # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
     # Always use forward slashes, even on Windows.
     # Don't forget to use absolute paths, not relative paths.
     XOS_DIR + "/templates",
-#    XOS_DIR + "/core/xoslib/templates",
+    #    XOS_DIR + "/core/xoslib/templates",
 )
 
 INSTALLED_APPS = (
-    'django.contrib.auth',
-    'django.contrib.contenttypes',
-    'django.contrib.sessions',
-    'django.contrib.messages',
-    'django_extensions',
-    'core',
+    "django.contrib.auth",
+    "django.contrib.contenttypes",
+    "django.contrib.sessions",
+    "django.contrib.messages",
+    "django_extensions",
+    "core",
 )
 
 # add services that were configured by xosbuilder to INSTALLED_APPS
@@ -186,39 +186,30 @@
 # See http://docs.djangoproject.com/en/dev/topics/logging for
 # more details on how to customize your logging configuration.
 LOGGING = {
-    'version': 1,
-    'disable_existing_loggers': False,
-    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse'
-        }
-    },
-    'handlers': {
-        'file': {
-            'level': 'DEBUG',
-            'class': 'logging.FileHandler',
-            'filename': '/var/log/django_debug.log',
+    "version": 1,
+    "disable_existing_loggers": False,
+    "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}},
+    "handlers": {
+        "file": {
+            "level": "DEBUG",
+            "class": "logging.FileHandler",
+            "filename": "/var/log/django_debug.log",
         },
-        'mail_admins': {
-            'level': 'ERROR',
-            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
+        "mail_admins": {
+            "level": "ERROR",
+            "filters": ["require_debug_false"],
+            "class": "django.utils.log.AdminEmailHandler",
         },
     },
-    'loggers': {
-        'django': {
-            'handlers': ['file'],
-            'level': 'DEBUG',
-            'propagate': True,
+    "loggers": {
+        "django": {"handlers": ["file"], "level": "DEBUG", "propagate": True},
+        "django.request": {
+            "handlers": ["mail_admins"],
+            "level": "ERROR",
+            "propagate": True,
         },
-        'django.request': {
-            'handlers': ['mail_admins'],
-            'level': 'ERROR',
-            'propagate': True,
-        },'django.db.backends': {
-            'level': 'WARNING',
-        },
-    }
+        "django.db.backends": {"level": "WARNING"},
+    },
 }
 
 XOS_BRANDING_NAME = "OpenCloud"
@@ -228,14 +219,13 @@
 XOS_BRANDING_BG = "/static/bg.png"
 
 DISABLE_MINIDASHBOARD = False
-ENCRYPTED_FIELDS_KEYDIR = XOS_DIR + '/private_keys'
-ENCRYPTED_FIELD_MODE = 'ENCRYPT'
+ENCRYPTED_FIELDS_KEYDIR = XOS_DIR + "/private_keys"
+ENCRYPTED_FIELD_MODE = "ENCRYPT"
 
 STATISTICS_DRIVER = "statistics_driver"
 
 # prevents warnings on django 1.7
-TEST_RUNNER = 'django.test.runner.DiscoverRunner'
+TEST_RUNNER = "django.test.runner.DiscoverRunner"
 
 # API key for Google Maps, created by zdw on 2016-06-29. Testing only, not for production
-GEOPOSITION_GOOGLE_MAPS_API_KEY = 'AIzaSyBWAHP9mvLqWLRkVqK8o5wMskaIe9w7DaM'
-
+GEOPOSITION_GOOGLE_MAPS_API_KEY = "AIzaSyBWAHP9mvLqWLRkVqK8o5wMskaIe9w7DaM"
diff --git a/xos/xos/urls.py b/xos/xos/urls.py
index 6ed5c28..1aa57e5 100644
--- a/xos/xos/urls.py
+++ b/xos/xos/urls.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,4 +17,3 @@
 from core.models import *
 
 urlpatterns = []
-
diff --git a/xos/xos/wsgi.py b/xos/xos/wsgi.py
index 416c5ea..459cc22 100644
--- a/xos/xos/wsgi.py
+++ b/xos/xos/wsgi.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,6 +13,8 @@
 # limitations under the License.
 
 
+from django.core.wsgi import get_wsgi_application
+
 """
 WSGI config for XOS.
 
@@ -40,7 +41,7 @@
 # This application object is used by any WSGI server configured to use this
 # file. This includes Django's development server, if the WSGI_APPLICATION
 # setting points here.
-from django.core.wsgi import get_wsgi_application
+
 application = get_wsgi_application()
 
 # Apply WSGI middleware here.
diff --git a/xos/xos/xml_util.py b/xos/xos/xml_util.py
index 53b21ba..d1c90a9 100644
--- a/xos/xos/xml_util.py
+++ b/xos/xos/xml_util.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,14 +19,15 @@
 from StringIO import StringIO
 
 # helper functions to help build xpaths
+
+
 class XpathFilter:
     @staticmethod
-
     def filter_value(key, value):
         xpath = ""
         if isinstance(value, str):
-            if '*' in value:
-                value = value.replace('*', '')
+            if "*" in value:
+                value = value.replace("*", "")
                 xpath = 'contains(%s, "%s")' % (key, value)
             else:
                 xpath = '%s="%s"' % (key, value)
@@ -39,20 +39,23 @@
         if filter:
             filter_list = []
             for (key, value) in filter.items():
-                if key == 'text':
-                    key = 'text()'
+                if key == "text":
+                    key = "text()"
                 else:
-                    key = '@'+key
+                    key = "@" + key
                 if isinstance(value, str):
                     filter_list.append(XpathFilter.filter_value(key, value))
                 elif isinstance(value, list):
-                    stmt = ' or '.join([XpathFilter.filter_value(key, str(val)) for val in value])
+                    stmt = " or ".join(
+                        [XpathFilter.filter_value(key, str(val)) for val in value]
+                    )
                     filter_list.append(stmt)
             if filter_list:
-                xpath = ' and '.join(filter_list)
-                xpath = '[' + xpath + ']'
+                xpath = " and ".join(filter_list)
+                xpath = "[" + xpath + "]"
         return xpath
 
+
 # a wrapper class around lxml.etree._Element
 # the reason why we need this one is because of the limitations
 # we've found in xpath to address documents with multiple namespaces defined
@@ -98,7 +101,7 @@
         """
         if not instance_class:
             instance_class = Object
-        if not fields and hasattr(instance_class, 'fields'):
+        if not fields and hasattr(instance_class, "fields"):
             fields = instance_class.fields
 
         if not fields:
@@ -107,7 +110,7 @@
             instance = instance_class({}, self)
             for field in fields:
                 if field in self.attrib:
-                   instance[field] = self.attrib[field]
+                    instance[field] = self.attrib[field]
         return instance
 
     def add_instance(self, name, instance, fields=[]):
@@ -115,7 +118,7 @@
         Adds the specifed instance(s) as a child element of this xml
         element.
         """
-        if not fields and hasattr(instance, 'keys'):
+        if not fields and hasattr(instance, "keys"):
             fields = instance.keys()
         elem = self.add_element(name)
         for field in fields:
@@ -129,9 +132,9 @@
         specified root_node if specified, otherwise start at tree's root.
         """
 
-        if not name.startswith('//'):
-            name = '//' + name 
-        elements = self.element.xpath('%s ' % name, namespaces=self.namespaces)
+        if not name.startswith("//"):
+            name = "//" + name
+        elements = self.element.xpath("%s " % name, namespaces=self.namespaces)
         for element in elements:
             parent = element.getparent()
             parent.remove(element)
@@ -154,19 +157,19 @@
         del self.element.attrib[key]
 
     def toxml(self):
-        return etree.tostring(self.element, encoding='UTF-8', pretty_print=True)
+        return etree.tostring(self.element, encoding="UTF-8", pretty_print=True)
 
     def __str__(self):
         return self.toxml()
 
     # are redirected on self.element
-    def __getattr__ (self, name):
+    def __getattr__(self, name):
         if not hasattr(self.element, name):
-            raise AttributeError, name
+            raise AttributeError(name)
         return getattr(self.element, name)
 
-class Xml:
 
+class Xml:
     def __init__(self, xml=None, namespaces=None):
         self.root = None
         self.namespaces = namespaces
@@ -191,40 +194,40 @@
             # 'rspec' file doesnt exist. 'rspec' is proably an xml string
             try:
                 tree = etree.parse(StringIO(xml), parser)
-            except Exception, e:
-                raise Exception, str(e)
+            except Exception as e:
+                raise Exception(str(e))
         root = tree.getroot()
         self.namespaces = dict(root.nsmap)
         # set namespaces map
-        if 'default' not in self.namespaces and None in self.namespaces:
+        if "default" not in self.namespaces and None in self.namespaces:
             # If the 'None' exist, then it's pointing to the default namespace. This makes
             # it hard for us to write xpath queries for the default naemspace because lxml
             # wont understand a None prefix. We will just associate the default namespeace
             # with a key named 'default'.
-            self.namespaces['default'] = self.namespaces.pop(None)
+            self.namespaces["default"] = self.namespaces.pop(None)
 
         else:
-            self.namespaces['default'] = 'default'
+            self.namespaces["default"] = "default"
 
         self.root = XmlElement(root, self.namespaces)
         # set schema
         for key in self.root.attrib.keys():
-            if key.endswith('schemaLocation'):
+            if key.endswith("schemaLocation"):
                 # schemaLocation should be at the end of the list.
                 # Use list comprehension to filter out empty strings
-                schema_parts  = [x for x in self.root.attrib[key].split(' ') if x]
+                schema_parts = [x for x in self.root.attrib[key].split(" ") if x]
                 self.schema = schema_parts[1]
-                namespace, schema  = schema_parts[0], schema_parts[1]
+                namespace, schema = schema_parts[0], schema_parts[1]
                 break
 
-    def parse_dict(self, d, root_tag_name='xml', element = None):
+    def parse_dict(self, d, root_tag_name="xml", element=None):
         if element is None:
             if self.root is None:
-                self.parse_xml('<%s/>' % root_tag_name)
+                self.parse_xml("<%s/>" % root_tag_name)
             element = self.root.element
 
-        if 'text' in d:
-            text = d.pop('text')
+        if "text" in d:
+            text = d.pop("text")
             element.text = text
 
         # handle repeating fields
@@ -245,10 +248,10 @@
 
         # element.attrib.update will explode if DateTimes are in the
         # dcitionary.
-        d=d.copy()
+        d = d.copy()
         # looks like iteritems won't stand side-effects
         for k in d.keys():
-            if not isinstance(d[k],StringTypes):
+            if not isinstance(d[k], StringTypes):
                 del d[k]
 
         element.attrib.update(d)
@@ -262,7 +265,7 @@
         if not relaxng(self.root):
             error = relaxng.error_log.last_error
             message = "%s (line %s)" % (error.message, error.line)
-            raise Exception, message
+            raise Exception(message)
         return True
 
     def xpath(self, xpath, namespaces=None):
@@ -286,7 +289,7 @@
         """
         return self.root.add_element(*args, **kwds)
 
-    def remove_elements(self, name, element = None):
+    def remove_elements(self, name, element=None):
         """
         Removes all occurences of an element from the tree. Start at
         specified root_node if specified, otherwise start at tree's root.
@@ -303,24 +306,26 @@
         return self.root.get_instnace(*args, **kwds)
 
     def get_element_attributes(self, elem=None, depth=0):
-        if elem == None:
+        if elem is None:
             elem = self.root
-        if not hasattr(elem, 'attrib'):
+        if not hasattr(elem, "attrib"):
             # this is probably not an element node with attribute. could be just and an
             # attribute, return it
             return elem
         attrs = dict(elem.attrib)
-        attrs['text'] = str(elem.text).strip()
-        attrs['parent'] = elem.getparent()
+        attrs["text"] = str(elem.text).strip()
+        attrs["parent"] = elem.getparent()
         if isinstance(depth, int) and depth > 0:
             for child_elem in list(elem):
                 key = str(child_elem.tag)
                 if key not in attrs:
-                    attrs[key] = [self.get_element_attributes(child_elem, depth-1)]
+                    attrs[key] = [self.get_element_attributes(child_elem, depth - 1)]
                 else:
-                    attrs[key].append(self.get_element_attributes(child_elem, depth-1))
+                    attrs[key].append(
+                        self.get_element_attributes(child_elem, depth - 1)
+                    )
         else:
-            attrs['child_nodes'] = list(elem)
+            attrs["child_nodes"] = list(elem)
         return attrs
 
     def append(self, elem):
@@ -336,7 +341,7 @@
         return self.toxml()
 
     def toxml(self):
-        return etree.tostring(self.root.element, encoding='UTF-8', pretty_print=True)
+        return etree.tostring(self.root.element, encoding="UTF-8", pretty_print=True)
 
     # XXX smbaker, for record.load_from_string
     def todict(self, elem=None):
@@ -344,20 +349,18 @@
             elem = self.root
         d = {}
         d.update(elem.attrib)
-        d['text'] = elem.text
+        d["text"] = elem.text
         for child in elem.iterchildren():
             if child.tag not in d:
                 d[child.tag] = []
             d[child.tag].append(self.todict(child))
 
-        if len(d)==1 and ("text" in d):
+        if len(d) == 1 and ("text" in d):
             d = d["text"]
 
         return d
 
     def save(self, filename):
-        f = open(filename, 'w')
+        f = open(filename, "w")
         f.write(self.toxml())
         f.close()
-
-    
diff --git a/xos/xos_client/setup.py b/xos/xos_client/setup.py
index 49afcaf..d10b5c6 100644
--- a/xos/xos_client/setup.py
+++ b/xos/xos_client/setup.py
@@ -15,11 +15,8 @@
 # limitations under the License.
 
 import os
-import sys
-import site
 from setuptools.command.install import install
 
-
 try:
     from xosutil.autoversion_setup import setup_with_auto_version as setup
 except ImportError:
@@ -29,7 +26,7 @@
 
 from xosapi.version import __version__
 
-CHAMELEON_DIR='xosapi/chameleon'
+CHAMELEON_DIR = "xosapi/chameleon"
 
 if not os.path.exists(CHAMELEON_DIR):
     raise Exception("%s does not exist!" % CHAMELEON_DIR)
@@ -38,31 +35,37 @@
     raise Exception("Please make the chameleon protos")
 
 # Chameleon requires these files have executable permission set.
+
+
 class InstallFixChameleonPermissions(install):
     def run(self):
         install.run(self)
         for filepath in self.get_outputs():
-            if filepath.endswith("chameleon/protoc_plugins/gw_gen.py") or \
-               filepath.endswith("chameleon/protoc_plugins/swagger_gen.py"):
-               os.chmod(filepath, 0777)
-
-setup_result = setup(name='xosapi',
-      version=__version__,
-      cmdclass={"install": InstallFixChameleonPermissions},
-      description='XOS api client',
-      package_dir= {'xosapi.chameleon': CHAMELEON_DIR},
-      packages=['xosapi.chameleon.grpc_client',
-                'xosapi.chameleon.protos',
-                'xosapi.chameleon.utils',
-                'xosapi.chameleon.protoc_plugins',
-                'xosapi',
-                'xosapi.convenience'],
-      py_modules= ['xosapi.chameleon.__init__'],
-      include_package_data=True,
-      package_data = {'xosapi.chameleon.protos': ['*.proto'],
-                      'xosapi.chameleon.protoc_plugins': ['*.desc']},
-      scripts = ['xossh'],
-     )
+            if filepath.endswith(
+                "chameleon/protoc_plugins/gw_gen.py"
+            ) or filepath.endswith("chameleon/protoc_plugins/swagger_gen.py"):
+                os.chmod(filepath, 0o777)
 
 
-
+setup_result = setup(
+    name="xosapi",
+    version=__version__,
+    cmdclass={"install": InstallFixChameleonPermissions},
+    description="XOS api client",
+    package_dir={"xosapi.chameleon": CHAMELEON_DIR},
+    packages=[
+        "xosapi.chameleon.grpc_client",
+        "xosapi.chameleon.protos",
+        "xosapi.chameleon.utils",
+        "xosapi.chameleon.protoc_plugins",
+        "xosapi",
+        "xosapi.convenience",
+    ],
+    py_modules=["xosapi.chameleon.__init__"],
+    include_package_data=True,
+    package_data={
+        "xosapi.chameleon.protos": ["*.proto"],
+        "xosapi.chameleon.protoc_plugins": ["*.desc"],
+    },
+    scripts=["xossh"],
+)
diff --git a/xos/xos_client/tests/csr_introspect.py b/xos/xos_client/tests/csr_introspect.py
index c5ca1a7..501cc78 100644
--- a/xos/xos_client/tests/csr_introspect.py
+++ b/xos/xos_client/tests/csr_introspect.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,23 +12,34 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from xosapi import xos_grpc_client
 import sys
+
 sys.path.append("..")
 
-from xosapi import xos_grpc_client
 
 def test_callback():
-    print "TEST: csr_introspect"
+    print("TEST: csr_introspect")
 
     c = xos_grpc_client.coreclient
 
     for csr in c.xos_orm.CordSubscriberRoot.objects.all():
-        print "  csr", csr.id
-        for field_name in ["firewall_enable", "firewall_rules", "url_filter_enable", "url_filter_rules", "cdn_enable", "uplink_speed", "downlink_speed", "enable_uverse", "status"]:
-            print "    %s: %s" % (field_name, getattr(csr, field_name))
+        print("  csr", csr.id)
+        for field_name in [
+            "firewall_enable",
+            "firewall_rules",
+            "url_filter_enable",
+            "url_filter_rules",
+            "cdn_enable",
+            "uplink_speed",
+            "downlink_speed",
+            "enable_uverse",
+            "status",
+        ]:
+            print("    %s: %s" % (field_name, getattr(csr, field_name)))
 
-    print "    okay"
+    print("    okay")
+
 
 xos_grpc_client.start_api_parseargs(test_callback)
-
diff --git a/xos/xos_client/tests/nopper.py b/xos/xos_client/tests/nopper.py
index dd27f32..ffb8b38 100644
--- a/xos/xos_client/tests/nopper.py
+++ b/xos/xos_client/tests/nopper.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+from xosapi import xos_grpc_client
+import sys
+import time
 
 """ nopper
 
@@ -20,14 +23,11 @@
     performance.
 """
 
-import sys
-import time
 sys.path.append("..")
 
-from xosapi import xos_grpc_client
 
 def test_callback():
-    print "TEST: nop"
+    print("TEST: nop")
 
     c = xos_grpc_client.coreclient
 
@@ -35,16 +35,16 @@
         tStart = time.time()
         count = 0
         while True:
-            if type(xos_grpc_client.coreclient) == xos_grpc_client.SecureClient:
-               c.utility.AuthenticatedNoOp(xos_grpc_client.Empty())
+            if isinstance(xos_grpc_client.coreclient, xos_grpc_client.SecureClient):
+                c.utility.AuthenticatedNoOp(xos_grpc_client.Empty())
             else:
-               c.utility.NoOp(xos_grpc_client.Empty())
+                c.utility.NoOp(xos_grpc_client.Empty())
             count = count + 1
-            elap = time.time()-tStart
-            if (elap >= 10):
-                print "nops/second = %d" % int(count/elap)
+            elap = time.time() - tStart
+            if elap >= 10:
+                print("nops/second = %d" % int(count / elap))
                 tStart = time.time()
                 count = 0
 
-xos_grpc_client.start_api_parseargs(test_callback)
 
+xos_grpc_client.start_api_parseargs(test_callback)
diff --git a/xos/xos_client/tests/orm_listall.py b/xos/xos_client/tests/orm_listall.py
index b353ce8..74875be 100644
--- a/xos/xos_client/tests/orm_listall.py
+++ b/xos/xos_client/tests/orm_listall.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,15 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from xosapi import xos_grpc_client
 import sys
 import traceback
+
 sys.path.append("..")
 
-from xosapi import xos_grpc_client
 
 def test_callback():
-    print "TEST: orm_listall_crud"
+    print("TEST: orm_listall_crud")
 
     c = xos_grpc_client.coreclient
 
@@ -29,16 +29,16 @@
         model_class = getattr(c.xos_orm, model_name)
 
         try:
-            print "   list all %s ..." % model_name,
+            print("   list all %s ..." % model_name, end=" ")
 
             objs = model_class.objects.all()
 
-            print "[%d] okay" % len(objs)
-        except:
-            print "   fail!"
+            print("[%d] okay" % len(objs))
+        except BaseException:
+            print("   fail!")
             traceback.print_exc()
 
-    print "    done"
+    print("    done")
+
 
 xos_grpc_client.start_api_parseargs(test_callback)
-
diff --git a/xos/xos_client/tests/orm_nodelabel.py b/xos/xos_client/tests/orm_nodelabel.py
index 72d3069..009ca43 100644
--- a/xos/xos_client/tests/orm_nodelabel.py
+++ b/xos/xos_client/tests/orm_nodelabel.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,33 +15,36 @@
 # These are functional tests of ManyToMany relations. These tests need to be conducted end-to-end with a real
 # API to verify that the client and server ends of the API are working with each other.
 
-import random
-import string
+from xosapi import xos_grpc_client
 import sys
 import unittest
 
 orm = None
 
-from xosapi import xos_grpc_client
 
 TEST_NODE_LABEL_1_NAME = "test_node_label_1"
 
+
 class TestORM(unittest.TestCase):
     def setUp(self):
         self.test_node_label_1_name = TEST_NODE_LABEL_1_NAME
 
         nodes1 = orm.Node.objects.filter(name="test_node_1")
         if nodes1:
-            self.node1=nodes1[0]
+            self.node1 = nodes1[0]
         else:
-            self.node1 = orm.Node(name="test_node_1", site_deployment=orm.SiteDeployment.objects.first())
+            self.node1 = orm.Node(
+                name="test_node_1", site_deployment=orm.SiteDeployment.objects.first()
+            )
             self.node1.save()
 
         nodes2 = orm.Node.objects.filter(name="test_node_2")
         if nodes2:
-            self.node2=nodes2[0]
+            self.node2 = nodes2[0]
         else:
-            self.node2 = orm.Node(name="test_node_2", site_deployment=orm.SiteDeployment.objects.first())
+            self.node2 = orm.Node(
+                name="test_node_2", site_deployment=orm.SiteDeployment.objects.first()
+            )
             self.node2.save()
 
     def tearDown(self):
@@ -61,54 +63,54 @@
         pass
 
     def test_create_empty_node_label(self):
-        n = orm.NodeLabel(name = self.test_node_label_1_name)
+        n = orm.NodeLabel(name=self.test_node_label_1_name)
         n.save()
 
-        labels = orm.NodeLabel.objects.filter(name = self.test_node_label_1_name)
-        self.assertEqual(len(labels),1)
+        labels = orm.NodeLabel.objects.filter(name=self.test_node_label_1_name)
+        self.assertEqual(len(labels), 1)
 
-        n=labels[0]
+        n = labels[0]
         self.assertNotEqual(n, None)
         self.assertEqual(len(n.node.all()), 0)
 
     def test_create_node_label_one_node(self):
-        n = orm.NodeLabel(name = self.test_node_label_1_name)
+        n = orm.NodeLabel(name=self.test_node_label_1_name)
         n.node.add(self.node1)
         n.save()
 
-        labels = orm.NodeLabel.objects.filter(name = self.test_node_label_1_name)
-        self.assertEqual(len(labels),1)
+        labels = orm.NodeLabel.objects.filter(name=self.test_node_label_1_name)
+        self.assertEqual(len(labels), 1)
 
-        n=labels[0]
+        n = labels[0]
         self.assertNotEqual(n, None)
         self.assertEqual(len(n.node.all()), 1)
 
     def test_create_node_label_two_nodes(self):
-        n = orm.NodeLabel(name = self.test_node_label_1_name)
+        n = orm.NodeLabel(name=self.test_node_label_1_name)
         n.node.add(self.node1)
         n.node.add(self.node2)
         n.save()
 
-        labels = orm.NodeLabel.objects.filter(name = self.test_node_label_1_name)
-        self.assertEqual(len(labels),1)
+        labels = orm.NodeLabel.objects.filter(name=self.test_node_label_1_name)
+        self.assertEqual(len(labels), 1)
 
-        n=labels[0]
+        n = labels[0]
         self.assertNotEqual(n, None)
         self.assertEqual(len(n.node.all()), 2)
 
     def test_add_node_to_label(self):
-        n = orm.NodeLabel(name = self.test_node_label_1_name)
+        n = orm.NodeLabel(name=self.test_node_label_1_name)
         n.save()
 
-        labels = orm.NodeLabel.objects.filter(name = self.test_node_label_1_name)
+        labels = orm.NodeLabel.objects.filter(name=self.test_node_label_1_name)
         self.assertEqual(len(labels), 1)
-        n=labels[0]
+        n = labels[0]
         n.node.add(self.node1)
         n.save()
 
-        labels = orm.NodeLabel.objects.filter(name = self.test_node_label_1_name)
+        labels = orm.NodeLabel.objects.filter(name=self.test_node_label_1_name)
         self.assertEqual(len(labels), 1)
-        n=labels[0]
+        n = labels[0]
         self.assertEqual(len(n.node.all()), 1)
 
     def test_remove_node_from_label(self):
@@ -152,8 +154,8 @@
 
     orm = xos_grpc_client.coreclient.xos_orm
 
-    sys.argv=sys.argv[:1]  # unittest gets mad about the orm command line arguments
+    sys.argv = sys.argv[:1]  # unittest gets mad about the orm command line arguments
     unittest.main()
 
-xos_grpc_client.start_api_parseargs(test_callback)
 
+xos_grpc_client.start_api_parseargs(test_callback)
diff --git a/xos/xos_client/tests/orm_reverse_relations.py b/xos/xos_client/tests/orm_reverse_relations.py
index a958e95..943a7fb 100644
--- a/xos/xos_client/tests/orm_reverse_relations.py
+++ b/xos/xos_client/tests/orm_reverse_relations.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,14 +15,12 @@
 # These are functional tests of ManyToMany relations. These tests need to be conducted end-to-end with a real
 # API to verify that the client and server ends of the API are working with each other.
 
-import random
-import string
+from xosapi import xos_grpc_client
 import sys
 import unittest
 
 orm = None
 
-from xosapi import xos_grpc_client
 
 SERVICE_1_NAME = "test_service_1"
 SERVICEINSTANCE_1_NAME = "test_service_instance_1"
@@ -31,6 +28,7 @@
 SERVICE_2_NAME = "test_service_2"
 SERVICEINSTANCE_2_NAME = "test_service_instance_2"
 
+
 class TestORMReverseRelations(unittest.TestCase):
     def setUp(self):
         pass
@@ -40,7 +38,6 @@
         for obj in objs:
             obj.delete()
 
-
     def tearDown(self):
         self.cleanup_models(orm.ServiceInstance, SERVICEINSTANCE_1_NAME)
         self.cleanup_models(orm.ServiceInstance, SERVICEINSTANCE_2_NAME)
@@ -51,31 +48,39 @@
         service1 = orm.Service(name=SERVICE_1_NAME)
         service1.save()
 
-        serviceinstance1 = orm.ServiceInstance(name=SERVICEINSTANCE_1_NAME, owner=service1)
+        serviceinstance1 = orm.ServiceInstance(
+            name=SERVICEINSTANCE_1_NAME, owner=service1
+        )
         serviceinstance1.save()
 
         service2 = orm.Service(name=SERVICE_2_NAME)
         service2.save()
 
-        serviceinstance2 = orm.ServiceInstance(name=SERVICEINSTANCE_2_NAME, owner=service2)
+        serviceinstance2 = orm.ServiceInstance(
+            name=SERVICEINSTANCE_2_NAME, owner=service2
+        )
         serviceinstance2.save()
 
-        link = orm.ServiceInstanceLink(provider_service_instance = serviceinstance1, subscriber_service_instance = serviceinstance2)
+        link = orm.ServiceInstanceLink(
+            provider_service_instance=serviceinstance1,
+            subscriber_service_instance=serviceinstance2,
+        )
         link.save()
 
-        si1_readback = orm.ServiceInstance.objects.get(id = serviceinstance1.id)
-        si2_readback = orm.ServiceInstance.objects.get(id = serviceinstance2.id)
+        si1_readback = orm.ServiceInstance.objects.get(id=serviceinstance1.id)
+        si2_readback = orm.ServiceInstance.objects.get(id=serviceinstance2.id)
 
         self.assertEqual(si1_readback.provided_links.count(), 1)
         self.assertEqual(si2_readback.subscribed_links.count(), 1)
 
+
 def test_callback():
     global orm
 
     orm = xos_grpc_client.coreclient.xos_orm
 
-    sys.argv=sys.argv[:1]  # unittest gets mad about the orm command line arguments
+    sys.argv = sys.argv[:1]  # unittest gets mad about the orm command line arguments
     unittest.main()
 
-xos_grpc_client.start_api_parseargs(test_callback)
 
+xos_grpc_client.start_api_parseargs(test_callback)
diff --git a/xos/xos_client/tests/orm_user_crud.py b/xos/xos_client/tests/orm_user_crud.py
index 270b16e..96f38d6 100644
--- a/xos/xos_client/tests/orm_user_crud.py
+++ b/xos/xos_client/tests/orm_user_crud.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,69 +12,74 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from xosapi import xos_grpc_client
 import sys
+
 sys.path.append("..")
 
-from xosapi import xos_grpc_client
 
 def test_callback():
-    print "TEST: orm_user_crud"
+    print("TEST: orm_user_crud")
 
     c = xos_grpc_client.coreclient
 
     # create a new user and save it
-    u=c.xos_orm.User.objects.new()
-    assert(u.id==0)
-    import random, string
-    u.email=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
-    u.site=c.xos_orm.Site.objects.all()[0]
+    u = c.xos_orm.User.objects.new()
+    assert u.id == 0
+    import random
+    import string
+
+    u.email = "".join(
+        random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+    )
+    u.site = c.xos_orm.Site.objects.all()[0]
     u.save()
 
     # when we created the user, he should be assigned an id
     orig_id = u.id
-    assert(orig_id!=0)
+    assert orig_id != 0
 
     # invalidate u.site so it's reloaded from the server
     u.invalidate_cache("site")
 
     # site object should be populated
-    assert(u.site is not None)
+    assert u.site is not None
 
     # site object should have a backpointer to user
     u_all = u.site.users.all()
     u_all = [x for x in u_all if x.email == u.email]
-    assert(len(u_all)==1)
+    assert len(u_all) == 1
 
     # update the user
-    u.password="foobar"
+    u.password = "foobar"
     u.save()
 
     # update should not have changed it
-    assert(u.id==orig_id)
+    assert u.id == orig_id
 
     # check a listall and make sure the user is listed
     u_all = c.xos_orm.User.objects.all()
     u_all = [x for x in u_all if x.email == u.email]
-    assert(len(u_all)==1)
+    assert len(u_all) == 1
     u2 = u_all[0]
-    assert(u2.id == u.id)
+    assert u2.id == u.id
 
     # get and make sure the password was updated
     u3 = c.xos_orm.User.objects.get(id=orig_id)
-    assert(u3.password=="foobar")
+    assert u3.password == "foobar"
 
     # try a partial update
     u3.password = "should_not_change"
     u3.firstname = "new_first_name"
     u3.lastname = "new_last_name"
-    u3.save(update_fields = ["firstname", "lastname"])
+    u3.save(update_fields=["firstname", "lastname"])
 
     # get and make sure the password was not updated, but first and last name were
     u4 = c.xos_orm.User.objects.get(id=orig_id)
-    assert(u4.password=="foobar")
-    assert(u4.firstname == "new_first_name")
-    assert(u4.lastname == "new_last_name")
+    assert u4.password == "foobar"
+    assert u4.firstname == "new_first_name"
+    assert u4.lastname == "new_last_name"
 
     # delete the user
     u4.delete()
@@ -83,9 +87,9 @@
     # make sure it is deleted
     u_all = c.xos_orm.User.objects.all()
     u_all = [x for x in u_all if x.email == u.email]
-    assert(len(u_all)==0)
+    assert len(u_all) == 0
 
-    print "    okay"
+    print("    okay")
+
 
 xos_grpc_client.start_api_parseargs(test_callback)
-
diff --git a/xos/xos_client/tests/run_tests.sh b/xos/xos_client/tests/run_tests.sh
index e052049..5dcf599 100755
--- a/xos/xos_client/tests/run_tests.sh
+++ b/xos/xos_client/tests/run_tests.sh
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 
 # Copyright 2017-present Open Networking Foundation
 #
@@ -13,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-#! /bin/bash
-
 # Run the tests from the head-node against an xos-client VM
 
 PW=`cat /opt/cord/build/platform-install/credentials/xosadmin@opencord.org`
@@ -24,4 +22,4 @@
 docker run -it --entrypoint python xosproject/xos-client /tmp/xos_client/tests/orm_listall.py -u xosadmin@opencord.org -p $PW -qq
 docker run -it --entrypoint python xosproject/xos-client /tmp/xos_client/tests/vtr_crud.py -u xosadmin@opencord.org -p $PW -qq
 docker run -it --entrypoint python xosproject/xos-client /tmp/xos_client/tests/vsg_introspect.py -u xosadmin@opencord.org -p $PW -qq
-docker run -it --entrypoint python xosproject/xos-client /tmp/xos_client/tests/csr_introspect.py -u xosadmin@opencord.org -p $PW -qq
\ No newline at end of file
+docker run -it --entrypoint python xosproject/xos-client /tmp/xos_client/tests/csr_introspect.py -u xosadmin@opencord.org -p $PW -qq
diff --git a/xos/xos_client/tests/vsg_introspect.py b/xos/xos_client/tests/vsg_introspect.py
index ed34117..3255697 100644
--- a/xos/xos_client/tests/vsg_introspect.py
+++ b/xos/xos_client/tests/vsg_introspect.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,23 +12,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from xosapi import xos_grpc_client
 import sys
+
 sys.path.append("..")
 
-from xosapi import xos_grpc_client
 
 def test_callback():
-    print "TEST: vsg_introspect"
+    print("TEST: vsg_introspect")
 
     c = xos_grpc_client.coreclient
 
     for vsg in c.xos_orm.VSGTenant.objects.all():
-        print "  vsg", vsg.id
-        for field_name in ["wan_container_ip", "wan_container_mac", "wan_container_netbits", "wan_container_gateway_ip", "wan_container_gateway_mac", "wan_vm_ip", "wan_vm_mac"]:
-            print "    %s: %s" % (field_name, getattr(vsg, field_name))
+        print("  vsg", vsg.id)
+        for field_name in [
+            "wan_container_ip",
+            "wan_container_mac",
+            "wan_container_netbits",
+            "wan_container_gateway_ip",
+            "wan_container_gateway_mac",
+            "wan_vm_ip",
+            "wan_vm_mac",
+        ]:
+            print("    %s: %s" % (field_name, getattr(vsg, field_name)))
 
-    print "    okay"
+    print("    okay")
+
 
 xos_grpc_client.start_api_parseargs(test_callback)
-
diff --git a/xos/xos_client/tests/vtr_crud.py b/xos/xos_client/tests/vtr_crud.py
index 3d5fab5..e776d13 100644
--- a/xos/xos_client/tests/vtr_crud.py
+++ b/xos/xos_client/tests/vtr_crud.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,46 +12,47 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
+from xosapi import xos_grpc_client
 import sys
+
 sys.path.append("..")
 
-from xosapi import xos_grpc_client
 
 def test_callback():
-    print "TEST: vtr_crud"
+    print("TEST: vtr_crud")
 
     c = xos_grpc_client.coreclient
 
     sr = c.xos_orm.CordSubscriberRoot.objects.first()
     if not sr:
-        print "No subscriber roots!"
+        print("No subscriber roots!")
         return
 
     vt = c.xos_orm.VTRTenant.objects.new()
     vt.target = sr
-    vt.test="ping"
-    vt.scope="vm"
-    vt.argument="8.8.8.8"
+    vt.test = "ping"
+    vt.scope = "vm"
+    vt.argument = "8.8.8.8"
     vt.save()
 
-    assert(vt.id is not None)
-    assert(vt.id>0)
+    assert vt.id is not None
+    assert vt.id > 0
 
     # Check and make sure we can read it back, pay particular attention to
     # the generic foreign key.
     vt2 = c.xos_orm.VTRTenant.objects.get(id=vt.id)
-    assert(vt2.target_id == sr.id)
-    assert(vt2.target_type_id == sr.self_content_type_id)
-    assert("TenantRoot" in vt2.target.class_names)
+    assert vt2.target_id == sr.id
+    assert vt2.target_type_id == sr.self_content_type_id
+    assert "TenantRoot" in vt2.target.class_names
 
     vt2.delete()
 
     # now, make sure it has been deleted
     vt3 = c.xos_orm.VTRTenant.objects.filter(id=vt.id)
-    assert(not vt3)
+    assert not vt3
 
-    print "    okay"
+    print("    okay")
+
 
 xos_grpc_client.start_api_parseargs(test_callback)
-
diff --git a/xos/xos_client/xosapi/__init__.py b/xos/xos_client/xosapi/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/xos_client/xosapi/__init__.py
+++ b/xos/xos_client/xosapi/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/xos_client/xosapi/convenience/__init__.py b/xos/xos_client/xosapi/convenience/__init__.py
index d4e8062..b0fb0b2 100644
--- a/xos/xos_client/xosapi/convenience/__init__.py
+++ b/xos/xos_client/xosapi/convenience/__init__.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
diff --git a/xos/xos_client/xosapi/convenience/addresspool.py b/xos/xos_client/xosapi/convenience/addresspool.py
index eb88447..13006c5 100644
--- a/xos/xos_client/xosapi/convenience/addresspool.py
+++ b/xos/xos_client/xosapi/convenience/addresspool.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
 
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperAddressPool(ORMWrapper):
     def get_address(self):
         ap = self
@@ -46,7 +46,6 @@
 
         return None
 
-
     def put_address(self, addr):
         ap = self
         addresses = ap.addresses or ""
@@ -61,4 +60,5 @@
             parts.remove(addr)
             ap.inuse = " ".join(parts)
 
+
 register_convenience_wrapper("AddressPool", ORMWrapperAddressPool)
diff --git a/xos/xos_client/xosapi/convenience/controller.py b/xos/xos_client/xosapi/convenience/controller.py
index 2808cc5..437b54a 100644
--- a/xos/xos_client/xosapi/convenience/controller.py
+++ b/xos/xos_client/xosapi/convenience/controller.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,16 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperController(ORMWrapper):
     @property
     def auth_url_v3(self):
-        if self.auth_url and self.auth_url[-1] == '/':
-            return '{}/v3/'.format('/'.join(self.auth_url.split('/')[:-2]))
+        if self.auth_url and self.auth_url[-1] == "/":
+            return "{}/v3/".format("/".join(self.auth_url.split("/")[:-2]))
         else:
-            return '{}/v3/'.format('/'.join(self.auth_url.split('/')[:-1]))
+            return "{}/v3/".format("/".join(self.auth_url.split("/")[:-1]))
+
 
 register_convenience_wrapper("Controller", ORMWrapperController)
diff --git a/xos/xos_client/xosapi/convenience/instance.py b/xos/xos_client/xosapi/convenience/instance.py
index d7234c8..3941643 100644
--- a/xos/xos_client/xosapi/convenience/instance.py
+++ b/xos/xos_client/xosapi/convenience/instance.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,26 +15,30 @@
 
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
-class ORMWrapperInstance(ORMWrapper):
 
+class ORMWrapperInstance(ORMWrapper):
     def all_ips(self):
-        ips={}
+        ips = {}
         for ns in self.ports.all():
-           if ns.ip:
-               ips[ns.network.name] = ns.ip
+            if ns.ip:
+                ips[ns.network.name] = ns.ip
         return ips
 
     def all_ips_string(self):
         result = []
         ips = self.all_ips()
         for key in sorted(ips.keys()):
-            #result.append("%s = %s" % (key, ips[key]))
+            # result.append("%s = %s" % (key, ips[key]))
             result.append(ips[key])
         return ", ".join(result)
 
     def get_public_ip(self):
         for ns in self.ports.all():
-            if (ns.ip) and (ns.network.template.visibility=="public") and (ns.network.template.translation=="none"):
+            if (
+                (ns.ip)
+                and (ns.network.template.visibility == "public")
+                and (ns.network.template.translation == "none")
+            ):
                 return ns.ip
         return None
 
@@ -50,11 +53,14 @@
     def get_ssh_ip(self):
         # first look specifically for a management_local network
         for ns in self.ports.all():
-            if ns.network.template and ns.network.template.vtn_kind=="MANAGEMENT_LOCAL":
+            if (
+                ns.network.template
+                and ns.network.template.vtn_kind == "MANAGEMENT_LOCAL"
+            ):
                 return ns.ip
 
         # for compatibility, now look for any management network
-        management=self.get_network_ip("management")
+        management = self.get_network_ip("management")
         if management:
             return management
 
@@ -68,4 +74,5 @@
         else:
             return None
 
+
 register_convenience_wrapper("Instance", ORMWrapperInstance)
diff --git a/xos/xos_client/xosapi/convenience/network.py b/xos/xos_client/xosapi/convenience/network.py
index 7ea792a..ffa89f7 100644
--- a/xos/xos_client/xosapi/convenience/network.py
+++ b/xos/xos_client/xosapi/convenience/network.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
 from xosapi.orm import ORMWrapper, ORMLocalObjectManager, register_convenience_wrapper
 
+
 class ORMWrapperNetwork(ORMWrapper):
     # slices- emulates the ManyToMany from Slice to Network via NetworkSlice
     @property
@@ -30,4 +28,5 @@
         idList = [x.instance.id for x in self.links.all()]
         return ORMLocalObjectManager(self.stub, "Instance", idList, False)
 
+
 register_convenience_wrapper("Network", ORMWrapperNetwork)
diff --git a/xos/xos_client/xosapi/convenience/onosapp.py b/xos/xos_client/xosapi/convenience/onosapp.py
index 187084c..fd791a8 100644
--- a/xos/xos_client/xosapi/convenience/onosapp.py
+++ b/xos/xos_client/xosapi/convenience/onosapp.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,12 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
-from xosapi.orm import ORMWrapper, register_convenience_wrapper
+from xosapi.orm import register_convenience_wrapper
 from xosapi.convenience.serviceinstance import ORMWrapperServiceInstance
 
+
 class ORMWrapperONOSApp(ORMWrapperServiceInstance):
     pass
 
+
 register_convenience_wrapper("ONOSApp", ORMWrapperONOSApp)
diff --git a/xos/xos_client/xosapi/convenience/port.py b/xos/xos_client/xosapi/convenience/port.py
index a329bba..61e7377 100644
--- a/xos/xos_client/xosapi/convenience/port.py
+++ b/xos/xos_client/xosapi/convenience/port.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,17 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperPort(ORMWrapper):
     def get_parameters(self):
         parameter_dict = {}
 
-        for param in self.stub.NetworkParameter.objects.filter(content_type=self.self_content_type_id, object_id=self.id):
+        for param in self.stub.NetworkParameter.objects.filter(
+            content_type=self.self_content_type_id, object_id=self.id
+        ):
             parameter_dict[param.parameter.name] = param.value
 
         return parameter_dict
 
+
 register_convenience_wrapper("Port", ORMWrapperPort)
diff --git a/xos/xos_client/xosapi/convenience/privilege.py b/xos/xos_client/xosapi/convenience/privilege.py
index 72623da..9bdb635 100644
--- a/xos/xos_client/xosapi/convenience/privilege.py
+++ b/xos/xos_client/xosapi/convenience/privilege.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,7 +15,9 @@
 
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperPrivilege(ORMWrapper):
     pass
-    
+
+
 register_convenience_wrapper("Privilege", ORMWrapperPrivilege)
diff --git a/xos/xos_client/xosapi/convenience/service.py b/xos/xos_client/xosapi/convenience/service.py
index 7e6ed09..ab82908 100644
--- a/xos/xos_client/xosapi/convenience/service.py
+++ b/xos/xos_client/xosapi/convenience/service.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperService(ORMWrapper):
     @property
     def serviceattribute_dict(self):
@@ -26,12 +24,14 @@
         return attrs
 
     def get_composable_networks(self):
-        SUPPORTED_VTN_SERVCOMP_KINDS = ['VSG','PRIVATE']
+        SUPPORTED_VTN_SERVCOMP_KINDS = ["VSG", "PRIVATE"]
 
         nets = []
         for slice in self.slices.all():
             for net in slice.networks.all():
-                if (net.template.vtn_kind not in SUPPORTED_VTN_SERVCOMP_KINDS) or (net.owner.id != slice.id):
+                if (net.template.vtn_kind not in SUPPORTED_VTN_SERVCOMP_KINDS) or (
+                    net.owner.id != slice.id
+                ):
                     continue
 
                 if not net.controllernetworks.exists():
@@ -78,11 +78,13 @@
         ServiceInstanceLink = self.stub.ServiceInstanceLink
 
         eastbound_si_class = getattr(self.stub, si_classname)
-        eastbound_si = eastbound_si_class(owner_id = self.id)
+        eastbound_si = eastbound_si_class(owner_id=self.id)
         eastbound_si.save()
 
-        link = ServiceInstanceLink(provider_service_instance=eastbound_si,
-                                   subscriber_service_instance=subscriber_service_instance)
+        link = ServiceInstanceLink(
+            provider_service_instance=eastbound_si,
+            subscriber_service_instance=subscriber_service_instance,
+        )
         link.save()
 
     def validate_links(self, subscriber_service_instance):
diff --git a/xos/xos_client/xosapi/convenience/serviceinstance.py b/xos/xos_client/xosapi/convenience/serviceinstance.py
index 76beb7b..33ab553 100644
--- a/xos/xos_client/xosapi/convenience/serviceinstance.py
+++ b/xos/xos_client/xosapi/convenience/serviceinstance.py
@@ -16,10 +16,10 @@
 from xosconfig import Config
 from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
+
 
 class ORMWrapperServiceInstance(ORMWrapper):
-
     @property
     def serviceinstanceattribute_dict(self):
         attrs = {}
@@ -71,7 +71,9 @@
             eastbound_si = eastbound_si_class()
             eastbound_si.owner_id = link.provider_service_id
             eastbound_si.save()
-            link = ServiceInstanceLink(provider_service_instance=eastbound_si, subscriber_service_instance=si)
+            link = ServiceInstanceLink(
+                provider_service_instance=eastbound_si, subscriber_service_instance=si
+            )
             link.save()
 
     def get_westbound_service_instance_properties(self, prop_name, include_self=False):
@@ -81,8 +83,14 @@
         wi = self.westbound_service_instances
 
         if len(wi) == 0:
-            log.error("ServiceInstance with id %s has no westbound service instances, can't find property %s in the chain" % (self.id, prop_name))
-            raise Exception("ServiceInstance with id %s has no westbound service instances" % self.id)
+            log.error(
+                "ServiceInstance with id %s has no westbound service instances, can't find property %s in the chain"
+                % (self.id, prop_name)
+            )
+            raise Exception(
+                "ServiceInstance with id %s has no westbound service instances"
+                % self.id
+            )
 
         for i in wi:
             if hasattr(i, prop_name):
@@ -92,4 +100,5 @@
                 i = self.stub.ServiceInstance.objects.get(id=i.id)
                 return i.get_westbound_service_instance_properties(prop_name)
 
-register_convenience_wrapper("ServiceInstance", ORMWrapperServiceInstance)
\ No newline at end of file
+
+register_convenience_wrapper("ServiceInstance", ORMWrapperServiceInstance)
diff --git a/xos/xos_client/xosapi/convenience/slice.py b/xos/xos_client/xosapi/convenience/slice.py
index 8431bd2..abdd485 100644
--- a/xos/xos_client/xosapi/convenience/slice.py
+++ b/xos/xos_client/xosapi/convenience/slice.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import json
 from xosapi.orm import ORMWrapper, ORMLocalObjectManager, register_convenience_wrapper
 
+
 class ORMWrapperSlice(ORMWrapper):
     # TODO: this looks to be incorrect
     @property
@@ -29,4 +27,5 @@
         idList = [x.network.id for x in self.networkslices.all()]
         return ORMLocalObjectManager(self.stub, "Network", idList, False)
 
+
 register_convenience_wrapper("Slice", ORMWrapperSlice)
diff --git a/xos/xos_client/xosapi/convenience/tag.py b/xos/xos_client/xosapi/convenience/tag.py
index be7bcf6..aaeaf65 100644
--- a/xos/xos_client/xosapi/convenience/tag.py
+++ b/xos/xos_client/xosapi/convenience/tag.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,11 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperTag(ORMWrapper):
     def get_generic_foreignkeys(self):
-        return [{"name": "content_object", "content_type": "content_type", "id": "object_id"}]
+        return [
+            {
+                "name": "content_object",
+                "content_type": "content_type",
+                "id": "object_id",
+            }
+        ]
+
 
 register_convenience_wrapper("Tag", ORMWrapperTag)
diff --git a/xos/xos_client/xosapi/convenience/user.py b/xos/xos_client/xosapi/convenience/user.py
index b9ed933..3205680 100644
--- a/xos/xos_client/xosapi/convenience/user.py
+++ b/xos/xos_client/xosapi/convenience/user.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,14 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 import hashlib
-import json
 from xosapi.orm import ORMWrapper, register_convenience_wrapper
 
+
 class ORMWrapperUser(ORMWrapper):
     @property
     def remote_password(self):
         return hashlib.md5(self.password).hexdigest()[:12]
 
+
 register_convenience_wrapper("User", ORMWrapperUser)
diff --git a/xos/xos_client/xosapi/fake_stub.py b/xos/xos_client/xosapi/fake_stub.py
index 406e8a7..abfce00 100644
--- a/xos/xos_client/xosapi/fake_stub.py
+++ b/xos/xos_client/xosapi/fake_stub.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 """ fake_stub.py
 
     Implements a simple fake grpc stub to use for unit testing.
@@ -23,8 +21,9 @@
 
 ContentTypeMap = {}
 
+
 class FakeObj(object):
-    BASES=[]
+    BASES = []
 
     def __init__(self, fields=[], **kwargs):
         super(FakeObj, self).__setattr__("is_set", {})
@@ -36,7 +35,7 @@
             setattr(self, name, f["default"])
 
         super(FakeObj, self).__setattr__("is_set", {})
-        for (k,v) in kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(self, k, v)
 
     def __repr__(self):
@@ -74,16 +73,17 @@
 
     def ListFields(self):
         fbn = self.DESCRIPTOR.fields_by_name
-        l = []
-        for (k,v) in fbn.items():
+        fieldlist = []
+        for (k, v) in fbn.items():
             if self.is_set.get(k, False):
-                l.append( (v, getattr(self, k)) )
-        return l
+                fieldlist.append((v, getattr(self, k)))
+        return fieldlist
 
     @property
     def self_content_type_id(self):
         return "xos.%s" % self.__class__.__name__.lower()
 
+
 class FakeExtensionManager(object):
     def __init__(self, obj, extensions):
         self.obj = obj
@@ -97,11 +97,13 @@
             return self.extensions[name]
         return default
 
+
 class FakeFieldOption(object):
     def __init__(self, modelName=None, reverseFieldName=None):
         self.modelName = modelName
         self.reverseFieldName = reverseFieldName
 
+
 class FakeField(object):
     def __init__(self, field):
         extensions = {}
@@ -112,7 +114,9 @@
         fk_model = field.get("fk_model", None)
         if fk_model:
             reverseFieldName = field.get("fk_reverseFieldName", None)
-            extensions["xos.foreignKey"] = FakeFieldOption(modelName=fk_model, reverseFieldName=reverseFieldName)
+            extensions["xos.foreignKey"] = FakeFieldOption(
+                modelName=fk_model, reverseFieldName=reverseFieldName
+            )
 
         fk_reverse = field.get("fk_reverse", None)
         if fk_reverse:
@@ -123,6 +127,7 @@
     def GetOptions(self):
         return self
 
+
 class FakeDescriptor(object):
     def __init__(self, objName):
         global ContentTypeIdCounter
@@ -144,101 +149,128 @@
         fbn = {}
         for field in cls.FIELDS:
             fake_field = FakeField(field)
-            fbn[ field["name"] ] = fake_field
+            fbn[field["name"]] = fake_field
 
         return fbn
 
+
 class Controller(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "deployment_id", "default": 0, "fk_model": "Deployment"},
-               {"name": "class_names", "default": "Controller"}
-             )
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "deployment_id", "default": 0, "fk_model": "Deployment"},
+        {"name": "class_names", "default": "Controller"},
+    )
 
     def __init__(self, **kwargs):
         return super(Controller, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Controller")
 
+
 class Deployment(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "class_names", "default": "Deployment"}
-             )
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "class_names", "default": "Deployment"},
+    )
 
     def __init__(self, **kwargs):
         return super(Deployment, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Controller")
 
+
 class User(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "email", "default": ""},
-               {"name": "site_id", "default": 0, "fk_model": "Site"},
-               {"name": "class_names", "default": "User"} )
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "email", "default": ""},
+        {"name": "site_id", "default": 0, "fk_model": "Site"},
+        {"name": "class_names", "default": "User"},
+    )
 
     def __init__(self, **kwargs):
         return super(User, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("User")
 
+
 class Slice(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "site_id", "default": 0, "fk_model": "Site", "fk_reverseFieldName": "slices"},
-               {"name": "service_id", "default": 0, "fk_model": "Service"},
-               {"name": "creator_id", "default": 0, "fk_model": "User"},
-               {"name": "networks_ids", "default": [], "fk_reverse": "Network"},
-               {"name": "network", "default": ""},
-               {"name": "leaf_model_name", "default": "Slice"},
-               {"name": "class_names", "default": "Slice"} )
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {
+            "name": "site_id",
+            "default": 0,
+            "fk_model": "Site",
+            "fk_reverseFieldName": "slices",
+        },
+        {"name": "service_id", "default": 0, "fk_model": "Service"},
+        {"name": "creator_id", "default": 0, "fk_model": "User"},
+        {"name": "networks_ids", "default": [], "fk_reverse": "Network"},
+        {"name": "network", "default": ""},
+        {"name": "leaf_model_name", "default": "Slice"},
+        {"name": "class_names", "default": "Slice"},
+    )
 
     def __init__(self, **kwargs):
         return super(Slice, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Slice")
 
+
 class Site(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "login_base", "default": ""},
-               {"name": "slices_ids", "default": [], "fk_reverse": "Slice"},
-               {"name": "leaf_model_name", "default": "Site"},
-               {"name": "class_names", "default": "Site"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "login_base", "default": ""},
+        {"name": "slices_ids", "default": [], "fk_reverse": "Slice"},
+        {"name": "leaf_model_name", "default": "Site"},
+        {"name": "class_names", "default": "Site"},
+    )
 
     def __init__(self, **kwargs):
         return super(Site, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Site")
 
+
 class Service(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "slices_ids", "default": [], "fk_reverse": "Slice"},
-               {"name": "leaf_model_name", "default": "Service"},
-               {"name": "class_names", "default": "Service"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "slices_ids", "default": [], "fk_reverse": "Slice"},
+        {"name": "leaf_model_name", "default": "Service"},
+        {"name": "class_names", "default": "Service"},
+    )
 
     def __init__(self, **kwargs):
         return super(Service, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Service")
 
+
 class ServiceInstance(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "owher", "default": 0, "fk_model": "Service"},
-               {"name": "leaf_model_name", "default": "ServiceInstance"},
-               {"name": "class_names", "default": "ServiceInstance"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "owher", "default": 0, "fk_model": "Service"},
+        {"name": "leaf_model_name", "default": "ServiceInstance"},
+        {"name": "class_names", "default": "ServiceInstance"},
+    )
 
     def __init__(self, **kwargs):
         return super(ServiceInstance, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("ServiceInstance")
 
+
 class ONOSService(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "leaf_model_name", "default": "ONOSService"},
-               {"name": "class_names", "default": "ONOSService,Service"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "leaf_model_name", "default": "ONOSService"},
+        {"name": "class_names", "default": "ONOSService,Service"},
+    )
 
     BASES = ["Service"]
 
@@ -247,89 +279,119 @@
 
     DESCRIPTOR = FakeDescriptor("ONOSService")
 
+
 class Network(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "owner_id", "default": 0, "fk_model": "Slice"},
-               {"name": "template_id", "default": 0, "fk_model": "NetworkTemplate"},
-               {"name": "controllernetworks_ids", "default": [], "fk_reverse": "ControllerNetwork"},
-               {"name": "leaf_model_name", "default": "Network"},
-               {"name": "class_names", "default": "Network"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "owner_id", "default": 0, "fk_model": "Slice"},
+        {"name": "template_id", "default": 0, "fk_model": "NetworkTemplate"},
+        {
+            "name": "controllernetworks_ids",
+            "default": [],
+            "fk_reverse": "ControllerNetwork",
+        },
+        {"name": "leaf_model_name", "default": "Network"},
+        {"name": "class_names", "default": "Network"},
+    )
 
     def __init__(self, **kwargs):
         return super(Network, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Network")
 
+
 class NetworkTemplate(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "name", "default": ""},
-               {"name": "vtn_kind", "default": ""},
-               {"name": "leaf_model_name", "default": "NetworkTemplate"},
-               {"name": "class_names", "default": "NetworkTemplate"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "name", "default": ""},
+        {"name": "vtn_kind", "default": ""},
+        {"name": "leaf_model_name", "default": "NetworkTemplate"},
+        {"name": "class_names", "default": "NetworkTemplate"},
+    )
 
     def __init__(self, **kwargs):
         return super(NetworkTemplate, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("NetworkTemplate")
 
+
 class ControllerNetwork(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "network_id", "default": 0, "fk_model": "Network"},
-               {"name": "controller_id", "default": 0, "fk_model": "Controller"},
-               {"name": "leaf_model_name", "default": "ControllerNetwork"},
-               {"name": "class_names", "default": "ControllerNetwork"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "network_id", "default": 0, "fk_model": "Network"},
+        {"name": "controller_id", "default": 0, "fk_model": "Controller"},
+        {"name": "leaf_model_name", "default": "ControllerNetwork"},
+        {"name": "class_names", "default": "ControllerNetwork"},
+    )
 
     def __init__(self, **kwargs):
         return super(ControllerNetwork, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("ControllerNetwork")
 
+
 class NetworkSlice(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "network_id", "default": 0, "fk_model": "Network"},
-               {"name": "slice_id", "default": 0, "fk_model": "Slice"},
-               {"name": "leaf_model_name", "default": "NetworkSlice"},
-               {"name": "class_names", "default": "NetworkSlice"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "network_id", "default": 0, "fk_model": "Network"},
+        {"name": "slice_id", "default": 0, "fk_model": "Slice"},
+        {"name": "leaf_model_name", "default": "NetworkSlice"},
+        {"name": "class_names", "default": "NetworkSlice"},
+    )
 
     def __init__(self, **kwargs):
         return super(NetworkSlice, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("NetworkSlice")
 
+
 class Tag(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "service_id", "default": None},
-               {"name": "name", "default": ""},
-               {"name": "value", "default": ""},
-               {"name": "content_type", "default": None},
-               {"name": "object_id", "default": None},
-               {"name": "leaf_model_name", "default": "Tag"},
-               {"name": "class_names", "default": "Tag"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "service_id", "default": None},
+        {"name": "name", "default": ""},
+        {"name": "value", "default": ""},
+        {"name": "content_type", "default": None},
+        {"name": "object_id", "default": None},
+        {"name": "leaf_model_name", "default": "Tag"},
+        {"name": "class_names", "default": "Tag"},
+    )
 
     def __init__(self, **kwargs):
         return super(Tag, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("Tag")
 
+
 class TestModel(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "intfield", "default": 0},
-               {"name": "stringfield", "default": "somestring"},
-               {"name": "testmodeltwos_ids", "default": [], "fk_reverse": "TestModelTwo"},
-               {"name": "class_names", "default": "TestModel"} )
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "intfield", "default": 0},
+        {"name": "stringfield", "default": "somestring"},
+        {"name": "testmodeltwos_ids", "default": [], "fk_reverse": "TestModelTwo"},
+        {"name": "class_names", "default": "TestModel"},
+    )
 
     def __init__(self, **kwargs):
         return super(TestModel, self).__init__(self.FIELDS, **kwargs)
 
     DESCRIPTOR = FakeDescriptor("TestModel")
 
+
 class TestModelTwo(FakeObj):
-    FIELDS = ( {"name": "id", "default": 0},
-               {"name": "intfieldtwo", "default": 0},
-               {"name": "stringfieldtwo", "default": "somestringtwo"},
-               {"name": "testmodel_id", "default": 0, "fk_model": "TestModel", "fk_reverseFieldName": "testmodeltwos"},
-               {"name": "class_names", "default": "TestModel"})
+    FIELDS = (
+        {"name": "id", "default": 0},
+        {"name": "intfieldtwo", "default": 0},
+        {"name": "stringfieldtwo", "default": "somestringtwo"},
+        {
+            "name": "testmodel_id",
+            "default": 0,
+            "fk_model": "TestModel",
+            "fk_reverseFieldName": "testmodeltwos",
+        },
+        {"name": "class_names", "default": "TestModel"},
+    )
 
     def __init__(self, **kwargs):
         return super(TestModelTwo, self).__init__(self.FIELDS, **kwargs)
@@ -340,23 +402,26 @@
 class ID(FakeObj):
     pass
 
+
 class FakeItemList(object):
     def __init__(self, items):
         self.items = items
 
+
 class FakeElement(object):
-    EQUAL="equal"
-    IEXACT="iexact"
+    EQUAL = "equal"
+    IEXACT = "iexact"
 
     def __init__(self):
         pass
 
+
 class FakeElements(object):
     def __init__(self):
         self.items = []
 
     def add(self):
-        el=FakeElement()
+        el = FakeElement()
         self.items.append(el)
         return el
 
@@ -366,8 +431,9 @@
     def __len__(self):
         return len(self.items)
 
+
 class FakeQuery(object):
-    DEFAULT=0
+    DEFAULT = 0
     ALL = 1
     SYNCHRONIZER_DIRTY_OBJECTS = 2
     SYNCHRONIZER_DELETED_OBJECTS = 3
@@ -377,14 +443,29 @@
     def __init__(self):
         self.elements = FakeElements()
 
+
 class FakeStub(object):
     def __init__(self):
         self.id_counter = 1
         self.objs = {}
         self.deleted_objs = {}
-        for name in ["Controller", "Deployment", "Slice", "Site", "Tag", "Service", "ServiceInstance", "ONOSService",
-                     "User", "Network", "NetworkTemplate", "ControllerNetwork", "NetworkSlice",
-                     "TestModel", "TestModelTwo"]:
+        for name in [
+            "Controller",
+            "Deployment",
+            "Slice",
+            "Site",
+            "Tag",
+            "Service",
+            "ServiceInstance",
+            "ONOSService",
+            "User",
+            "Network",
+            "NetworkTemplate",
+            "ControllerNetwork",
+            "NetworkSlice",
+            "TestModel",
+            "TestModelTwo",
+        ]:
             setattr(self, "Get%s" % name, functools.partial(self.get, name))
             setattr(self, "List%s" % name, functools.partial(self.list, name))
             setattr(self, "Create%s" % name, functools.partial(self.create, name))
@@ -392,7 +473,6 @@
             setattr(self, "Update%s" % name, functools.partial(self.update, name))
             setattr(self, "Filter%s" % name, functools.partial(self.filter, name))
 
-
     def make_key(self, name, id):
         return "%s:%d" % (name, id.id)
 
@@ -402,10 +482,10 @@
 
     def list(self, classname, empty, metadata=None):
         items = []
-        for (k,v) in self.objs.items():
+        for (k, v) in self.objs.items():
             (this_classname, id) = k.split(":")
             if this_classname == classname:
-                    items.append(v)
+                items.append(v)
         return FakeItemList(items)
 
     def filter(self, classname, query, metadata=None):
@@ -416,14 +496,14 @@
         else:
             objs = self.objs.items()
 
-        for (k,v) in objs:
+        for (k, v) in objs:
             (this_classname, id) = k.split(":")
             if this_classname != classname:
                 continue
             match = True
             for q in query.elements.items:
                 iValue = getattr(q, "iValue", None)
-                if (iValue is not None) and getattr(v,q.name)!=iValue:
+                if (iValue is not None) and getattr(v, q.name) != iValue:
                     match = False
                 sValue = getattr(q, "sValue", None)
                 if (sValue is not None) and getattr(v, q.name) != sValue:
@@ -440,7 +520,7 @@
 
         for base_classname in obj.BASES:
             base_class = globals()[base_classname]
-            base_obj = base_class(id=obj.id, leaf_model_name = classname)
+            base_obj = base_class(id=obj.id, leaf_model_name=classname)
             k = self.make_key(base_classname, base_obj)
             self.objs[k] = base_obj
 
@@ -458,26 +538,56 @@
         del self.objs[k]
         self.deleted_objs[k] = obj
 
+
 class FakeCommonProtos(object):
     def __init__(self):
         self.ID = ID
         self.Query = FakeQuery
 
+
 class FakeProtos(object):
     def __init__(self):
-        for name in ["Controller", "Deployment", "Slice", "Site", "ID", "Tag", "Service", "ServiceInstance",
-                     "ONOSService", "User", "Network", "NetworkTemplate", "ControllerNetwork", "NetworkSlice",
-                     "TestModel", "TestModelTwo"]:
+        for name in [
+            "Controller",
+            "Deployment",
+            "Slice",
+            "Site",
+            "ID",
+            "Tag",
+            "Service",
+            "ServiceInstance",
+            "ONOSService",
+            "User",
+            "Network",
+            "NetworkTemplate",
+            "ControllerNetwork",
+            "NetworkSlice",
+            "TestModel",
+            "TestModelTwo",
+        ]:
             setattr(self, name, globals()[name])
             self.common__pb2 = FakeCommonProtos()
 
+
 class FakeSymDb(object):
     def __init__(self):
         self._classes = {}
-        for name in ["Controller", "Deployment", "Slice", "Site", "ID", "Tag", "Service", "ServiceInstance",
-                     "ONOSService", "User", "Network", "NetworkTemplate", "ControllerNetwork", "NetworkSlice",
-                     "TestModel", "TestModelTwo"]:
+        for name in [
+            "Controller",
+            "Deployment",
+            "Slice",
+            "Site",
+            "ID",
+            "Tag",
+            "Service",
+            "ServiceInstance",
+            "ONOSService",
+            "User",
+            "Network",
+            "NetworkTemplate",
+            "ControllerNetwork",
+            "NetworkSlice",
+            "TestModel",
+            "TestModelTwo",
+        ]:
             self._classes["xos.%s" % name] = globals()[name]
-
-
-
diff --git a/xos/xos_client/xosapi/orm.py b/xos/xos_client/xosapi/orm.py
index 07144f5..398d199 100644
--- a/xos/xos_client/xosapi/orm.py
+++ b/xos/xos_client/xosapi/orm.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+import os
+import sys
+import threading
+import time
+import imp
+from xosconfig import Config
+from multistructlog import create_logger
 
 """
 Django-like ORM layer for gRPC
@@ -35,24 +42,20 @@
 u=c.xos_orm.User.objects.get(id=1)
 """
 
-import os
-import sys
-import threading
-import time
-import imp
-from xosconfig import Config
-from multistructlog import create_logger
 
-log = create_logger(Config().get('logging'))
+log = create_logger(Config().get("logging"))
 
 convenience_wrappers = {}
 
+
 class ORMGenericContentNotFoundException(Exception):
     pass
 
+
 class ORMGenericObjectNotFoundException(Exception):
     pass
 
+
 class ORMWrapper(object):
     """ Wraps a protobuf object to provide ORM features """
 
@@ -65,14 +68,14 @@
         super(ORMWrapper, self).__setattr__("dependent", None)
         super(ORMWrapper, self).__setattr__("is_new", is_new)
         super(ORMWrapper, self).__setattr__("post_save_fixups", [])
-        fkmap=self.gen_fkmap()
+        fkmap = self.gen_fkmap()
         super(ORMWrapper, self).__setattr__("_fkmap", fkmap)
-        reverse_fkmap=self.gen_reverse_fkmap()
+        reverse_fkmap = self.gen_reverse_fkmap()
         super(ORMWrapper, self).__setattr__("_reverse_fkmap", reverse_fkmap)
         super(ORMWrapper, self).__setattr__("_initial", self._dict)
 
-    def fields_differ(self,f1,f2):
-        return (f1 != f2)
+    def fields_differ(self, f1, f2):
+        return f1 != f2
 
     @property
     def _dict(self):
@@ -81,7 +84,7 @@
             This differs for the xos-core implementation of XOSBase. For new object, XOSBase will include field names
             that are set to default values. ORM ignores fields that are set to default values.
         """
-        d={}
+        d = {}
         for (fieldDesc, val) in self._wrapped_class.ListFields():
             name = fieldDesc.name
             d[name] = val
@@ -92,12 +95,12 @@
         d1 = self._initial
         d2 = self._dict
         all_field_names = self._wrapped_class.DESCRIPTOR.fields_by_name.keys()
-        diffs=[]
+        diffs = []
         for k in all_field_names:
-            if (d1.get(k,None) != d2.get(k,None)):
-                diffs.append( (k, (d1.get(k,None), d2.get(k,None))) )
+            if d1.get(k, None) != d2.get(k, None):
+                diffs.append((k, (d1.get(k, None), d2.get(k, None))))
 
-        #diffs = [(k, (v, d2[k])) for k, v in d1.items() if self.fields_differ(v,d2[k])]
+        # diffs = [(k, (v, d2[k])) for k, v in d1.items() if self.fields_differ(v,d2[k])]
         return dict(diffs)
 
     @property
@@ -129,7 +132,10 @@
             update_fields = self.changed_fields
             if always_update_timestamp and "updated" not in update_fields:
                 update_fields.append("updated")
-            self.save(update_fields=sorted(update_fields), always_update_timestamp=always_update_timestamp)
+            self.save(
+                update_fields=sorted(update_fields),
+                always_update_timestamp=always_update_timestamp,
+            )
 
     def create_attr(self, name, value=None):
         """ setattr(self, ...) will fail for attributes that don't exist in the
@@ -151,23 +157,37 @@
         all_field_names = self._wrapped_class.DESCRIPTOR.fields_by_name.keys()
 
         for (name, field) in self._wrapped_class.DESCRIPTOR.fields_by_name.items():
-           if name.endswith("_id"):
-               foreignKey = field.GetOptions().Extensions._FindExtensionByName("xos.foreignKey")
-               fk = field.GetOptions().Extensions[foreignKey]
-               if fk and fk.modelName:
-                   fkdict = {"src_fieldName": name, "modelName": fk.modelName, "kind": "fk"}
-                   if fk.reverseFieldName:
-                       fkdict["reverse_fieldName"] = fk.reverseFieldName
-                   fkmap[name[:-3]] = fkdict
-               else:
-                   # If there's a corresponding _type_id field, then see if this
-                   # is a generic foreign key.
-                   type_name = name[:-3] + "_type_id"
-                   if type_name in all_field_names:
-                       fkmap[name[:-3]] = {"src_fieldName": name, "ct_fieldName": type_name, "kind": "generic_fk"}
+            if name.endswith("_id"):
+                foreignKey = field.GetOptions().Extensions._FindExtensionByName(
+                    "xos.foreignKey"
+                )
+                fk = field.GetOptions().Extensions[foreignKey]
+                if fk and fk.modelName:
+                    fkdict = {
+                        "src_fieldName": name,
+                        "modelName": fk.modelName,
+                        "kind": "fk",
+                    }
+                    if fk.reverseFieldName:
+                        fkdict["reverse_fieldName"] = fk.reverseFieldName
+                    fkmap[name[:-3]] = fkdict
+                else:
+                    # If there's a corresponding _type_id field, then see if this
+                    # is a generic foreign key.
+                    type_name = name[:-3] + "_type_id"
+                    if type_name in all_field_names:
+                        fkmap[name[:-3]] = {
+                            "src_fieldName": name,
+                            "ct_fieldName": type_name,
+                            "kind": "generic_fk",
+                        }
 
         for gfk in self.get_generic_foreignkeys():
-            fkmap[gfk["name"]] = {"src_fieldName": gfk["id"], "ct_fieldName": gfk["content_type"], "kind": "generic_fk"}
+            fkmap[gfk["name"]] = {
+                "src_fieldName": gfk["id"],
+                "ct_fieldName": gfk["content_type"],
+                "kind": "generic_fk",
+            }
 
         return fkmap
 
@@ -175,17 +195,28 @@
         reverse_fkmap = {}
 
         for (name, field) in self._wrapped_class.DESCRIPTOR.fields_by_name.items():
-           if name.endswith("_ids"):
-               reverseForeignKey = field.GetOptions().Extensions._FindExtensionByName("xos.reverseForeignKey")
-               fk = field.GetOptions().Extensions[reverseForeignKey]
-               if fk and fk.modelName:
-                   reverse_fkmap[name[:-4]] = {"src_fieldName": name, "modelName": fk.modelName, "writeable": False}
-               else:
-                   manyToManyForeignKey = field.GetOptions().Extensions._FindExtensionByName("xos.manyToManyForeignKey")
-                   fk = field.GetOptions().Extensions[manyToManyForeignKey]
-                   if fk and fk.modelName:
-                       reverse_fkmap[name[:-4]] = {"src_fieldName": name, "modelName": fk.modelName, "writeable": True}
-
+            if name.endswith("_ids"):
+                reverseForeignKey = field.GetOptions().Extensions._FindExtensionByName(
+                    "xos.reverseForeignKey"
+                )
+                fk = field.GetOptions().Extensions[reverseForeignKey]
+                if fk and fk.modelName:
+                    reverse_fkmap[name[:-4]] = {
+                        "src_fieldName": name,
+                        "modelName": fk.modelName,
+                        "writeable": False,
+                    }
+                else:
+                    manyToManyForeignKey = field.GetOptions().Extensions._FindExtensionByName(
+                        "xos.manyToManyForeignKey"
+                    )
+                    fk = field.GetOptions().Extensions[manyToManyForeignKey]
+                    if fk and fk.modelName:
+                        reverse_fkmap[name[:-4]] = {
+                            "src_fieldName": name,
+                            "modelName": fk.modelName,
+                            "writeable": True,
+                        }
 
         return reverse_fkmap
 
@@ -200,12 +231,14 @@
         if not fk_id:
             return None
 
-        if fk_kind=="fk":
-            id=self.stub.make_ID(id=fk_id)
+        if fk_kind == "fk":
+            id = self.stub.make_ID(id=fk_id)
             dest_model = self.stub.invoke("Get%s" % fk_entry["modelName"], id)
 
-        elif fk_kind=="generic_fk":
-            dest_model = self.stub.genericForeignKeyResolve(getattr(self, fk_entry["ct_fieldName"]), fk_id)._wrapped_class
+        elif fk_kind == "generic_fk":
+            dest_model = self.stub.genericForeignKeyResolve(
+                getattr(self, fk_entry["ct_fieldName"]), fk_id
+            )._wrapped_class
 
         else:
             raise Exception("unknown fk_kind")
@@ -218,7 +251,12 @@
     def reverse_fk_resolve(self, name):
         if name not in self.reverse_cache:
             fk_entry = self._reverse_fkmap[name]
-            self.reverse_cache[name] = ORMLocalObjectManager(self.stub, fk_entry["modelName"], getattr(self, fk_entry["src_fieldName"]), fk_entry["writeable"])
+            self.reverse_cache[name] = ORMLocalObjectManager(
+                self.stub,
+                fk_entry["modelName"],
+                getattr(self, fk_entry["src_fieldName"]),
+                fk_entry["writeable"],
+            )
 
         return self.reverse_cache[name]
 
@@ -231,29 +269,41 @@
             id = 0
         setattr(self._wrapped_class, fk_entry["src_fieldName"], id)
 
-        if fk_kind=="generic_fk":
-            setattr(self._wrapped_class, fk_entry["ct_fieldName"], model.self_content_type_id)
+        if fk_kind == "generic_fk":
+            setattr(
+                self._wrapped_class,
+                fk_entry["ct_fieldName"],
+                model.self_content_type_id,
+            )
 
         if name in self.cache:
             old_model = self.cache[name]
             if fk_entry.get("reverse_fieldName"):
                 # Note this fk change so that we can update the destination model after we save.
-                self.post_save_fixups.append({"src_fieldName": fk_entry["src_fieldName"],
-                                              "dest_id": id,
-                                              "dest_model": old_model,
-                                              "remove": True,
-                                              "reverse_fieldName": fk_entry.get("reverse_fieldName")})
+                self.post_save_fixups.append(
+                    {
+                        "src_fieldName": fk_entry["src_fieldName"],
+                        "dest_id": id,
+                        "dest_model": old_model,
+                        "remove": True,
+                        "reverse_fieldName": fk_entry.get("reverse_fieldName"),
+                    }
+                )
             del self.cache[name]
 
         if model:
             self.cache[name] = model
             if fk_entry.get("reverse_fieldName"):
                 # Note this fk change so that we can update the destination model after we save.
-                self.post_save_fixups.append({"src_fieldName": fk_entry["src_fieldName"],
-                                              "dest_id": id,
-                                              "dest_model": model,
-                                              "remove": False,
-                                              "reverse_fieldName": fk_entry.get("reverse_fieldName")})
+                self.post_save_fixups.append(
+                    {
+                        "src_fieldName": fk_entry["src_fieldName"],
+                        "dest_id": id,
+                        "dest_model": model,
+                        "remove": False,
+                        "reverse_fieldName": fk_entry.get("reverse_fieldName"),
+                    }
+                )
         elif name in self.cache:
             del self.cache[name]
 
@@ -284,7 +334,7 @@
         #       self.__dict__
 
         # pk is a synonym for id
-        if (name == "pk"):
+        if name == "pk":
             name = "id"
 
         if name in self._fkmap.keys():
@@ -309,7 +359,7 @@
         if name in self._fkmap.keys():
             self.fk_set(name, value)
         elif name in self.__dict__:
-            super(ORMWrapper,self).__setattr__(name, value)
+            super(ORMWrapper, self).__setattr__(name, value)
         elif value is None:
             # When handling requests, XOS interprets gRPC HasField(<fieldname>)==False to indicate that the caller
             # has not set the field and wants it to continue to use its existing (or default) value. That leaves us
@@ -340,7 +390,7 @@
         return self._wrapped_class.__repr__()
 
     def dump(self):
-        print self.dumpstr()
+        print(self.dumpstr())
 
     def invalidate_cache(self, name=None):
         if name:
@@ -352,22 +402,34 @@
             self.cache.clear()
             self.reverse_cache.clear()
 
-    def save(self, update_fields=None, always_update_timestamp=False, is_sync_save=False, is_policy_save=False):
+    def save(
+        self,
+        update_fields=None,
+        always_update_timestamp=False,
+        is_sync_save=False,
+        is_policy_save=False,
+    ):
         if self.is_new:
-           new_class = self.stub.invoke("Create%s" % self._wrapped_class.__class__.__name__, self._wrapped_class)
-           self._wrapped_class = new_class
-           self.is_new = False
+            new_class = self.stub.invoke(
+                "Create%s" % self._wrapped_class.__class__.__name__, self._wrapped_class
+            )
+            self._wrapped_class = new_class
+            self.is_new = False
         else:
-           metadata = []
-           if update_fields:
-               metadata.append( ("update_fields", ",".join(update_fields)) )
-           if always_update_timestamp:
-               metadata.append( ("always_update_timestamp", "1") )
-           if is_policy_save:
-               metadata.append( ("is_policy_save", "1") )
-           if is_sync_save:
-               metadata.append( ("is_sync_save", "1") )
-           self.stub.invoke("Update%s" % self._wrapped_class.__class__.__name__, self._wrapped_class, metadata=metadata)
+            metadata = []
+            if update_fields:
+                metadata.append(("update_fields", ",".join(update_fields)))
+            if always_update_timestamp:
+                metadata.append(("always_update_timestamp", "1"))
+            if is_policy_save:
+                metadata.append(("is_policy_save", "1"))
+            if is_sync_save:
+                metadata.append(("is_sync_save", "1"))
+            self.stub.invoke(
+                "Update%s" % self._wrapped_class.__class__.__name__,
+                self._wrapped_class,
+                metadata=metadata,
+            )
         self.do_post_save_fixups()
 
         # Now that object has saved, reset our initial state for diff calculation
@@ -379,8 +441,8 @@
 
     def tologdict(self):
         try:
-            d = {'model_name':self._wrapped_class.__class__.__name__, 'pk': self.pk}
-        except:
+            d = {"model_name": self._wrapped_class.__class__.__name__, "pk": self.pk}
+        except BaseException:
             d = {}
 
         return d
@@ -402,16 +464,19 @@
     def ansible_tag(self):
         return "%s_%s" % (self._wrapped_class.__class__.__name__, self.id)
 
+
 class ORMQuerySet(list):
     """ Makes lists look like django querysets """
+
     def first(self):
-        if len(self)>0:
+        if len(self) > 0:
             return self[0]
         else:
             return None
 
     def exists(self):
-        return len(self)>0
+        return len(self) > 0
+
 
 class ORMLocalObjectManager(object):
     """ Manages a local list of objects """
@@ -429,7 +494,9 @@
 
         models = []
         for id in self._idList:
-            models.append(self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=id)))
+            models.append(
+                self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=id))
+            )
 
         self._cache = models
 
@@ -437,17 +504,22 @@
 
     def all(self):
         models = self.resolve_queryset()
-        return [make_ORMWrapper(x,self._stub) for x in models]
+        return [make_ORMWrapper(x, self._stub) for x in models]
 
     def exists(self):
-        return len(self._idList)>0
+        return len(self._idList) > 0
 
     def count(self):
         return len(self._idList)
 
     def first(self):
         if self._idList:
-            model = make_ORMWrapper(self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=self._idList[0])), self._stub)
+            model = make_ORMWrapper(
+                self._stub.invoke(
+                    "Get%s" % self._modelName, self._stub.make_ID(id=self._idList[0])
+                ),
+                self._stub,
+            )
             return model
         else:
             return None
@@ -478,6 +550,7 @@
 
         self._idList.remove(id)
 
+
 class ORMObjectManager(object):
     """ Manages a remote list of objects """
 
@@ -499,14 +572,16 @@
         return make_ORMWrapper(obj, self._stub)
 
     def wrap_list(self, obj):
-        result=[]
+        result = []
         for item in obj.items:
             result.append(make_ORMWrapper(item, self._stub))
         return ORMQuerySet(result)
 
     def all(self):
-        if (self._kind == self.DEFAULT):
-            return self.wrap_list(self._stub.invoke("List%s" % self._modelName, self._stub.make_empty()))
+        if self._kind == self.DEFAULT:
+            return self.wrap_list(
+                self._stub.invoke("List%s" % self._modelName, self._stub.make_empty())
+            )
         else:
             return self.filter()
 
@@ -557,29 +632,41 @@
     def get(self, **kwargs):
         if kwargs.keys() == ["id"]:
             # the fast and easy case, look it up by id
-            return self.wrap_single(self._stub.invoke("Get%s" % self._modelName, self._stub.make_ID(id=kwargs["id"])))
+            return self.wrap_single(
+                self._stub.invoke(
+                    "Get%s" % self._modelName, self._stub.make_ID(id=kwargs["id"])
+                )
+            )
         else:
             # the slightly more difficult case, filter and return the first item
             objs = self.filter(**kwargs)
             return objs[0]
 
     def new(self, **kwargs):
-        if (self._kind != ORMObjectManager.DEFAULT):
-            raise Exception("Creating objects is only supported by the DEFAULT object manager")
+        if self._kind != ORMObjectManager.DEFAULT:
+            raise Exception(
+                "Creating objects is only supported by the DEFAULT object manager"
+            )
 
         cls = self._stub.all_grpc_classes[self._modelName]
         o = make_ORMWrapper(cls(), self._stub, is_new=True)
-        for (k,v) in  kwargs.items():
+        for (k, v) in kwargs.items():
             setattr(o, k, v)
         o.recompute_initial()
         return o
 
+
 class ORMModelClass(object):
     def __init__(self, stub, model_name, package_name):
         self.model_name = model_name
         self._stub = stub
         self.objects = ORMObjectManager(stub, model_name, package_name)
-        self.deleted_objects = ORMObjectManager(stub, model_name, package_name, ORMObjectManager.SYNCHRONIZER_DELETED_OBJECTS)
+        self.deleted_objects = ORMObjectManager(
+            stub,
+            model_name,
+            package_name,
+            ORMObjectManager.SYNCHRONIZER_DELETED_OBJECTS,
+        )
 
     @property
     def __name__(self):
@@ -592,9 +679,19 @@
     def __call__(self, *args, **kwargs):
         return self.objects.new(*args, **kwargs)
 
+
 class ORMStub(object):
-    def __init__(self, stub, protos, package_name, invoker=None, caller_kind="grpcapi", empty = None,
-                 enable_backoff=True, restart_on_disconnect=False):
+    def __init__(
+        self,
+        stub,
+        protos,
+        package_name,
+        invoker=None,
+        caller_kind="grpcapi",
+        empty=None,
+        enable_backoff=True,
+        restart_on_disconnect=False,
+    ):
         self.grpc_stub = stub
         self.protos = protos
         self.common_protos = protos.common__pb2
@@ -612,56 +709,62 @@
         self._empty = empty
 
         for name in dir(stub):
-           if name.startswith("Get"):
-               model_name = name[3:]
-               setattr(self,model_name, ORMModelClass(self, model_name, package_name))
+            if name.startswith("Get"):
+                model_name = name[3:]
+                setattr(self, model_name, ORMModelClass(self, model_name, package_name))
 
-               self.all_model_names.append(model_name)
+                self.all_model_names.append(model_name)
 
-               grpc_class = getattr(self.protos, model_name)
-               self.all_grpc_classes[model_name] = grpc_class
+                grpc_class = getattr(self.protos, model_name)
+                self.all_grpc_classes[model_name] = grpc_class
 
-               ct = grpc_class.DESCRIPTOR.GetOptions().Extensions._FindExtensionByName("xos.contentTypeId")
-               if ct:
-                   ct = grpc_class.DESCRIPTOR.GetOptions().Extensions[ct]
-                   if ct:
-                       self.content_type_map[ct] = model_name
-                       self.reverse_content_type_map[model_name] = ct
+                ct = grpc_class.DESCRIPTOR.GetOptions().Extensions._FindExtensionByName(
+                    "xos.contentTypeId"
+                )
+                if ct:
+                    ct = grpc_class.DESCRIPTOR.GetOptions().Extensions[ct]
+                    if ct:
+                        self.content_type_map[ct] = model_name
+                        self.reverse_content_type_map[model_name] = ct
 
     def genericForeignKeyResolve(self, content_type_id, id):
         if content_type_id.endswith("_decl"):
             content_type_id = content_type_id[:-5]
 
         if content_type_id not in self.content_type_map:
-            raise ORMGenericContentNotFoundException("Content_type %s not found in self.content_type_map" % content_type_id)
+            raise ORMGenericContentNotFoundException(
+                "Content_type %s not found in self.content_type_map" % content_type_id
+            )
 
         model_name = self.content_type_map[content_type_id]
 
         model = getattr(self, model_name)
         objs = model.objects.filter(id=id)
         if not objs:
-            raise ORMGenericObjectNotFoundException("Object %s of model %s was not found" % (id,model_name))
+            raise ORMGenericObjectNotFoundException(
+                "Object %s of model %s was not found" % (id, model_name)
+            )
 
         return model.objects.get(id=id)
 
     def add_default_metadata(self, metadata):
-        default_metadata = [ ("caller_kind", self.caller_kind) ]
+        default_metadata = [("caller_kind", self.caller_kind)]
 
         # introspect to see if we're running from a synchronizer thread
         if getattr(threading.current_thread(), "is_sync_thread", False):
-            default_metadata.append( ("is_sync_save", "1") )
+            default_metadata.append(("is_sync_save", "1"))
 
         # introspect to see if we're running from a model_policy thread
         if getattr(threading.current_thread(), "is_policy_thread", False):
-            default_metadata.append( ("is_policy_save", "1") )
+            default_metadata.append(("is_policy_save", "1"))
 
         # build up a list of metadata keys we already have
-        md_keys=[x[0] for x in metadata]
+        md_keys = [x[0] for x in metadata]
 
         # add any defaults that we don't already have
         for md in default_metadata:
             if md[0] not in md_keys:
-                metadata.append( (md[0], md[1]) )
+                metadata.append((md[0], md[1]))
 
     def invoke(self, name, request, metadata=[]):
         self.add_default_metadata(metadata)
@@ -669,17 +772,20 @@
         if self.invoker:
             # Hook in place to call Chameleon's invoke method, as soon as we
             # have rewritten the synchronizer to use reactor.
-            return self.invoker.invoke(self.grpc_stub.__class__, name, request, metadata={}).result[0]
+            return self.invoker.invoke(
+                self.grpc_stub.__class__, name, request, metadata={}
+            ).result[0]
         elif self.enable_backoff:
             # Our own retry mechanism. This works fine if there is a temporary
             # failure in connectivity, but does not re-download gRPC schema.
             import grpc
+
             backoff = [0.5, 1, 2, 4, 8]
             while True:
                 try:
                     method = getattr(self.grpc_stub, name)
                     return method(request, metadata=metadata)
-                except grpc._channel._Rendezvous, e:
+                except grpc._channel._Rendezvous as e:
                     code = e.code()
                     if code == grpc.StatusCode.UNAVAILABLE:
                         if self.restart_on_disconnect:
@@ -687,19 +793,18 @@
                             # the core is still serving up the same models it was when we established connectivity,
                             # so restart the synchronizer.
                             # TODO: Hash check on the core models to tell if something changed would be better.
-                            os.execv(sys.executable, ['python'] + sys.argv)
+                            os.execv(sys.executable, ["python"] + sys.argv)
                         if not backoff:
                             raise Exception("No more retries on %s" % name)
                         time.sleep(backoff.pop(0))
                     else:
                         raise
-                except:
+                except BaseException:
                     raise
         else:
             method = getattr(self.grpc_stub, name)
             return method(request, metadata=metadata)
 
-
     def make_ID(self, id):
         return getattr(self.common_protos, "ID")(id=id)
 
@@ -712,18 +817,20 @@
     def listObjects(self):
         return self.all_model_names
 
+
 def register_convenience_wrapper(class_name, wrapper):
     global convenience_wrappers
 
     convenience_wrappers[class_name] = wrapper
 
+
 def make_ORMWrapper(wrapped_class, *args, **kwargs):
     cls = None
 
     if (not cls) and wrapped_class.__class__.__name__ in convenience_wrappers:
         cls = convenience_wrappers[wrapped_class.__class__.__name__]
 
-    if (not cls):
+    if not cls:
         # Search the list of class names for this model to see if we have any applicable wrappers. The list is always
         # sorted from most specific to least specific, so the first one we find will automatically be the most relevant
         # one. If we don't find any, then default to ORMWrapper
@@ -737,11 +844,12 @@
             if name in convenience_wrappers:
                 cls = convenience_wrappers[name]
 
-    if (not cls):
+    if not cls:
         cls = ORMWrapper
 
     return cls(wrapped_class, *args, **kwargs)
 
+
 def import_convenience_methods():
 
     log.info("Loading convenience methods")
@@ -749,12 +857,13 @@
     cwd = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
     api_convenience_dir = os.path.join(cwd, "convenience")
     for file in os.listdir(api_convenience_dir):
-        if file.endswith(".py") and not "test" in file:
+        if file.endswith(".py") and "test" not in file:
             pathname = os.path.join(api_convenience_dir, file)
             try:
                 log.debug("Loading: %s" % file)
                 imp.load_source(file[:-3], pathname)
-            except Exception, e:
-                log.exception("Cannot import api convenience method for: %s, %s" % (file[:-3], pathname))
-
-
+            except Exception:
+                log.exception(
+                    "Cannot import api convenience method for: %s, %s"
+                    % (file[:-3], pathname)
+                )
diff --git a/xos/xos_client/xosapi/test_orm.py b/xos/xos_client/xosapi/test_orm.py
index 3eac9b2..88bf6d4 100644
--- a/xos/xos_client/xosapi/test_orm.py
+++ b/xos/xos_client/xosapi/test_orm.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,38 +12,41 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
 import exceptions
 import os
 import random
 import string
 import sys
 import unittest
-from mock import patch, ANY
+from mock import patch
 from StringIO import StringIO
 
 # by default, use fake stub rather than real core
-USE_FAKE_STUB=True
+USE_FAKE_STUB = True
 
-PARENT_DIR=os.path.join(os.path.dirname(__file__), "..")
+PARENT_DIR = os.path.join(os.path.dirname(__file__), "..")
+
 
 class TestORM(unittest.TestCase):
     def setUp(self):
         from xosconfig import Config
+
         test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
         config = os.path.join(test_path, "test_config.yaml")
         Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
-        if (USE_FAKE_STUB):
+        Config.init(config, "synchronizer-config-schema.yaml")
+        if USE_FAKE_STUB:
             sys.path.append(PARENT_DIR)
 
         # Import these after config, in case they depend on config
         from xosapi.orm import ORMQuerySet, ORMLocalObjectManager
+
         self.ORMQuerySet = ORMQuerySet
         self.ORMLocalObjectManager = ORMLocalObjectManager
 
     def tearDown(self):
-        if (USE_FAKE_STUB):
+        if USE_FAKE_STUB:
             sys.path.remove(PARENT_DIR)
 
     def make_coreapi(self):
@@ -55,7 +57,13 @@
             xosapi.orm.import_convenience_methods()
 
             stub = FakeStub()
-            api = xosapi.orm.ORMStub(stub=stub, package_name = "xos", protos=FakeProtos(), empty = FakeObj, enable_backoff = False)
+            api = xosapi.orm.ORMStub(
+                stub=stub,
+                package_name="xos",
+                protos=FakeProtos(),
+                empty=FakeObj,
+                enable_backoff=False,
+            )
             return api
         else:
             return xos_grpc_client.coreapi
@@ -94,7 +102,7 @@
         orm = self.make_coreapi()
         s = orm.Slice()
         self.assertNotEqual(s, None)
-        self.assertEqual(s.dumpstr(), '')
+        self.assertEqual(s.dumpstr(), "")
 
     def test_dump(self):
         """ dump() is like dumpstr() but prints to stdout. Mock stdout by using a stringIO. """
@@ -117,7 +125,7 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        got_site = orm.Site.objects.get(id = site.id)
+        got_site = orm.Site.objects.get(id=site.id)
         self.assertNotEqual(got_site, None)
         self.assertEqual(got_site.id, site.id)
 
@@ -136,7 +144,7 @@
 
     def test_save_new(self):
         orm = self.make_coreapi()
         orig_len_sites = len(orm.Site.objects.all())
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
@@ -149,12 +156,12 @@
         self.assertTrue(site.id > 0)
 
         # there should be one new site
-        self.assertEqual(len(orm.Site.objects.all()), orig_len_sites+1)
+        self.assertEqual(len(orm.Site.objects.all()), orig_len_sites + 1)
 
         # retrieve the site, and update it
         created_site_id = site.id
         site = orm.Site.objects.get(id=created_site_id)
-        site.name="mysitetwo"
+        site.name = "mysitetwo"
         site.save()
 
         # the site_id should not have changed
@@ -182,7 +189,7 @@
         site = orm.Site(name="mysite")
         site.save()
         sites = orm.Site.objects.all()
-        self.assertEqual(len(sites), orig_len_sites+1)
+        self.assertEqual(len(sites), orig_len_sites + 1)
 
     def test_objects_first(self):
         orm = self.make_coreapi()
@@ -193,19 +200,25 @@
 
     def test_content_type_map(self):
         orm = self.make_coreapi()
-        self.assertTrue( "Slice" in orm.content_type_map.values() )
-        self.assertTrue( "Site" in orm.content_type_map.values() )
-        self.assertTrue( "Tag" in orm.content_type_map.values() )
+        self.assertTrue("Slice" in orm.content_type_map.values())
+        self.assertTrue("Site" in orm.content_type_map.values())
+        self.assertTrue("Tag" in orm.content_type_map.values())
 
     def test_foreign_key_get(self):
         orm = self.make_coreapi()
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site_id = site.id, creator_id = user.id)
+        slice = orm.Slice(name="mysite_foo", site_id=site.id, creator_id=user.id)
         slice.save()
         self.assertTrue(slice.id > 0)
         self.assertNotEqual(slice.site, None)
@@ -216,10 +229,16 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site = site, creator_id=user.id)
+        slice = orm.Slice(name="mysite_foo", site=site, creator_id=user.id)
         slice.save()
         slice.invalidate_cache()
         self.assertTrue(slice.id > 0)
@@ -233,10 +252,16 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site = site, creator_id=user.id)
+        slice = orm.Slice(name="mysite_foo", site=site, creator_id=user.id)
         slice.save()
         self.assertTrue(slice.id > 0)
         self.assertNotEqual(slice.site, None)
@@ -251,10 +276,16 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site = site, creator_id=user.id)
+        slice = orm.Slice(name="mysite_foo", site=site, creator_id=user.id)
         slice.save()
         self.assertTrue(slice.id > 0)
         self.assertNotEqual(slice.site, None)
@@ -284,10 +315,16 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site = site, creator_id=user.id)
+        slice = orm.Slice(name="mysite_foo", site=site, creator_id=user.id)
         slice.save()
         self.assertTrue(slice.id > 0)
         self.assertNotEqual(slice.site, None)
@@ -316,10 +353,16 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site = site, creator_id=user.id)
+        slice = orm.Slice(name="mysite_foo", site=site, creator_id=user.id)
         slice.save()
         self.assertTrue(slice.id > 0)
         self.assertNotEqual(slice.site, None)
@@ -349,10 +392,18 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
-        slice = orm.Slice(name="mysite_foo", site = site, service=None, creator_id=user.id)
+        slice = orm.Slice(
+            name="mysite_foo", site=site, service=None, creator_id=user.id
+        )
         slice.save()
         slice.invalidate_cache()
         self.assertTrue(slice.id > 0)
@@ -363,14 +414,22 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         self.assertTrue(user.id > 0)
         service = orm.Service(name="myservice")
         service.save()
         self.assertTrue(service.id > 0)
         # start out slice.service is non-None
-        slice = orm.Slice(name="mysite_foo", site = site, service=service, creator_id=user.id)
+        slice = orm.Slice(
+            name="mysite_foo", site=site, service=service, creator_id=user.id
+        )
         slice.save()
         slice.invalidate_cache()
         self.assertTrue(slice.id > 0)
@@ -382,7 +441,6 @@
         slice.invalidate_cache()
         self.assertEqual(slice.service, None)
 
-
     def test_generic_foreign_key_get(self):
         orm = self.make_coreapi()
         service = orm.Service(name="myservice")
@@ -390,7 +448,13 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        tag = orm.Tag(service=service, name="mytag", value="somevalue", content_type=site.self_content_type_id, object_id=site.id)
+        tag = orm.Tag(
+            service=service,
+            name="mytag",
+            value="somevalue",
+            content_type=site.self_content_type_id,
+            object_id=site.id,
+        )
         tag.save()
         self.assertTrue(tag.id > 0)
         self.assertNotEqual(tag.content_object, None)
@@ -403,7 +467,13 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        tag = orm.Tag(service=service, name="mytag", value="somevalue", content_type=site.self_content_type_id + "_decl", object_id=site.id)
+        tag = orm.Tag(
+            service=service,
+            name="mytag",
+            value="somevalue",
+            content_type=site.self_content_type_id + "_decl",
+            object_id=site.id,
+        )
         tag.save()
         self.assertTrue(tag.id > 0)
         self.assertNotEqual(tag.content_object, None)
@@ -416,12 +486,21 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        tag = orm.Tag(service=service, name="mytag", value="somevalue", content_type="does_not_exist", object_id=site.id)
+        tag = orm.Tag(
+            service=service,
+            name="mytag",
+            value="somevalue",
+            content_type="does_not_exist",
+            object_id=site.id,
+        )
         tag.save()
         self.assertTrue(tag.id > 0)
         with self.assertRaises(Exception) as e:
-            obj = tag.content_object
-        self.assertEqual(e.exception.message, "Content_type does_not_exist not found in self.content_type_map")
+            tag.content_object
+        self.assertEqual(
+            e.exception.message,
+            "Content_type does_not_exist not found in self.content_type_map",
+        )
 
     def test_generic_foreign_key_get_bad_id(self):
         orm = self.make_coreapi()
@@ -430,12 +509,19 @@
         site = orm.Site(name="mysite")
         site.save()
         self.assertTrue(site.id > 0)
-        tag = orm.Tag(service=service, name="mytag", value="somevalue", content_type=site.self_content_type_id, object_id=4567)
+        tag = orm.Tag(
+            service=service,
+            name="mytag",
+            value="somevalue",
+            content_type=site.self_content_type_id,
+            object_id=4567,
+        )
         tag.save()
         self.assertTrue(tag.id > 0)
         with self.assertRaises(Exception) as e:
-            obj = tag.content_object
-        self.assertEqual(e.exception.message, "Object 4567 of model Site was not found")
+            tag.content_object
+        self.assertEqual(
+            e.exception.message, "Object 4567 of model Site was not found")
 
     def test_generic_foreign_key_set(self):
         orm = self.make_coreapi()
@@ -513,12 +599,15 @@
         tm = orm.TestModel()
         with self.assertRaises(Exception) as e:
             tm.intfile = None
-        self.assertEqual(e.exception.message, "Setting a non-foreignkey field to None is not supported")
+        self.assertEqual(
+            e.exception.message,
+            "Setting a non-foreignkey field to None is not supported",
+        )
 
     def test_query_iexact(self):
         orm = self.make_coreapi()
         with patch.object(orm.grpc_stub, "FilterTestModel", autospec=True) as filter:
-            orm.TestModel.objects.filter(name__iexact = "foo")
+            orm.TestModel.objects.filter(name__iexact="foo")
             self.assertEqual(filter.call_count, 1)
             q = filter.call_args[0][0]
 
@@ -530,7 +619,7 @@
     def test_query_equal(self):
         orm = self.make_coreapi()
         with patch.object(orm.grpc_stub, "FilterTestModel", autospec=True) as filter:
-            orm.TestModel.objects.filter(name = "foo")
+            orm.TestModel.objects.filter(name="foo")
             self.assertEqual(filter.call_count, 1)
             q = filter.call_args[0][0]
 
@@ -559,8 +648,8 @@
 
         site.login_base = "bar"
 
-        self.assertEqual(site._dict, {'login_base': 'bar', 'name': 'mysite'})
-        self.assertEqual(site.diff, {'login_base': (None, 'bar')})
+        self.assertEqual(site._dict, {"login_base": "bar", "name": "mysite"})
+        self.assertEqual(site.diff, {"login_base": (None, "bar")})
         self.assertIn("name", site.changed_fields)
         self.assertIn("login_base", site.changed_fields)
         self.assertEqual(site.has_field_changed("name"), False)
@@ -582,8 +671,8 @@
 
         site.login_base = "bar"
 
-        self.assertEqual(site._dict, {'id': 1, 'login_base': 'bar', 'name': 'mysite'})
-        self.assertEqual(site.diff, {'login_base': ("foo", 'bar')})
+        self.assertEqual(site._dict, {"id": 1, "login_base": "bar", "name": "mysite"})
+        self.assertEqual(site.diff, {"login_base": ("foo", "bar")})
         self.assertIn("login_base", site.changed_fields)
         self.assertEqual(site.has_field_changed("name"), False)
         self.assertEqual(site.has_field_changed("login_base"), True)
@@ -598,7 +687,7 @@
 
         site.login_base = "bar"
 
-        self.assertEqual(site.diff, {'login_base': ("foo", 'bar')})
+        self.assertEqual(site.diff, {"login_base": ("foo", "bar")})
 
         site.save()
 
@@ -633,14 +722,18 @@
         testModel = orm.TestModel(intfield=7, stringfield="foo")
         testModel.save()
 
-        testModel.intfield=9
+        testModel.intfield = 9
 
-        with patch.object(orm.grpc_stub, "UpdateTestModel", wraps=orm.grpc_stub.UpdateTestModel) as update:
+        with patch.object(
+            orm.grpc_stub, "UpdateTestModel", wraps=orm.grpc_stub.UpdateTestModel
+        ) as update:
             testModel.save_changed_fields()
 
             self.assertEqual(update.call_count, 1)
             self.assertIn("metadata", update.call_args[1])
-            update_fields_arg = [x[1] for x in update.call_args[1]["metadata"] if x[0]=="update_fields"]
+            update_fields_arg = [
+                x[1] for x in update.call_args[1]["metadata"] if x[0] == "update_fields"
+            ]
             self.assertEqual(update_fields_arg, ["intfield"])
 
     def test_ORMWrapper_get_generic_foreignkeys(self):
@@ -658,11 +751,17 @@
 
         testModelTwo = orm.TestModelTwo()
 
-        self.assertDictEqual(testModelTwo.gen_fkmap(),
-                             {'testmodel': {'kind': 'fk',
-                                            'modelName': 'TestModel',
-                                            'reverse_fieldName': 'testmodeltwos',
-                                            'src_fieldName': 'testmodel_id'}})
+        self.assertDictEqual(
+            testModelTwo.gen_fkmap(),
+            {
+                "testmodel": {
+                    "kind": "fk",
+                    "modelName": "TestModel",
+                    "reverse_fieldName": "testmodeltwos",
+                    "src_fieldName": "testmodel_id",
+                }
+            },
+        )
 
     def test_ORMWrapper_gen_reverse_fkmap(self):
         """ TestModel includes a reverse relation back to TestModelTwo, and the reverse_fkmap should contain that
@@ -673,10 +772,16 @@
 
         testModel = orm.TestModel()
 
-        self.assertDictEqual(testModel.gen_reverse_fkmap(),
-                             {'testmodeltwos': {'modelName': 'TestModelTwo',
-                                                'src_fieldName': 'testmodeltwos_ids',
-                                                'writeable': False}})
+        self.assertDictEqual(
+            testModel.gen_reverse_fkmap(),
+            {
+                "testmodeltwos": {
+                    "modelName": "TestModelTwo",
+                    "src_fieldName": "testmodeltwos_ids",
+                    "writeable": False,
+                }
+            },
+        )
 
     def test_ORMWrapper_fk_resolve(self):
         """ If we create a TestModelTwo that has a foreign key reference to a TestModel, then calling fk_resolve should
@@ -716,7 +821,9 @@
         self.assertEqual(testModelTwos_resolved.count(), 1)
 
         # the reverse_cache should have been populated
-        self.assertIn(("testmodeltwos", testModelTwos_resolved), testModel.reverse_cache.items())
+        self.assertIn(
+            ("testmodeltwos", testModelTwos_resolved), testModel.reverse_cache.items()
+        )
 
     def test_ORMWrapper_fk_set(self):
         """ fk_set will set the testmodel field on TesTModelTwo to point to the TestModel. """
@@ -745,11 +852,15 @@
         # fake_stub.py doesn't populate the reverse relations for us, so force what the server would have done...
         testModel._wrapped_class.testmodeltwos_ids = [testModelTwo.id]
 
-        post_save_fixups = [{"src_fieldName": "testmodel",
-                             "dest_id": None, # this field appears to not be used...
-                             "dest_model": testModel,
-                             "remove": True,
-                             "reverse_fieldName": "testmodeltwos"}]
+        post_save_fixups = [
+            {
+                "src_fieldName": "testmodel",
+                "dest_id": None,  # this field appears to not be used...
+                "dest_model": testModel,
+                "remove": True,
+                "reverse_fieldName": "testmodeltwos",
+            }
+        ]
 
         testModelTwo.post_save_fixups = post_save_fixups
         testModelTwo.do_post_save_fixups()
@@ -771,25 +882,30 @@
         # the reverse relation. But let's be sure, in case someone fixes that.
         testModel._wrapped_class.testmodeltwos_ids = []
 
-        post_save_fixups = [{"src_fieldName": "testmodel",
-                             "dest_id": None, # this field appears to not be used...
-                             "dest_model": testModel,
-                             "remove": False,
-                             "reverse_fieldName": "testmodeltwos"}]
+        post_save_fixups = [
+            {
+                "src_fieldName": "testmodel",
+                "dest_id": None,  # this field appears to not be used...
+                "dest_model": testModel,
+                "remove": False,
+                "reverse_fieldName": "testmodeltwos",
+            }
+        ]
 
         testModelTwo.post_save_fixups = post_save_fixups
         testModelTwo.do_post_save_fixups()
 
         self.assertEqual(testModel._wrapped_class.testmodeltwos_ids, [testModelTwo.id])
 
-
     def test_ORMWrapper_tologdict(self):
         """ Tologdict contains the model name and id, used for structured logging """
         orm = self.make_coreapi()
 
         testModel = orm.TestModel(intfield=7, stringfile="foo")
 
-        self.assertDictEqual(testModel.tologdict(), {'model_name': 'TestModel', 'pk': 0})
+        self.assertDictEqual(
+            testModel.tologdict(), {"model_name": "TestModel", "pk": 0}
+        )
 
     def test_ORMWrapper_ansible_tag(self):
         """ Ansible_tag is used by old-style synchronizers. Deprecated. """
@@ -800,7 +916,6 @@
 
         self.assertEqual(testModel.ansible_tag, "TestModel_7")
 
-
     def test_deleted_objects_all(self):
         orm = self.make_coreapi()
         orig_len_sites = len(orm.Site.objects.all())
@@ -811,21 +926,23 @@
         sites = orm.Site.objects.all()
         self.assertEqual(len(sites), orig_len_sites)
         deleted_sites = orm.Site.deleted_objects.all()
-        self.assertEqual(len(deleted_sites), orig_len_deleted_sites+1)
+        self.assertEqual(len(deleted_sites), orig_len_deleted_sites + 1)
 
     def test_deleted_objects_filter(self):
         orm = self.make_coreapi()
-        with patch.object(orm.grpc_stub, "FilterTestModel", wraps=orm.grpc_stub.FilterTestModel) as filter:
+        with patch.object(
+            orm.grpc_stub, "FilterTestModel", wraps=orm.grpc_stub.FilterTestModel
+        ) as filter:
             foo = orm.TestModel(name="foo")
             foo.save()
             foo.delete()
 
             # There should be no live objects
-            objs = orm.TestModel.objects.filter(name = "foo")
+            objs = orm.TestModel.objects.filter(name="foo")
             self.assertEqual(len(objs), 0)
 
             # There should be one deleted object
-            deleted_objs = orm.TestModel.deleted_objects.filter(name = "foo")
+            deleted_objs = orm.TestModel.deleted_objects.filter(name="foo")
             self.assertEqual(len(deleted_objs), 1)
 
             # Two calls, one for when we checked live objects, the other for when we checked deleted objects
@@ -839,7 +956,7 @@
             self.assertEqual(q.elements[0].sValue, "foo")
 
     def test_ORMQuerySet_first_nonempty(self):
-        qs = self.ORMQuerySet([1,2,3])
+        qs = self.ORMQuerySet([1, 2, 3])
         self.assertEqual(qs.first(), 1)
 
     def test_ORMQuerySet_first_empty(self):
@@ -847,7 +964,7 @@
         self.assertEqual(qs.first(), None)
 
     def test_ORMQuerySet_exists_nonempty(self):
-        qs = self.ORMQuerySet([1,2,3])
+        qs = self.ORMQuerySet([1, 2, 3])
         self.assertEqual(qs.exists(), True)
 
     def test_ORMQuerySet_exists_empty(self):
@@ -923,6 +1040,7 @@
         lobjs.remove(t)
         self.assertEqual(lobjs.count(), 0)
 
+
 def main():
     global USE_FAKE_STUB
     global xos_grpc_client
@@ -941,16 +1059,17 @@
         # This assumes xos-client python library is installed, and a gRPC server
         # is available.
 
-        from twisted.internet import reactor
         from xosapi import xos_grpc_client
 
-        print "Using xos-client library and core server"
+        print("Using xos-client library and core server")
 
         def test_callback():
             try:
-                sys.argv = sys.argv[:1] # unittest does not like xos_grpc_client's command line arguments (TODO: find a cooperative approach)
+                sys.argv = sys.argv[
+                    :1
+                ]  # unittest does not like xos_grpc_client's command line arguments (TODO: find a cooperative approach)
                 unittest.main()
-            except exceptions.SystemExit, e:
+            except exceptions.SystemExit as e:
                 global exitStatus
                 exitStatus = e.code
 
@@ -958,5 +1077,6 @@
 
         sys.exit(exitStatus)
 
+
 if __name__ == "__main__":
     main()
diff --git a/xos/xos_client/xosapi/test_wrapper.py b/xos/xos_client/xosapi/test_wrapper.py
index cd185a7..fe4f9c1 100644
--- a/xos/xos_client/xosapi/test_wrapper.py
+++ b/xos/xos_client/xosapi/test_wrapper.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,11 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
 import exceptions
 import os
 import random
-import shutil
 import string
 import sys
 import unittest
@@ -28,23 +26,25 @@
 # TODO: Investigate writing wrapper unit tests using mocks rather than using the ORM test framework
 
 # by default, use fake stub rather than real core
-USE_FAKE_STUB=True
+USE_FAKE_STUB = True
 
-PARENT_DIR=os.path.join(os.path.dirname(__file__), "..")
+PARENT_DIR = os.path.join(os.path.dirname(__file__), "..")
+
 
 class TestWrappers(unittest.TestCase):
     def setUp(self):
         from xosconfig import Config
+
         test_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
         config = os.path.join(test_path, "test_config.yaml")
         Config.clear()
-        Config.init(config, 'synchronizer-config-schema.yaml')
+        Config.init(config, "synchronizer-config-schema.yaml")
 
-        if (USE_FAKE_STUB):
+        if USE_FAKE_STUB:
             sys.path.append(PARENT_DIR)
 
     def tearDown(self):
-        if (USE_FAKE_STUB):
+        if USE_FAKE_STUB:
             sys.path.remove(PARENT_DIR)
 
     def make_coreapi(self):
@@ -55,7 +55,13 @@
             xosapi.orm.import_convenience_methods()
 
             stub = FakeStub()
-            api = xosapi.orm.ORMStub(stub=stub, package_name = "xos", protos=FakeProtos(), empty = FakeObj, enable_backoff = False)
+            api = xosapi.orm.ORMStub(
+                stub=stub,
+                package_name="xos",
+                protos=FakeProtos(),
+                empty=FakeObj,
+                enable_backoff=False,
+            )
             return api
         else:
             return xos_grpc_client.coreapi
@@ -64,23 +70,41 @@
         orm = self.make_coreapi()
         deployment = orm.Deployment(name="test_deployment")
         deployment.save()
-        controller = orm.Controller(name="test_controller", deployment_id = deployment.id)
+        controller = orm.Controller(name="test_controller", deployment_id=deployment.id)
         controller.save()
         site = orm.Site(name="testsite")
         site.save()
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         vsg_access_template = orm.NetworkTemplate(name="vsg_access", vtn_kind="VSG")
         vsg_access_template.save()
         service_one = orm.Service(name="service_one")
         service_one.save()
-        slice_one = orm.Slice(name="testsite_sliceone", service_id = service_one.id, site_id = site.id, creator_id = user.id, network = "noauto")
+        slice_one = orm.Slice(
+            name="testsite_sliceone",
+            service_id=service_one.id,
+            site_id=site.id,
+            creator_id=user.id,
+            network="noauto",
+        )
         slice_one.save()
-        network_one = orm.Network(name="testsite_sliceone_access", owner_id = slice_one.id, template_id = vsg_access_template.id)
+        network_one = orm.Network(
+            name="testsite_sliceone_access",
+            owner_id=slice_one.id,
+            template_id=vsg_access_template.id,
+        )
         network_one.save()
-        ns = orm.NetworkSlice(slice_id = slice_one.id, network_id = network_one.id)
+        ns = orm.NetworkSlice(slice_id=slice_one.id, network_id=network_one.id)
         ns.save()
-        cn_one = orm.ControllerNetwork(network_id = network_one.id, controller_id = controller.id)
+        cn_one = orm.ControllerNetwork(
+            network_id=network_one.id, controller_id=controller.id
+        )
         cn_one.save()
 
         if USE_FAKE_STUB:
@@ -100,18 +124,26 @@
         orm = self.make_coreapi()
         deployment = orm.Deployment(name="test_deployment")
         deployment.save()
-        controller = orm.Controller(name="test_controller", deployment_id = deployment.id)
+        controller = orm.Controller(name="test_controller", deployment_id=deployment.id)
         controller.save()
         site = orm.Site(name="testsite")
         site.save()
-        user = orm.User(email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)), site_id=site.id)
+        user = orm.User(
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         vsg_access_template = orm.NetworkTemplate(name="vsg_access", vtn_kind="VSG")
         vsg_access_template.save()
         service_one = orm.Service(name="service_one")
         service_one.save()
 
-        self.assertEqual(service_one.get_service_instance_class_name(), "ServiceInstance")
+        self.assertEqual(
+            service_one.get_service_instance_class_name(), "ServiceInstance"
+        )
 
     def test_service_get_service_instance_class(self):
         orm = self.make_coreapi()
@@ -122,15 +154,21 @@
         site = orm.Site(name="testsite")
         site.save()
         user = orm.User(
-            email="fake_" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10)),
-            site_id=site.id)
+            email="fake_"
+            + "".join(
+                random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
+            ),
+            site_id=site.id,
+        )
         user.save()
         vsg_access_template = orm.NetworkTemplate(name="vsg_access", vtn_kind="VSG")
         vsg_access_template.save()
         service_one = orm.Service(name="service_one")
         service_one.save()
 
-        self.assertEqual(service_one.get_service_instance_class().model_name, "ServiceInstance")
+        self.assertEqual(
+            service_one.get_service_instance_class().model_name, "ServiceInstance"
+        )
 
     def test_wrapper_from__class__dot_name(self):
         """ The Service model has a wrapper, so it should be returned when make_ORMWrapper looks for a wrapper based
@@ -148,6 +186,7 @@
         obj = orm.ONOSService()
         self.assertEqual(obj.__class__.__name__, "ORMWrapperService")
 
+
 def main():
     global USE_FAKE_STUB
     global xos_grpc_client
@@ -166,16 +205,17 @@
         # This assumes xos-client python library is installed, and a gRPC server
         # is available.
 
-        from twisted.internet import reactor
         from xosapi import xos_grpc_client
 
-        print "Using xos-client library and core server"
+        print("Using xos-client library and core server")
 
         def test_callback():
             try:
-                sys.argv = sys.argv[:1] # unittest does not like xos_grpc_client's command line arguments (TODO: find a cooperative approach)
+                sys.argv = sys.argv[
+                    :1
+                ]  # unittest does not like xos_grpc_client's command line arguments (TODO: find a cooperative approach)
                 unittest.main()
-            except exceptions.SystemExit, e:
+            except exceptions.SystemExit as e:
                 global exitStatus
                 exitStatus = e.code
 
@@ -183,5 +223,6 @@
 
         sys.exit(exitStatus)
 
+
 if __name__ == "__main__":
     main()
diff --git a/xos/xos_client/xosapi/version.py b/xos/xos_client/xosapi/version.py
index a118c43..2c84950 100644
--- a/xos/xos_client/xosapi/version.py
+++ b/xos/xos_client/xosapi/version.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-
 # This file will be replaced by setup.py
 __version__ = "unknown"
diff --git a/xos/xos_client/xosapi/xos_grpc_client.py b/xos/xos_client/xosapi/xos_grpc_client.py
index 47849da..13e969d 100644
--- a/xos/xos_client/xosapi/xos_grpc_client.py
+++ b/xos/xos_client/xosapi/xos_grpc_client.py
@@ -1,4 +1,3 @@
-
 # Copyright 2017-present Open Networking Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,53 +12,64 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+from __future__ import print_function
 import argparse
 import base64
 import functools
 import grpc
 import orm
 import os
-import pdb
 import sys
-from google.protobuf.empty_pb2 import Empty
-from grpc import metadata_call_credentials, ChannelCredentials, composite_channel_credentials, ssl_channel_credentials
-
-# fix up sys.path for chameleon
-import inspect
-currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
-sys.path = [currentdir] + sys.path
-
-import chameleon.grpc_client.grpc_client as chameleon_client
 
 from twisted.internet import reactor
 from google.protobuf.empty_pb2 import Empty
+from grpc import (
+    metadata_call_credentials,
+    composite_channel_credentials,
+    ssl_channel_credentials,
+)
+
+# fix up sys.path for chameleon
+import inspect
+
+currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+sys.path = [currentdir] + sys.path
 
 from xosconfig import Config
+import chameleon.grpc_client.grpc_client as chameleon_client
+
 from multistructlog import create_logger
+log = create_logger(Config().get("logging"))
 
-log = create_logger(Config().get('logging'))
+SERVER_CA = "/usr/local/share/ca-certificates/local_certs.crt"
 
-SERVER_CA="/usr/local/share/ca-certificates/local_certs.crt"
 
 class UsernamePasswordCallCredentials(grpc.AuthMetadataPlugin):
-  """Metadata wrapper for raw access token credentials."""
-  def __init__(self, username, password):
+    """Metadata wrapper for username/password basic-auth credentials."""
+
+    def __init__(self, username, password):
         self._username = username
         self._password = password
-  def __call__(self, context, callback):
-        basic_auth = "Basic %s" % base64.b64encode("%s:%s" % (self._username, self._password))
-        metadata = (('authorization', basic_auth),)
+
+    def __call__(self, context, callback):
+        basic_auth = "Basic %s" % base64.b64encode(
+            "%s:%s" % (self._username, self._password)
+        )
+        metadata = (("authorization", basic_auth),)
         callback(metadata, None)
 
+
 class SessionIdCallCredentials(grpc.AuthMetadataPlugin):
-  """Metadata wrapper for raw access token credentials."""
-  def __init__(self, sessionid):
+    """Metadata wrapper for session-id credentials."""
+
+    def __init__(self, sessionid):
         self._sessionid = sessionid
-  def __call__(self, context, callback):
-        metadata = (('x-xossession', self._sessionid),)
+
+    def __call__(self, context, callback):
+        metadata = (("x-xossession", self._sessionid),)
         callback(metadata, None)
 
+
 class XOSClient(chameleon_client.GrpcClient):
     # We layer our own reconnect_callback functionality so we can setup the
     # ORM before calling reconnect_callback.
@@ -71,30 +81,37 @@
 
     def load_convenience_methods(self):
 
-        convenience_methods_dir = "/usr/local/lib/python2.7/dist-packages/xosapi/convenience/"
+        convenience_methods_dir = (
+            "/usr/local/lib/python2.7/dist-packages/xosapi/convenience/"
+        )
 
         try:
             response = self.dynamicload.GetConvenienceMethods(Empty())
 
             if response:
-                log.info("Loading convenience methods",  methods=[m.filename for m in response.convenience_methods])
+                log.info(
+                    "Loading convenience methods",
+                    methods=[m.filename for m in response.convenience_methods],
+                )
 
                 for cm in response.convenience_methods:
                     log.debug("Saving convenience method", method=cm.filename)
                     save_path = os.path.join(convenience_methods_dir, cm.filename)
                     file(save_path, "w").write(cm.contents)
             else:
-                log.exception("Cannot load convenience methods, restarting the synchronzier")
-                os.execv(sys.executable, ['python'] + sys.argv)
+                log.exception(
+                    "Cannot load convenience methods, restarting the synchronizer"
+                )
+                os.execv(sys.executable, ["python"] + sys.argv)
 
-        except grpc._channel._Rendezvous, e:
+        except grpc._channel._Rendezvous as e:
             code = e.code()
             if code == grpc.StatusCode.UNAVAILABLE:
                 # NOTE if the core is not available, restart the synchronizer
-                os.execv(sys.executable, ['python'] + sys.argv)
+                os.execv(sys.executable, ["python"] + sys.argv)
 
     def reconnected(self):
-        for api in ['modeldefs', 'utility', 'xos', 'dynamicload']:
+        for api in ["modeldefs", "utility", "xos", "dynamicload"]:
             pb2_file_name = os.path.join(self.work_dir, api + "_pb2.py")
             pb2_grpc_file_name = os.path.join(self.work_dir, api + "_pb2_grpc.py")
 
@@ -109,12 +126,12 @@
                 finally:
                     sys.path = orig_sys_path
 
-                stub_class = getattr(m_grpc, api+"Stub")
+                stub_class = getattr(m_grpc, api + "Stub")
 
                 setattr(self, api, stub_class(self.channel))
-                setattr(self, api+"_pb2", m_protos)
+                setattr(self, api + "_pb2", m_protos)
             else:
-                print >> sys.stderr, "failed to locate api", api
+                print("failed to locate api", api, file=sys.stderr)
 
         if hasattr(self, "xos"):
             self.xos_orm = orm.ORMStub(self.xos, self.xos_pb2, "xos")
@@ -130,98 +147,127 @@
 
 
 class InsecureClient(XOSClient):
-    def __init__(self, consul_endpoint=None, work_dir="/tmp/xos_grpc_protos", endpoint='localhost:50055', reconnect_callback=None):
-        super(InsecureClient,self).__init__(consul_endpoint, work_dir, endpoint, self.reconnected)
+    def __init__(
+        self,
+        consul_endpoint=None,
+        work_dir="/tmp/xos_grpc_protos",
+        endpoint="localhost:50055",
+        reconnect_callback=None,
+    ):
+        super(InsecureClient, self).__init__(
+            consul_endpoint, work_dir, endpoint, self.reconnected
+        )
 
         self.reconnect_callback2 = reconnect_callback
 
+
 class SecureClient(XOSClient):
-    def __init__(self, consul_endpoint=None, work_dir="/tmp/xos_grpc_protos", endpoint='localhost:50055', reconnect_callback=None, cacert=SERVER_CA, username=None, password=None, sessionid=None):
-        server_ca = open(cacert,"r").read()
-        if (sessionid):
+    def __init__(
+        self,
+        consul_endpoint=None,
+        work_dir="/tmp/xos_grpc_protos",
+        endpoint="localhost:50055",
+        reconnect_callback=None,
+        cacert=SERVER_CA,
+        username=None,
+        password=None,
+        sessionid=None,
+    ):
+        server_ca = open(cacert, "r").read()
+        if sessionid:
             call_creds = metadata_call_credentials(SessionIdCallCredentials(sessionid))
         else:
-            call_creds = metadata_call_credentials(UsernamePasswordCallCredentials(username, password))
+            call_creds = metadata_call_credentials(
+                UsernamePasswordCallCredentials(username, password)
+            )
         chan_creds = ssl_channel_credentials(server_ca)
         chan_creds = composite_channel_credentials(chan_creds, call_creds)
 
-        super(SecureClient,self).__init__(consul_endpoint, work_dir, endpoint, self.reconnected, chan_creds)
+        super(SecureClient, self).__init__(
+            consul_endpoint, work_dir, endpoint, self.reconnected, chan_creds
+        )
 
         self.reconnect_callback2 = reconnect_callback
 
+
 # -----------------------------------------------------------------------------
 # Wrappers for easy setup for test cases, etc
 # -----------------------------------------------------------------------------
 
+
 def parse_args():
     parser = argparse.ArgumentParser()
 
-    defs = {"grpc_insecure_endpoint": "xos-core.cord.lab:50055",
-            "grpc_secure_endpoint": "xos-core.cord.lab:50051",
-            "config": '/opt/xos/config.yml'}
+    defs = {
+        "grpc_insecure_endpoint": "xos-core.cord.lab:50055",
+        "grpc_secure_endpoint": "xos-core.cord.lab:50051",
+        "config": "/opt/xos/config.yml",
+    }
 
-    _help = 'Path to the config file (default: %s)' % defs['config']
+    _help = "Path to the config file (default: %s)" % defs["config"]
     parser.add_argument(
-        '-C', '--config', dest='config', action='store',
-        default=defs['config'],
-        help=_help)
+        "-C",
+        "--config",
+        dest="config",
+        action="store",
+        default=defs["config"],
+        help=_help,
+    )
 
-    _help = ('gRPC insecure end-point to connect to. It is a direct',
-             '. (default: %s'
-             % defs['grpc_insecure_endpoint'])
-    parser.add_argument('-G', '--grpc-insecure-endpoint',
-                        dest='grpc_insecure_endpoint',
-                        action='store',
-                        default=defs["grpc_insecure_endpoint"],
-                        help=_help)
+    _help = (
+        "gRPC insecure end-point to connect to. It is a direct",
+        ". (default: %s" % defs["grpc_insecure_endpoint"],
+    )
+    parser.add_argument(
+        "-G",
+        "--grpc-insecure-endpoint",
+        dest="grpc_insecure_endpoint",
+        action="store",
+        default=defs["grpc_insecure_endpoint"],
+        help=_help,
+    )
 
-    _help = ('gRPC secure end-point to connect to. It is a direct',
-             '. (default: %s'
-             % defs["grpc_secure_endpoint"])
-    parser.add_argument('-S', '--grpc-secure-endpoint',
-                        dest='grpc_secure_endpoint',
-                        action='store',
-                        default=defs["grpc_secure_endpoint"],
-                        help=_help)
+    _help = (
+        "gRPC secure end-point to connect to. It is a direct",
+        ". (default: %s" % defs["grpc_secure_endpoint"],
+    )
+    parser.add_argument(
+        "-S",
+        "--grpc-secure-endpoint",
+        dest="grpc_secure_endpoint",
+        action="store",
+        default=defs["grpc_secure_endpoint"],
+        help=_help,
+    )
 
-    parser.add_argument('-u', '--username',
-                        dest='username',
-                        action='store',
-                        default=None,
-                        help=_help)
+    parser.add_argument(
+        "-u", "--username", dest="username", action="store", default=None, help=_help
+    )
 
-    parser.add_argument('-p', '--password',
-                        dest='password',
-                        action='store',
-                        default=None,
-                        help=_help)
+    parser.add_argument(
+        "-p", "--password", dest="password", action="store", default=None, help=_help
+    )
 
-    _help = 'omit startup banner log lines'
-    parser.add_argument('-n', '--no-banner',
-                        dest='no_banner',
-                        action='store_true',
-                        default=False,
-                        help=_help)
+    _help = "omit startup banner log lines"
+    parser.add_argument(
+        "-n",
+        "--no-banner",
+        dest="no_banner",
+        action="store_true",
+        default=False,
+        help=_help,
+    )
 
     _help = "suppress debug and info logs"
-    parser.add_argument('-q', '--quiet',
-                        dest='quiet',
-                        action='count',
-                        help=_help)
+    parser.add_argument("-q", "--quiet", dest="quiet", action="count", help=_help)
 
-    _help = 'enable verbose logging'
-    parser.add_argument('-v', '--verbose',
-                        dest='verbose',
-                        action='count',
-                        help=_help)
+    _help = "enable verbose logging"
+    parser.add_argument("-v", "--verbose", dest="verbose", action="count", help=_help)
 
     args = parser.parse_args()
 
     return args
 
-def setup_logging():
-    log = create_logger(Config().get('logging'))
-
 
 def coreclient_reconnect(client, reconnect_callback, *args, **kwargs):
     global coreapi
@@ -233,6 +279,7 @@
 
     reactor.stop()
 
+
 def start_api(reconnect_callback, *args, **kwargs):
     global coreclient
 
@@ -241,11 +288,14 @@
     else:
         coreclient = InsecureClient(*args, **kwargs)
 
-    coreclient.set_reconnect_callback(functools.partial(coreclient_reconnect, coreclient, reconnect_callback))
+    coreclient.set_reconnect_callback(
+        functools.partial(coreclient_reconnect, coreclient, reconnect_callback)
+    )
     coreclient.start()
 
     reactor.run()
 
+
 def start_api_parseargs(reconnect_callback):
     """ This function is an entrypoint for tests and other simple programs to
         setup the API and get a callback when the API is ready.
@@ -253,50 +303,58 @@
 
     args = parse_args()
 
-    setup_logging()
-
     if args.username:
-        start_api(reconnect_callback, endpoint=args.grpc_secure_endpoint, username=args.username, password=args.password)
+        start_api(
+            reconnect_callback,
+            endpoint=args.grpc_secure_endpoint,
+            username=args.username,
+            password=args.password,
+        )
     else:
         start_api(reconnect_callback, endpoint=args.grpc_insecure_endpoint)
 
 
-
-
 # -----------------------------------------------------------------------------
 # Self test
 # -----------------------------------------------------------------------------
 
+
 def insecure_callback(client):
-    print "insecure self_test start"
-    print client.xos_orm.User.objects.all()
-    print "insecure self_test done"
+    print("insecure self_test start")
+    print(client.xos_orm.User.objects.all())
+    print("insecure self_test done")
 
     # now start the next test
     client.stop()
     reactor.callLater(0, start_secure_test)
 
+
 def start_insecure_test():
     client = InsecureClient(endpoint="xos-core:50055")
     client.set_reconnect_callback(functools.partial(insecure_callback, client))
     client.start()
 
+
 def secure_callback(client):
-    print "secure self_test start"
-    print client.xos_orm.User.objects.all()
-    print "secure self_test done"
+    print("secure self_test start")
+    print(client.xos_orm.User.objects.all())
+    print("secure self_test done")
     reactor.stop()
 
+
 def start_secure_test():
-    client = SecureClient(endpoint="xos-core:50051", username="admin@opencord.org", password="letmein")
+    client = SecureClient(
+        endpoint="xos-core:50051", username="admin@opencord.org", password="letmein"
+    )
     client.set_reconnect_callback(functools.partial(secure_callback, client))
     client.start()
 
+
 def main():
     reactor.callLater(0, start_insecure_test)
 
     reactor.run()
 
-if __name__=="__main__":
-    main()
 
+if __name__ == "__main__":
+    main()
diff --git a/xos/xos_client/xossh b/xos/xos_client/xossh
index daefecd..9951b69 100644
--- a/xos/xos_client/xossh
+++ b/xos/xos_client/xossh
@@ -5,7 +5,8 @@
 
 import argparse
 import functools
-import os, sys
+import os
+import sys
 import atexit
 import readline
 import traceback
@@ -15,6 +16,7 @@
 
 current_client = None
 
+
 def parse_args():
     parser = argparse.ArgumentParser()
 
@@ -71,13 +73,13 @@
                         action='count',
                         help=_help)
 
-    _help = 'enable verbose logging'
+    _help = 'increase verbosity level (can be used multiple times)'
     parser.add_argument('-v', '--verbose',
                         dest='verbose',
                         action='count',
                         help=_help)
 
-    _help = 'enable verbose logging'
+    _help = 'print version'
     parser.add_argument('-V', '--version',
                         dest='version',
                         action='store_true',
@@ -88,6 +90,7 @@
 
     return args
 
+
 def login(username=None, password=None):
     from xosapi.xos_grpc_client import InsecureClient, SecureClient
     if current_client:
@@ -105,15 +108,19 @@
         client.set_reconnect_callback(functools.partial(start_xossh, client))
         client.start()
 
+
 def setDirtyModels(*args, **kwargs):
     return current_client.utility.SetDirtyModels(current_client.utility_pb2.ModelFilter(*args, **kwargs))
 
+
 def listDirtyModels(*args, **kwargs):
     return current_client.utility.ListDirtyModels(current_client.utility_pb2.ModelFilter(*args, **kwargs))
 
+
 def listModelDefs():
     return current_client.modeldefs.ListModelDefs(Empty())
 
+
 def loadModels(name, version, xproto_filenames=[], decl_filenames=[], attic_filenames=[]):
     request = current_client.dynamicload_pb2.LoadModelsRequest(name=name, version=version)
     for fn in xproto_filenames:
@@ -130,14 +137,17 @@
         item.contents = open(fn).read()
     return current_client.dynamicload.LoadModels(request)
 
+
 def unloadModels(name, version):
     request = current_client.dynamicload_pb2.UnloadModelsRequest(name=name, version=version)
     return current_client.dynamicload.UnloadModels(request)
 
+
 def getLoadStatus():
     request = Empty()
     return current_client.dynamicload.GetLoadStatus(request)
 
+
 def listUtility():
     print 'setDirtyModels(class_name=None)'
     print 'listDirtyModels(class_name=None)'
@@ -146,6 +156,7 @@
     print 'unloadModels(name, version)'
     print 'getLoadStatus()'
 
+
 def examples():
     print 'Slice.objects.all() # list all slices'
     print 'Slice.objects.first().dump() # dump the first slice'
@@ -155,6 +166,7 @@
     print 's.save() # save the slice'
     print
 
+
 def start_xossh(client):
     global coreapi, current_client
     coreapi = client.xos_orm
@@ -164,12 +176,12 @@
 
     if not args.no_banner:
         print
-        print "__   __   ____     _____    _____   _    _"
-        print "\ \ / /  / __ \   / ____|  / ____| | |  | |"
-        print " \ V /  | |  | | | (___   | (___   | |__| |"
-        print "  > <   | |  | |  \___ \   \___ \  |  __  |"
-        print " / . \  | |__| |  ____) |  ____) | | |  | |"
-        print "/_/ \_\  \____/  |_____/  |_____/  |_|  |_|"
+        print r"__   __   ____     _____    _____   _    _"
+        print r"\ \ / /  / __ \   / ____|  / ____| | |  | |"
+        print r" \ V /  | |  | | | (___   | (___   | |__| |"
+        print r"  > <   | |  | |  \___ \   \___ \  |  __  |"
+        print r" / . \  | |__| |  ____) |  ____) | | |  | |"
+        print r"/_/ \_\  \____/  |_____/  |_____/  |_|  |_|"
         print
 
         print "XOS Core server at %s" % client.endpoint
@@ -193,6 +205,7 @@
 
     reactor.callLater(0, functools.partial(do_xossh_prompt, client))
 
+
 def do_xossh_prompt(client):
     for k in client.xos_orm.all_model_names:
         locals()[k] = getattr(client.xos_orm, k)
@@ -252,6 +265,7 @@
         print
         reactor.stop()
 
+
 def main():
     global args
     args = parse_args()
@@ -261,10 +275,6 @@
     config_file = args.config
     Config.init(config_file, 'synchronizer-config-schema.yaml')
 
-    from xosapi.xos_grpc_client import setup_logging
-
-    setup_logging()
-
     if args.version:
         print __version__
         sys.exit(0)
@@ -272,5 +282,6 @@
     login(username=args.username, password=args.password)
     reactor.run()
 
+
 if __name__ == "__main__":
     main()