[SEBA-412] Automated reformat of Python code

Passes of modernize, autopep8, and black, then a check with flake8 (see the sketch after this message)

flake8 + manual fixes:
  lib/xos-config
  lib/xos-kafka
  lib/xos-util
  xos/coreapi
  xos/api
  xos/xos_client

Change-Id: Ib23cf84cb13beb3c6381fa0d79594dc9131dc815
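
A minimal sketch of the reformatting pass described above, for reference only.
The exact flags used for this change are not recorded in the commit message;
the in-place options shown are the tools' standard ones, and the target list
is assumed to be the directories named above.

    # Hypothetical reproduction of the reformat/check pass (not part of this change).
    import subprocess

    # Directories listed in the commit message as getting flake8 + manual fixes.
    TARGETS = [
        "lib/xos-config",
        "lib/xos-kafka",
        "lib/xos-util",
        "xos/coreapi",
        "xos/api",
        "xos/xos_client",
    ]

    def reformat(path):
        """Run the three formatters in order, then report flake8 findings."""
        subprocess.check_call(["python-modernize", "-w", path])  # py2/py3 syntax fixes, written in place
        subprocess.check_call(["autopep8", "--in-place", "--recursive", path])
        subprocess.check_call(["black", path])
        # flake8 exits non-zero when it finds problems; remaining issues were fixed by hand.
        subprocess.call(["flake8", path])

    if __name__ == "__main__":
        for target in TARGETS:
            reformat(target)
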
diff --git a/lib/xos-kafka/setup.py b/lib/xos-kafka/setup.py
index 4486f19..3d05854 100644
--- a/lib/xos-kafka/setup.py
+++ b/lib/xos-kafka/setup.py
@@ -19,27 +19,25 @@
 
 
 def readme():
-    with open('README.rst') as f:
+    with open("README.rst") as f:
         return f.read()
 
 
 setup_with_auto_version(
-    name='xoskafka',
+    name="xoskafka",
     version=__version__,
-    description='Wrapper around kafka for XOS',
+    description="Wrapper around kafka for XOS",
     long_description=readme(),
-    classifiers=[
-        'License :: OSI Approved :: Apache Software License',
-    ],
-    author='Zack Williams',
-    author_email='zdw@opennetworking.org',
-    packages=['xoskafka'],
-    license='Apache v2',
+    classifiers=["License :: OSI Approved :: Apache Software License"],
+    author="Zack Williams",
+    author_email="zdw@opennetworking.org",
+    packages=["xoskafka"],
+    license="Apache v2",
     install_requires=[
-        'confluent-kafka>=0.11.5',
-        'xosconfig>=2.1.0',
-        'multistructlog>=1.5',
-        ],
+        "confluent-kafka>=0.11.5",
+        "xosconfig>=2.1.0",
+        "multistructlog>=1.5",
+    ],
     include_package_data=True,
     zip_safe=False,
-    )
+)
diff --git a/lib/xos-kafka/xoskafka/__init__.py b/lib/xos-kafka/xoskafka/__init__.py
index 69f5a32..293a26e 100644
--- a/lib/xos-kafka/xoskafka/__init__.py
+++ b/lib/xos-kafka/xoskafka/__init__.py
@@ -13,3 +13,5 @@
 # limitations under the License.
 
 from .xoskafkaproducer import XOSKafkaProducer
+
+__all__ = ["XOSKafkaProducer"]
diff --git a/lib/xos-kafka/xoskafka/xoskafkaproducer.py b/lib/xos-kafka/xoskafka/xoskafkaproducer.py
index b4134d5..4611547 100644
--- a/lib/xos-kafka/xoskafka/xoskafkaproducer.py
+++ b/lib/xos-kafka/xoskafka/xoskafkaproducer.py
@@ -18,7 +18,8 @@
 
 from xosconfig import Config
 from multistructlog import create_logger
-log = create_logger(Config().get('logging'))
+
+log = create_logger(Config().get("logging"))
 
 kafka_producer = None
 
@@ -34,23 +35,24 @@
         global kafka_producer
 
         if kafka_producer:
-            raise Exception('XOSKafkaProducer already initialized')
+            raise Exception("XOSKafkaProducer already initialized")
 
         else:
-            log.info('Connecting to Kafka with bootstrap servers: %s' %
-                     Config.get('kafka_bootstrap_servers'))
+            log.info(
+                "Connecting to Kafka with bootstrap servers: %s"
+                % Config.get("kafka_bootstrap_servers")
+            )
 
             try:
                 producer_config = {
-                    'bootstrap.servers':
-                        ','.join(Config.get('kafka_bootstrap_servers')),
+                    "bootstrap.servers": ",".join(Config.get("kafka_bootstrap_servers"))
                 }
 
                 kafka_producer = confluent_kafka.Producer(**producer_config)
 
-                log.info('Connected to Kafka: %s' % kafka_producer)
+                log.info("Connected to Kafka: %s" % kafka_producer)
 
-            except confluent_kafka.KafkaError, e:
+            except confluent_kafka.KafkaError as e:
                 log.exception("Kafka Error: %s" % e)
 
     @classmethod
@@ -58,25 +60,22 @@
 
         try:
             kafka_producer.produce(
-                topic,
-                value,
-                key,
-                callback=cls._kafka_delivery_callback
-                )
+                topic, value, key, callback=cls._kafka_delivery_callback
+            )
 
             # see https://github.com/confluentinc/confluent-kafka-python/issues/16
             kafka_producer.poll(0)
 
-        except confluent_kafka.KafkaError, err:
+        except confluent_kafka.KafkaError as err:
             log.exception("Kafka Error", err)
 
     def __del__(self):
-       if kafka_producer is not None:
+        if kafka_producer is not None:
             kafka_producer.flush()
 
     @staticmethod
     def _kafka_delivery_callback(err, msg):
         if err:
-            log.error('Message failed delivery: %s' % err)
+            log.error("Message failed delivery: %s" % err)
         else:
-            log.trace('Message delivered', message=msg)
+            log.trace("Message delivered", message=msg)