This commit consists of:
1) Provide a kafka client to publish events to kafka brokers
2) Provide zookeeper and kafka docker containers for local testing,
   will not be present in production
3) Sends a regular heartbeat to the kafka broker from voltha to
   exercise all the components
4) Provides a basic kafka consumer (requires kafka-python to be
   installed) to read the messages off the local kafka broker -
   at this time it is only heartbeat messages
diff --git a/kafka/kafkaConsumer.py b/kafka/kafkaConsumer.py
new file mode 100644
index 0000000..2fd61e8
--- /dev/null
+++ b/kafka/kafkaConsumer.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+import threading, logging, time
+
+from kafka import KafkaConsumer
+
+
class Consumer(threading.Thread):
    """Daemon thread that subscribes to a Kafka topic and prints every message.

    Used as a basic smoke test against a local Kafka broker: by default it
    reads the voltha heartbeat topic from the beginning of the log.
    """

    # Daemon thread: the process may exit without waiting for this consumer.
    daemon = True

    def __init__(self, bootstrap_servers='10.0.2.15:9092',
                 topics=('voltha-heartbeat',)):
        """Create the consumer thread.

        :param bootstrap_servers: Kafka broker address(es) to connect to.
            Defaults to the original hard-coded local-VM address; override
            for other environments (e.g. '10.100.198.220:9092').
        :param topics: iterable of topic names to subscribe to.
        """
        super(Consumer, self).__init__()
        self.bootstrap_servers = bootstrap_servers
        self.topics = list(topics)

    def run(self):
        """Connect to the broker and print received messages forever."""
        # auto_offset_reset='earliest' replays the topic from the start
        # when no committed offset exists for this consumer group.
        consumer = KafkaConsumer(bootstrap_servers=self.bootstrap_servers,
                                 auto_offset_reset='earliest')
        consumer.subscribe(self.topics)

        for message in consumer:
            print(message)
+
+
def main():
    """Start the consumer worker(s), then keep the process alive.

    The workers are daemon threads, so the process ends when the sleep
    below elapses (or on Ctrl-C).
    """
    workers = [Consumer()]
    for worker in workers:
        worker.start()

    # Give the daemon consumers time to run before the process exits.
    time.sleep(3000)
+
if __name__ == "__main__":
    # %(msecs)03d renders the millisecond part as a zero-padded integer;
    # the original %(msecs)s printed the raw float (e.g. "123.456789"),
    # garbling the timestamp after the '.' separator.
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO,
        )
    main()