Publish indications on Kafka
Change-Id: I3ad7a151aa7bec2810831a225e45851087acd678
diff --git a/voltha/adapters/openolt/openolt_data_model.py b/voltha/adapters/openolt/openolt_data_model.py
index e232a69..afd5ce7 100644
--- a/voltha/adapters/openolt/openolt_data_model.py
+++ b/voltha/adapters/openolt/openolt_data_model.py
@@ -17,6 +17,7 @@
import structlog
import socket
from scapy.layers.l2 import Ether
+
from voltha.adapters.openolt.openolt_utils import OpenoltUtils
from voltha.protos.device_pb2 import Port, Device
from voltha.protos.openflow_13_pb2 import OFPPS_LIVE, OFPPF_FIBER, \
@@ -403,19 +404,6 @@
def _device_id(self):
return self.device.id
- def _resolve_onu_id(self, onu_id, port_intf_id):
- try:
- onu_device = None
- onu_device = self.adapter_agent.get_child_device(
- self.device_id,
- parent_port_no=self.platform.intf_id_to_port_no(
- port_intf_id, Port.PON_OLT),
- onu_id=onu_id)
- except Exception as inner:
- self.log.exception('resolve-onu-id', errmsg=inner.message)
-
- return onu_device
-
def create_alarm(self, **kwargs):
return self.adapter_agent.create_alarm(
logical_device_id=self.logical_device_id,
diff --git a/voltha/adapters/openolt/openolt_device.py b/voltha/adapters/openolt/openolt_device.py
index 81231d2..ac3cba5 100644
--- a/voltha/adapters/openolt/openolt_device.py
+++ b/voltha/adapters/openolt/openolt_device.py
@@ -21,11 +21,15 @@
from twisted.internet import reactor
from scapy.layers.l2 import Ether, Dot1Q
from transitions import Machine
+from simplejson import dumps
+from google.protobuf.message import Message
+from google.protobuf.json_format import MessageToDict
from voltha.protos.device_pb2 import Port
from voltha.adapters.openolt.protos import openolt_pb2_grpc, openolt_pb2
from voltha.adapters.openolt.openolt_utils import OpenoltUtils
from voltha.extensions.alarms.onu.onu_discovery_alarm import OnuDiscoveryAlarm
+from voltha.northbound.kafka.kafka_proxy import get_kafka_proxy
class OpenoltDevice(object):
@@ -170,6 +174,22 @@
self.flow_mgr.reset_flows()
def indications_thread(self):
+
+        def send_indication(msg):
+            topic = "openolt.ind"  # all OLT indications go to one topic
+            try:
+                kafka_proxy = get_kafka_proxy()
+                if kafka_proxy and not kafka_proxy.is_faulty():
+                    self.log.debug('kafka-proxy-available')
+                    # Serialize exactly once: protobuf msgs become a dict
+                    # first, then everything is JSON-encoded by dumps().
+                    if isinstance(msg, Message):
+                        msg = MessageToDict(msg, True, True)
+                    kafka_proxy.send_message(topic, dumps(msg))
+                else:
+                    self.log.error('kafka-proxy-unavailable')
+            except Exception as e:
+                self.log.exception('failed-sending-message', e=e)
+
self.log.debug('starting-indications-thread')
self.log.debug('connecting to olt')
@@ -225,6 +245,8 @@
indications=ind)
continue
+ send_indication(ind)
+
# indication handlers run in the main event loop
if ind.HasField('olt_ind'):
reactor.callFromThread(self.olt_indication, ind.olt_ind)
diff --git a/voltha/adapters/openolt/openolt_kafka_consumer.py b/voltha/adapters/openolt/openolt_kafka_consumer.py
new file mode 100644
index 0000000..73ff0c2
--- /dev/null
+++ b/voltha/adapters/openolt/openolt_kafka_consumer.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 Confluent Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Example high-level Kafka 0.9 balanced Consumer
+#
+from confluent_kafka import Consumer, KafkaError
+import sys
+import getopt
+import json
+import logging
+from pprint import pformat
+
+def print_usage_and_exit(program_name):  # emit CLI usage on stderr, then exit(1)
+    sys.stderr.write('Usage: %s [options..] <bootstrap-brokers> <group> <topic1> <topic2> ..\n' % program_name)
+    options = '''
+ Options:
+ -T <intvl> Enable client statistics at specified interval (ms)
+'''
+    sys.stderr.write(options)
+    sys.exit(1)  # non-zero exit: the tool was invoked with bad arguments
+
+
+if __name__ == '__main__':
+    optlist, argv = getopt.getopt(sys.argv[1:], 'T:')
+    if len(argv) < 3:
+        print_usage_and_exit(sys.argv[0])
+
+    broker = argv[0]
+    group = argv[1]
+    topics = argv[2:]
+    # Consumer configuration (TODO: optlist/-T stats interval is parsed but unused)
+    # See https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
+    # NOTE: with no 'auto.offset.reset' the consumer starts at the
+    # latest offset; add 'auto.offset.reset': 'earliest' to the conf
+    # below to replay a topic from its beginning instead of reading
+    # only messages produced after the consumer joins the group.
+    conf = {'bootstrap.servers': broker,
+            'group.id': group,
+            'session.timeout.ms': 60000}
+
+    logger = logging.getLogger('openolt-kafka-consumer')
+    logger.setLevel(logging.DEBUG)
+    handler = logging.StreamHandler()
+    handler.setFormatter(logging.Formatter('%(asctime)-15s %(levelname)-8s %(message)s'))
+    logger.addHandler(handler)
+
+    # Create Consumer instance
+    # Hint: try debug='fetch' to generate some log messages
+    # c = Consumer(conf, logger=logger, debug='fetch')
+    c = Consumer(conf, logger=logger)
+
+    def print_assignment(consumer, partitions):
+        print('Assignment:', partitions)
+
+    # Subscribe to topics
+    c.subscribe(topics, on_assign=print_assignment)
+
+    # Read messages from Kafka, print to stdout
+    try:
+        while True:
+            msg = c.poll(timeout=1.0)
+            if msg is None:
+                continue
+            elif not msg.error():
+                print(msg.value())
+            elif msg.error().code() == KafkaError._PARTITION_EOF:
+                # print('End of partition reached {0}/{1}'
+                #       .format(msg.topic(), msg.partition()))
+                pass
+            else:
+                print('Error occurred: {0}'.format(msg.error().str()))
+
+    except KeyboardInterrupt:
+        pass
+
+    finally:
+        # Close down consumer to commit final offsets.
+        c.close()