#!/usr/bin/env python
#
# Copyright 2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""OpenONU Adapter main entry point"""
19
Matt Jeanneret2e3cb8d2019-11-16 09:22:41 -050020from __future__ import absolute_import
Matt Jeanneretf1e9c5d2019-02-08 07:41:29 -050021import argparse
22import os
23import time
Matteo Scandolod8d73172019-11-26 12:15:15 -070024import logging
Matt Jeanneretf1e9c5d2019-02-08 07:41:29 -050025
26import arrow
27import yaml
Matt Jeanneret2e3cb8d2019-11-16 09:22:41 -050028import socketserver
Rohan Agrawalc5bdbbc2019-11-14 12:39:39 +000029
Matt Jeanneretf1e9c5d2019-02-08 07:41:29 -050030from packaging.version import Version
31from simplejson import dumps
32from twisted.internet.defer import inlineCallbacks, returnValue
33from twisted.internet.task import LoopingCall
34from zope.interface import implementer
35
Matt Jeanneret72f96fc2019-02-11 10:53:05 -050036from pyvoltha.common.structlog_setup import setup_logging, update_logging
37from pyvoltha.common.utils.asleep import asleep
38from pyvoltha.common.utils.deferred_utils import TimeOutError
39from pyvoltha.common.utils.dockerhelpers import get_my_containers_name
40from pyvoltha.common.utils.nethelpers import get_my_primary_local_ipv4, \
Matt Jeanneretf1e9c5d2019-02-08 07:41:29 -050041 get_my_primary_interface
Matt Jeanneret72f96fc2019-02-11 10:53:05 -050042from pyvoltha.common.utils.registry import registry, IComponent
43from pyvoltha.adapters.kafka.adapter_proxy import AdapterProxy
44from pyvoltha.adapters.kafka.adapter_request_facade import AdapterRequestFacade
45from pyvoltha.adapters.kafka.core_proxy import CoreProxy
46from pyvoltha.adapters.kafka.kafka_inter_container_library import IKafkaMessagingProxy, \
Matt Jeanneretf1e9c5d2019-02-08 07:41:29 -050047 get_messaging_proxy
Matt Jeanneret72f96fc2019-02-11 10:53:05 -050048from pyvoltha.adapters.kafka.kafka_proxy import KafkaProxy, get_kafka_proxy
William Kurkian8235c1e2019-03-05 12:58:28 -050049from voltha_protos.adapter_pb2 import AdapterConfig
Matt Jeanneret72f96fc2019-02-11 10:53:05 -050050
Matt Jeanneret2e3cb8d2019-11-16 09:22:41 -050051from brcm_openomci_onu_adapter import BrcmOpenomciOnuAdapter
Rohan Agrawalc5bdbbc2019-11-14 12:39:39 +000052from probe import Probe
53
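# Default settings for the adapter.  Each entry below can be overridden by the
# corresponding environment variable (CONFIG, CONSUL, KAFKA_ADAPTER, ...), and
# most of them again by the matching command-line flag defined in parse_args().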
defs = dict(
    version_file='./VERSION',
    config=os.environ.get('CONFIG', './openonu.yml'),
    container_name_regex=os.environ.get('CONTAINER_NUMBER_EXTRACTOR',
                                        r'^.*\.([0-9]+)\..*$'),
    consul=os.environ.get('CONSUL', 'localhost:8500'),
    name=os.environ.get('NAME', 'openonu'),
    vendor=os.environ.get('VENDOR', 'Voltha Project'),
    device_type=os.environ.get('DEVICE_TYPE', 'openonu'),
    accept_bulk_flow=os.environ.get('ACCEPT_BULK_FLOW', True),
    accept_atomic_flow=os.environ.get('ACCEPT_ATOMIC_FLOW', True),
    etcd=os.environ.get('ETCD', 'localhost:2379'),
    core_topic=os.environ.get('CORE_TOPIC', 'rwcore'),
    event_topic=os.environ.get('EVENT_TOPIC', 'voltha.events'),
    interface=os.environ.get('INTERFACE', get_my_primary_interface()),
    instance_id=os.environ.get('INSTANCE_ID', os.environ.get('HOSTNAME', '1')),
    kafka_adapter=os.environ.get('KAFKA_ADAPTER', '192.168.0.20:9092'),
    kafka_cluster=os.environ.get('KAFKA_CLUSTER', '10.100.198.220:9092'),
    backend=os.environ.get('BACKEND', 'none'),
    retry_interval=os.environ.get('RETRY_INTERVAL', 2),
    heartbeat_topic=os.environ.get('HEARTBEAT_TOPIC', 'adapters.heartbeat'),
    probe=os.environ.get('PROBE', ':8080')
)


def parse_args():
    parser = argparse.ArgumentParser()

    _help = ('Path to openonu.yml config file (default: %s). '
             'If relative, it is relative to main.py of the openonu adapter.'
             % defs['config'])
    parser.add_argument('-c', '--config',
                        dest='config',
                        action='store',
                        default=defs['config'],
                        help=_help)

    _help = 'Regular expression for extracting container number from ' \
            'container name (default: %s)' % defs['container_name_regex']
    parser.add_argument('-X', '--container-number-extractor',
                        dest='container_name_regex',
                        action='store',
                        default=defs['container_name_regex'],
                        help=_help)

    _help = '<hostname>:<port> of the consul agent (default: %s)' % defs['consul']
    parser.add_argument('-C', '--consul',
                        dest='consul',
                        action='store',
                        default=defs['consul'],
                        help=_help)

    _help = 'name of this adapter (default: %s)' % defs['name']
    parser.add_argument('-na', '--name',
                        dest='name',
                        action='store',
                        default=defs['name'],
                        help=_help)

    _help = 'vendor of this adapter (default: %s)' % defs['vendor']
    parser.add_argument('-ven', '--vendor',
                        dest='vendor',
                        action='store',
                        default=defs['vendor'],
                        help=_help)

    _help = 'supported device type of this adapter (default: %s)' \
            % defs['device_type']
    parser.add_argument('-dt', '--device_type',
                        dest='device_type',
                        action='store',
                        default=defs['device_type'],
                        help=_help)

    _help = 'specifies whether the device type accepts bulk flow updates ' \
            '(default: %s)' % defs['accept_bulk_flow']
    parser.add_argument('-abf', '--accept_bulk_flow',
                        dest='accept_bulk_flow',
                        action='store',
                        default=defs['accept_bulk_flow'],
                        help=_help)

    _help = 'specifies whether the device type accepts add/remove (atomic) ' \
            'flow updates (default: %s)' % defs['accept_atomic_flow']
    parser.add_argument('-aaf', '--accept_atomic_flow',
                        dest='accept_atomic_flow',
                        action='store',
                        default=defs['accept_atomic_flow'],
                        help=_help)

    _help = '<hostname>:<port> of the etcd server (default: %s)' % defs['etcd']
    parser.add_argument('-e', '--etcd',
                        dest='etcd',
                        action='store',
                        default=defs['etcd'],
                        help=_help)

    _help = ('unique string id of this container instance (default: %s)'
             % defs['instance_id'])
    parser.add_argument('-i', '--instance-id',
                        dest='instance_id',
                        action='store',
                        default=defs['instance_id'],
                        help=_help)

    _help = 'ETH interface to receive on (default: %s)' % defs['interface']
    parser.add_argument('-I', '--interface',
                        dest='interface',
                        action='store',
                        default=defs['interface'],
                        help=_help)

    _help = 'omit startup banner log lines'
    parser.add_argument('-n', '--no-banner',
                        dest='no_banner',
                        action='store_true',
                        default=False,
                        help=_help)

    _help = 'do not emit periodic heartbeat log messages'
    parser.add_argument('-N', '--no-heartbeat',
                        dest='no_heartbeat',
                        action='store_true',
                        default=False,
                        help=_help)

    _help = 'suppress debug and info logs'
    parser.add_argument('-q', '--quiet',
                        dest='quiet',
                        action='count',
                        help=_help)

    _help = 'enable verbose logging'
    parser.add_argument('-v', '--verbose',
                        dest='verbose',
                        action='count',
                        help=_help)

    _help = ('use the docker container name as the container instance id'
             ' (overrides -i/--instance-id option)')
    parser.add_argument('--instance-id-is-container-name',
                        dest='instance_id_is_container_name',
                        action='store_true',
                        default=False,
                        help=_help)

    _help = ('<hostname>:<port> of the kafka adapter broker (default: %s). '
             'If not specified, the address from the config file is used.'
             % defs['kafka_adapter'])
    parser.add_argument('-KA', '--kafka_adapter',
                        dest='kafka_adapter',
                        action='store',
                        default=defs['kafka_adapter'],
                        help=_help)

    _help = ('<hostname>:<port> of the kafka cluster broker (default: %s). '
             'If not specified, the address from the config file is used.'
             % defs['kafka_cluster'])
    parser.add_argument('-KC', '--kafka_cluster',
                        dest='kafka_cluster',
                        action='store',
                        default=defs['kafka_cluster'],
                        help=_help)

    _help = 'backend to use for config persistence'
    parser.add_argument('-b', '--backend',
                        default=defs['backend'],
                        choices=['none', 'consul', 'etcd'],
                        help=_help)

    _help = 'topic of core on the kafka bus'
    parser.add_argument('-ct', '--core_topic',
                        dest='core_topic',
                        action='store',
                        default=defs['core_topic'],
                        help=_help)

    _help = 'topic of events on the kafka bus'
    parser.add_argument('-et', '--event_topic',
                        dest='event_topic',
                        action='store',
                        default=defs['event_topic'],
                        help=_help)

    _help = '<hostname>:<port> for liveness and readiness probes (default: %s)' % defs['probe']
    parser.add_argument(
        '-P', '--probe', dest='probe', action='store',
        default=defs['probe'],
        help=_help)

    args = parser.parse_args()

    # post-processing

    if args.instance_id_is_container_name:
        args.instance_id = get_my_containers_name()

    return args


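# Load the YAML adapter configuration.  A path beginning with "." is resolved
# relative to the directory containing this file rather than the process's
# current working directory.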
def load_config(args):
    path = args.config
    if path.startswith('.'):
        dir = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(dir, path)
    path = os.path.abspath(path)
    with open(path) as fd:
        # SafeLoader is assumed to be sufficient here: the adapter config is
        # plain YAML, and this avoids the unsafe-load warning in newer PyYAML.
        config = yaml.load(fd, Loader=yaml.SafeLoader)
    return config


def print_banner(log):
    log.info(' ')
    log.info(' OpenOnu Adapter ')
    log.info(' ')
    log.info('(to stop: press Ctrl-C)')


@implementer(IComponent)
class Main(object):

    def __init__(self):

        self.args = args = parse_args()
        self.config = load_config(args)

        # Log levels in python are:
        # 1 - DEBUG    => verbosity_adjust = 0
        # 2 - INFO     => verbosity_adjust = 1
        # 3 - WARNING  => verbosity_adjust = 2
        # 4 - ERROR
        # 5 - CRITICAL
        # If no flags are set we stay at INFO; if verbose is set we go down
        # to DEBUG; if quiet is set we go up to WARNING.  Setting both makes
        # no sense and leaves you back at INFO.

        verbosity_adjust = 1 - (args.verbose or 0) + (args.quiet or 0)
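        # For example: "-v" alone gives 1 - 1 + 0 = 0 (DEBUG), "-q" alone
        # gives 1 - 0 + 1 = 2 (WARNING), and passing neither (or both)
        # leaves the adjustment at 1 (INFO).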
        self.log = setup_logging(self.config.get('logging', {}),
                                 args.instance_id,
                                 verbosity_adjust=verbosity_adjust)
        self.log.info('container-number-extractor',
                      regex=args.container_name_regex)

        self.adapter_version = self.get_version()
        self.log.info('OpenONU-Adapter-Version', version=self.adapter_version)

        if not args.no_banner:
            print_banner(self.log)

        self.adapter = None
        # Create a unique instance id using the passed-in instance id and
        # UTC timestamp
        current_time = arrow.utcnow().timestamp
        self.instance_id = self.args.instance_id + '_' + str(current_time)

        self.core_topic = str(args.core_topic)
        self.event_topic = str(args.event_topic)
        self.listening_topic = str(args.name)
        self.startup_components()

        if not args.no_heartbeat:
            self.start_heartbeat()
            self.start_kafka_cluster_heartbeat(self.instance_id)

    def get_version(self):
        path = defs['version_file']
        if not path.startswith('/'):
            dir = os.path.dirname(os.path.abspath(__file__))
            path = os.path.join(dir, path)

        path = os.path.abspath(path)
        version_file = open(path, 'r')
        v = version_file.read()

        # Use Version to validate the version string - exception will be raised
        # if the version is invalid
        Version(v)

        version_file.close()
        return v

    def start(self):
        self.start_reactor()  # will not return except on keyboard interrupt

    def stop(self):
        pass

    def get_args(self):
        """Allow access to command line args"""
        return self.args

    def get_config(self):
        """Allow access to content of config file"""
        return self.config

    def _get_adapter_config(self):
        cfg = AdapterConfig()
        return cfg

    @inlineCallbacks
    def startup_components(self):
        try:
            self.log.info('starting-internal-components',
                          consul=self.args.consul,
                          etcd=self.args.etcd)

            registry.register('main', self)

            # Update the logger to output the vcore id.
            self.log = update_logging(instance_id=self.instance_id,
                                      vcore_id=None)

            yield registry.register(
                'kafka_cluster_proxy',
                KafkaProxy(
                    self.args.consul,
                    self.args.kafka_cluster,
                    config=self.config.get('kafka-cluster-proxy', {})
                )
            ).start()
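            # The class-level flags on Probe (imported above) track which
            # dependencies have come up; the probe server started later by
            # start_probe() is expected to report liveness/readiness from them.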
            Probe.kafka_cluster_proxy_running = True

            config = self._get_adapter_config()

            self.core_proxy = CoreProxy(
                kafka_proxy=None,
                default_core_topic=self.core_topic,
                default_event_topic=self.event_topic,
                my_listening_topic=self.listening_topic)

            self.adapter_proxy = AdapterProxy(
                kafka_proxy=None,
                core_topic=self.core_topic,
                my_listening_topic=self.listening_topic)

            self.adapter = BrcmOpenomciOnuAdapter(
                core_proxy=self.core_proxy, adapter_proxy=self.adapter_proxy,
                config=config)

            self.adapter.start()

            openonu_request_handler = AdapterRequestFacade(adapter=self.adapter,
                                                           core_proxy=self.core_proxy)

            yield registry.register(
                'kafka_adapter_proxy',
                IKafkaMessagingProxy(
                    kafka_host_port=self.args.kafka_adapter,
                    # TODO: Add KV Store object reference
                    kv_store=self.args.backend,
                    default_topic=self.args.name,
                    group_id_prefix=self.args.instance_id,
                    target_cls=openonu_request_handler
                )
            ).start()
            Probe.kafka_adapter_proxy_running = True

            self.core_proxy.kafka_proxy = get_messaging_proxy()
            self.adapter_proxy.kafka_proxy = get_messaging_proxy()

            # retry forever
            res = yield self._register_with_core(-1)
            Probe.register_adapter_with_core = True

            self.log.info('started-internal-services')

        except Exception as e:
            self.log.exception('Failure-to-start-all-components', e=e)

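    # Shutdown path: stop registered components in reverse registration order,
    # then log and join any daemon threads that are still alive before the
    # reactor exits.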
    @inlineCallbacks
    def shutdown_components(self):
        """Execute before the reactor is shut down"""
        self.log.info('exiting-on-keyboard-interrupt')
        for component in reversed(registry.iterate()):
            yield component.stop()

        import threading
        self.log.info('THREADS:')
        main_thread = threading.current_thread()
        for t in threading.enumerate():
            if t is main_thread:
                continue
            if not t.isDaemon():
                continue
            self.log.info('joining thread {} {}'.format(
                t.getName(), "daemon" if t.isDaemon() else "not-daemon"))
            t.join()

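    # Hands control to the Twisted reactor: logs once the reactor is running,
    # registers shutdown_components as a pre-shutdown hook, kicks the probe
    # server off in a worker thread, and then blocks in reactor.run().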
    def start_reactor(self):
        from twisted.internet import reactor
        reactor.callWhenRunning(
            lambda: self.log.info('twisted-reactor-started'))
        reactor.addSystemEventTrigger('before', 'shutdown',
                                      self.shutdown_components)
        reactor.callInThread(self.start_probe)
        reactor.run()

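    # Runs in a reactor worker thread (see callInThread above): a plain,
    # blocking TCP server on the --probe address with the imported Probe class
    # acting as the request handler for liveness/readiness checks.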
    def start_probe(self):
        args = self.args
        host = args.probe.split(':')[0]
        port = args.probe.split(':')[1]
        server = socketserver.TCPServer((host, int(port)), Probe)
        server.serve_forever()

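    # Register this adapter with the VOLTHA core via the core proxy.  A
    # negative retries value (as used above) retries indefinitely; otherwise
    # one retry is consumed per timeout, sleeping defs['retry_interval']
    # seconds between attempts.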
    @inlineCallbacks
    def _register_with_core(self, retries):
        while True:
            try:
                resp = yield self.core_proxy.register(
                    self.adapter.adapter_descriptor(),
                    self.adapter.device_types())
                if resp:
                    self.log.info('registered-with-core',
                                  coreId=resp.instance_id)

                returnValue(resp)
            except TimeOutError as e:
                self.log.warn("timeout-when-registering-with-core", e=e)
                if retries == 0:
                    self.log.exception("no-more-retries", e=e)
                    raise
                else:
                    retries = retries if retries < 0 else retries - 1
                    yield asleep(defs['retry_interval'])
            except Exception as e:
                self.log.exception("failed-registration", e=e)
                raise

    def start_heartbeat(self):

        t0 = time.time()
        t0s = time.ctime(t0)

        def heartbeat():
            self.log.debug(status='up', since=t0s, uptime=time.time() - t0)

        lc = LoopingCall(heartbeat)
        lc.start(10)

    # Temporary function to send a heartbeat message to the external kafka
    # broker
    def start_kafka_cluster_heartbeat(self, instance_id):
        # For the heartbeat we send a message to the configured heartbeat
        # topic; the message is a JSON-encoded dict rather than a protocol
        # buffer.
        message = dict(
            type='heartbeat',
            adapter=self.args.name,
            instance=instance_id,
            ip=get_my_primary_local_ipv4()
        )
        topic = defs['heartbeat_topic']

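        # A published heartbeat looks roughly like this (illustrative values):
        #   {"type": "heartbeat", "adapter": "openonu", "instance": "<instance_id>",
        #    "ip": "10.0.0.5", "ts": 1573430400, "uptime": 42.0}
        # where "ts" and "uptime" are filled in by send_msg() below.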
        def send_msg(start_time):
            try:
                kafka_cluster_proxy = get_kafka_proxy()
                if kafka_cluster_proxy and not kafka_cluster_proxy.is_faulty():
                    # self.log.debug('kafka-proxy-available')
                    message['ts'] = arrow.utcnow().timestamp
                    message['uptime'] = time.time() - start_time
                    # self.log.debug('start-kafka-heartbeat')
                    kafka_cluster_proxy.send_message(topic, dumps(message))
                else:
                    Probe.kafka_cluster_proxy_running = False
                    self.log.error('kafka-proxy-unavailable')
            except Exception as e:
                self.log.exception('failed-sending-message-heartbeat', e=e)

        try:
            t0 = time.time()
            lc = LoopingCall(send_msg, t0)
            lc.start(10)
        except Exception as e:
            self.log.exception('failed-kafka-heartbeat', e=e)


if __name__ == '__main__':
    Main().start()