#
# Copyright 2015 Cisco Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import kafka
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import netutils
from six.moves.urllib import parse as urlparse

from ceilometer.i18n import _LE
from ceilometer.publisher import messaging

LOG = log.getLogger(__name__)


class KafkaBrokerPublisher(messaging.MessagingPublisher):
    """Publish metering data to a Kafka broker.

    The IP address and port number of the Kafka broker should be configured
    in the Ceilometer pipeline configuration file. If an IP address is not
    specified, this Kafka publisher will not publish any meters.

    To enable this publisher, add the following section to the
    /etc/ceilometer/pipeline.yaml file or simply add it to an existing
    pipeline::

        meter:
            - name: meter_kafka
              interval: 600
              counters:
                  - "*"
              transformers:
              sinks:
                  - kafka_sink
        sinks:
            - name: kafka_sink
              transformers:
              publishers:
                  - kafka://[kafka_broker_ip]:[kafka_broker_port]?topic=[topic]

    The Kafka topic name and the broker's port can be set in the publisher
    URL. If the topic parameter is missing, this publisher publishes
    metering data under the topic name 'ceilometer'. If the port number is
    not specified, 9092 is used as the broker's port.

    This publisher supports the transmit policies queue, drop, and retry,
    selected with the policy field of the URL parameters. With the queue
    policy, the local queue length can also be bounded with the
    max_queue_length field. With the retry policy, a failed transfer is
    resent as many times as specified in the max_retry field; if max_retry
    is not specified, the default number of retries is 100.
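
    For example (the broker address and option values here are only
    illustrative), a publisher URL combining these options might look
    like::

        kafka://127.0.0.1:9092?topic=ceilometer&policy=retry&max_retry=100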
    """

    def __init__(self, parsed_url):
        super(KafkaBrokerPublisher, self).__init__(parsed_url)
        options = urlparse.parse_qs(parsed_url.query)

        self._producer = None
        self._host, self._port = netutils.parse_host_port(
            parsed_url.netloc, default_port=9092)
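        # parse_qs() maps each query parameter to a list of values; the
        # [-1] below keeps the last occurrence when a parameter is repeated.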
        self._topic = options.get('topic', ['ceilometer'])[-1]
        self.max_retry = int(options.get('max_retry', [100])[-1])

    def _ensure_connection(self):
        if self._producer:
            return

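        # The producer is created lazily on first use. Constructing
        # KafkaProducer bootstraps cluster metadata from the configured
        # broker, so connectivity problems typically surface here.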
        try:
            self._producer = kafka.KafkaProducer(
                bootstrap_servers=["%s:%s" % (self._host, self._port)])
        except Exception as e:
            LOG.exception(_LE("Failed to connect to Kafka service: %s"), e)
            raise messaging.DeliveryFailure('Kafka Client is not available, '
                                            'please restart Kafka client')

    def _send(self, context, event_type, data):
        self._ensure_connection()
        # TODO(sileht): don't split the payload into multiple network
        # messages ... but how to do that without breaking consuming
        # applications...
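        # Note: KafkaProducer.send() is asynchronous; it buffers the record
        # and returns a future, so exceptions caught here are typically
        # buffering or metadata errors rather than per-record delivery
        # failures.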
        try:
            for d in data:
                self._producer.send(self._topic, jsonutils.dumps(d))
            LOG.info("Kafka publish")
        except Exception as e:
            messaging.raise_delivery_failure(e)