#!/usr/bin/env python

# Copyright 2018-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from kafkaloghandler import KafkaLogHandler
import json
import logging
import unittest

# mock is a part of unittest in python 3
try:
    from mock import patch
except ImportError:
    from unittest.mock import patch


class FakeKafkaProducer():
    '''
    Works like Producer in confluent_kafka, ref:
    https://docs.confluent.io/current/clients/confluent-kafka-python/#producer
    '''
    def __init__(self, config=None):
        # avoid a mutable default argument; fall back to an empty config
        self.config = config if config is not None else []

    def produce(self, topic, value='', key=''):
        self.topic = topic
        self.value = value
        self.key = key

    def flush(self, timeout=1):
        self.flush_timeout = timeout


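# Most of the tests below patch KafkaLogHandler._connect so that no broker
# connection is attempted, and replace the handler's producer with the
# FakeKafkaProducer above, which records the last produce() call so its
# topic, key, and JSON-serialized value can be inspected.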
class TestKafkaLogHandler(unittest.TestCase):

    def setUp(self):
        '''
        Set up tests for KafkaLogHandler, mainly the common logger init
        '''
        self.logger = logging.getLogger(__name__)
        self.logger.handlers = []
        self.logger.setLevel(logging.INFO)

    def tearDown(self):
        logging.shutdown()

    def test_single_message(self):
        '''
        tests that emit is called once when there is one message
        '''

        with patch.object(KafkaLogHandler, 'emit') as emit:

            klh = KafkaLogHandler(bootstrap_servers=["test-kafka:9092"],
                                  topic="testtopic")

            self.logger.addHandler(klh)

            self.logger.warning('Warning')

            assert emit.call_count == 1

    def test_with_structure(self):
        '''
        tests structured serialization of log to JSON
        '''

        with patch.object(KafkaLogHandler, '_connect'):

            klh = KafkaLogHandler(bootstrap_servers=["test-kafka:9092"],
                                  topic="testtopic")

            klh.producer = FakeKafkaProducer()

            self.logger.addHandler(klh)

            extra_data = {
                "foo": "value1",
                "bar": "value2",
                "l1": {"l2": {'l3': "nested"}},
            }

            self.logger.info('structured', extra=extra_data)

            decoded_message = json.loads(klh.producer.value)

            self.assertEqual(klh.producer.topic, 'testtopic')
            self.assertEqual(decoded_message['message'], 'structured')
            self.assertEqual(decoded_message['foo'], 'value1')
            self.assertEqual(decoded_message['bar'], 'value2')
            self.assertEqual(decoded_message['l1.l2.l3'], 'nested')

    def test_without_flatten(self):
        '''
        tests with flattening of objects disabled
        '''

        with patch.object(KafkaLogHandler, '_connect'):

            klh = KafkaLogHandler(bootstrap_servers=["test-kafka:9092"],
                                  topic="testtopic",
                                  flatten=0)

            klh.producer = FakeKafkaProducer()

            self.logger.addHandler(klh)

            extra_data = {
                "foo": "value1",
                "l1": {"l2": {'l3': "nested"}},
            }

            self.logger.info('noflatten', extra=extra_data)

            decoded_message = json.loads(klh.producer.value)

            self.assertEqual(decoded_message['message'], 'noflatten')
            self.assertEqual(decoded_message['foo'], 'value1')
            self.assertEqual(decoded_message['l1'], {'l2': {'l3': "nested"}})

    def test_with_shallow_flatten(self):
        '''
        Tests shallow flattening of objects and a different separator
        '''

        with patch.object(KafkaLogHandler, '_connect'):

            klh = KafkaLogHandler(bootstrap_servers=["test-kafka:9092"],
                                  topic="testtopic",
                                  flatten=1,
                                  separator='_')

            klh.producer = FakeKafkaProducer()

            self.logger.addHandler(klh)

            extra_data = {
                "foo": "value1",
                "l1": {"l2": {'l3': "nested"}},
            }

            self.logger.info('oneflatten', extra=extra_data)

            decoded_message = json.loads(klh.producer.value)

            self.assertEqual(decoded_message['message'], 'oneflatten')
            self.assertEqual(decoded_message['foo'], 'value1')
            self.assertEqual(decoded_message['l1_l2'], {'l3': 'nested'})

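    # Note: test_with_structure, test_without_flatten and
    # test_with_shallow_flatten cover the flatten option: by default nested
    # dicts are flattened into dot-separated keys ('l1.l2.l3'), flatten=0
    # passes nested dicts through unchanged, and flatten=1 flattens only one
    # level, joining keys with the configured separator ('l1_l2').
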
    def test_override_key(self):
        '''
        Test setting the key argument to override the default
        '''

        with patch.object(KafkaLogHandler, '_connect'):

            klh = KafkaLogHandler(bootstrap_servers=["test-kafka:9092"],
                                  topic="testtopic")

            klh.producer = FakeKafkaProducer()

            self.logger.addHandler(klh)

            extra_data = {
                "foo": "value1",
                "l1": {"l2": {'l3': "nested"}},
            }

            # log with default 'klh' key
            self.logger.info('defaultkey', extra=extra_data)

            decoded_message1 = json.loads(klh.producer.value)

            self.assertEqual(klh.producer.key, 'klh')
            self.assertEqual(decoded_message1['foo'], 'value1')
            self.assertEqual(decoded_message1['message'], 'defaultkey')
            self.assertEqual(decoded_message1['l1.l2.l3'], 'nested')

            # log with key overridden
            extra_data.update({'key': 'override'})
            self.logger.info('keyoverride', extra=extra_data)

            decoded_message2 = json.loads(klh.producer.value)

            self.assertEqual(klh.producer.key, 'override')
            self.assertEqual(decoded_message2['message'], 'keyoverride')
            self.assertEqual(decoded_message2['foo'], 'value1')
            self.assertEqual(decoded_message2['l1.l2.l3'], 'nested')

    def test_blacklist(self):
        '''
        tests that keys in the blacklist are excluded from the message
        '''

        with patch.object(KafkaLogHandler, '_connect'):

            klh = KafkaLogHandler(bootstrap_servers=["test-kafka:9092"],
                                  topic="testtopic",
                                  blacklist=["bar"])

            klh.producer = FakeKafkaProducer()

            self.logger.addHandler(klh)

            extra_data = {
                "foo": "value1",
                "bar": "value2",
                "l1": {"l2": {'l3': "nested"}},
            }

            self.logger.info('blacklist', extra=extra_data)

            decoded_message = json.loads(klh.producer.value)

            self.assertEqual(klh.producer.topic, 'testtopic')
            self.assertEqual(decoded_message['message'], 'blacklist')
            self.assertEqual(decoded_message['foo'], 'value1')
            with self.assertRaises(KeyError):
                decoded_message['bar']
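

# A minimal convenience stanza, assuming this module may also be invoked
# directly (python test_kafkaloghandler.py); the project's usual test
# runner is unaffected by it.
if __name__ == '__main__':
    unittest.main()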