Skip to content

Commit 908a532

Browse files
author
Sudev Ambadi
committed
Fails when Kafka is not available
1 parent 31b0fd5 commit 908a532

File tree

2 files changed

+31
-2
lines changed

2 files changed

+31
-2
lines changed

src/KafkaHandler.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,10 @@ def __init__(self, hosts_list, topic, batch_size, key=None):
1414
def emit(self, record):
1515
# drop kafka logging to avoid infinite recursion
1616
# Or should I write them to a file ? (Grr.. there are too many of them too, so ignore)
17-
if record.name == 'kafka':
17+
if record.name == 'kafka':
1818
return
1919
try:
20-
# use default formatting and then byte encode it
20+
# use default formatting, this can be overiden by goibibo buckter format
2121
msg = self.format(record)
2222
msg = bytes(msg)
2323
# Keyed messages should be produced when ordering of message is important

test/kafka_dead.py

+29
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
"""Smoke test: measure logging latency when the Kafka broker is unreachable.

Attaches a KafkaLoggingHandler pointed at a broker address that is expected
to be dead, logs a burst of messages, and prints the mean and standard
deviation of the per-message latency. The goal is to observe how the handler
behaves in the "Kafka not available" case.

NOTE(review): this is a manual/smoke script, not an automated test — it has
module-level side effects (network attempts, prints, a 20 s sleep) by design.
"""
import logging
import sys
import time

import numpy

# The handler under test lives in ../src, next to this test directory.
sys.path.append('../src')
import KafkaHandler as kl

logger = logging.getLogger('spam_application')
logger.setLevel(logging.DEBUG)

# Add the log message handler to the logger.
# Deliberately use a broker address that should be unreachable, so the
# handler's failure path is exercised.
handler = kl.KafkaLoggingHandler(hosts_list="10.70.45.47:9092",
                                 topic="testLogger",
                                 batch_size=100,
                                 key="Hello")

logger.addHandler(handler)

latency = []
# Log some messages, timing each info+debug pair so we can see whether a
# dead broker stalls the logging call itself.
for i in range(200):
    t = time.time()
    logger.info('i = %d' % i)
    logger.debug('i = %d' % i)
    latency.append(time.time() - t)

nl = numpy.array(latency)
print(numpy.mean(nl))
print(numpy.std(nl))
# Linger before exiting — presumably to give the handler's batching/async
# producer time to attempt (and fail) delivery. TODO confirm the 20 s value.
time.sleep(20)

0 commit comments

Comments
 (0)