Skip to content

Commit fca2f22

Browse files
committed
提交
1 parent e361fa5 commit fca2f22

File tree

2 files changed

+55
-15
lines changed

2 files changed

+55
-15
lines changed

README.md

Lines changed: 49 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,59 @@ HELLOWORLD
1919

2020
demo1
2121

22-
```python
23-
22+
```
2423
from logger import Logger
2524
class MyClass(Logger):
2625
name = "log_name"
2726
def __init__(self, settings_file):
2827
super(MyClass, self).__init__(settings_file)
2928
self.logger.debug("....")
29+
```
30+
31+
demo2
32+
33+
```
34+
import os
35+
from logger import CustomLogFactory
36+
from cloghandler import ConcurrentRotatingFileHandler
37+
my_dir = "logs"
38+
try:
39+
os.makedirs(my_dir)
40+
except OSError as exception:
41+
if exception.errno != errno.EEXIST:
42+
raise
43+
logger = CustomLogFactory.get_instance(name="test_name")
44+
logger.set_handler(
45+
ConcurrentRotatingFileHandler(
46+
os.path.join(my_dir, "test.log"),
47+
backupCount=5,
48+
maxBytes=10240))
49+
logger.info("this is a log. ")
50+
```
51+
52+
demo3
53+
54+
```
55+
from logger import CustomLogFactory, KafkaHandler
56+
settings = {"KAFKA_HOSTS":"192.168.200.90:9092", "TOPIC":"jay-cluster-logs"}
57+
logger = CustomLogFactory.get_instance(name="test_name", json=True)
58+
kafka_handler = KafkaHandler(settings)
59+
logger.set_handler(kafka_handler)
60+
logger.info("this is a log. ")
61+
```
62+
63+
demo4
64+
```
65+
import sys
66+
import logging
67+
from logger import CustomLogFactory
68+
logger = CustomLogFactory.get_instance(name="test_name")
69+
logger.set_handler(logging.StreamHandler(sys.stdout))
70+
logger.info("this is a log. ")
71+
```
3072

31-
```
73+
demo5
74+
```
75+
# 编写自定义handler
76+
# 请参见KafkaHandler的实现方式
77+
```

logger.py

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
import inspect
1111
from functools import wraps
1212
from kafka import KafkaClient, SimpleProducer
13-
from argparse import ArgumentParser
1413
from kafka.common import FailedPayloadsError
1514
from cloghandler import ConcurrentRotatingFileHandler
1615
from scutils.log_factory import LogFactory, LogObject
@@ -142,15 +141,18 @@ def set_logger(self, logger=None):
142141
maxBytes=my_bytes))
143142

144143

145-
class SignalLogger(Logger):
144+
class ThreadClosing(object):
146145

147146
alive = True
148147

149-
def __init__(self, settings):
150-
super(SignalLogger, self).__init__(settings)
148+
def __init__(self):
151149
self.threads = []
152150
self.int_signal_count = 1
153151
self.open()
152+
self.logger = LogFactory.get_instance()
153+
154+
def set_logger(self, logger):
155+
self.logger = logger
154156

155157
def stop(self, *args):
156158
if self.int_signal_count > 1:
@@ -169,14 +171,6 @@ def open(self):
169171
def set_force_interrupt(self, thread):
170172
self.threads.append(thread)
171173

172-
@classmethod
173-
def parse_args(cls):
174-
parser = ArgumentParser()
175-
parser.add_argument("-s", "--settings", dest="settings", help="settings", default="settings_kafkadump.py")
176-
parser.add_argument("-c", "--consumer", required=False, dest="consumer", help="comsumer id")
177-
parser.add_argument("-t", "--topic", required=True, dest="topic", help="topic")
178-
return cls(**vars(parser.parse_args()))
179-
180174
def _async_raise(self, name, tid, exctype):
181175
"""raises the exception, performs cleanup if needed"""
182176
tid = ctypes.c_long(tid)

0 commit comments

Comments
 (0)