├── pystream
│   ├── executor
│   │   ├── __init__.py
│   │   ├── event.py
│   │   ├── async.py
│   │   ├── wraps.py
│   │   ├── utils.py
│   │   ├── output.py
│   │   ├── executor.py
│   │   ├── source.py
│   │   └── middleware.py
│   ├── __init__.py
│   ├── logparser
│   │   ├── exception.py
│   │   ├── datatype.py
│   │   ├── __init__.py
│   │   ├── logparser.py
│   │   └── ruleparser.py
│   ├── logger.py
│   ├── rule
│   ├── utils.py
│   ├── logic.py
│   └── config.py
├── requirements.txt
├── setup.py
└── README.md
--------------------------------------------------------------------------------
/pystream/executor/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | 
5 | __author__ = 'tong'
6 | 
--------------------------------------------------------------------------------
/pystream/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import logger
5 | 
6 | __author__ = 'tong'
7 | __version__ = '1.0.0'
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pyparsing>=2.1.5
2 | python-dateutil>=2.5.3
3 | psutil>=4.3.0
4 | multiprocess>=0.70.5
5 | pytz
6 | toolz
7 | 
8 | # posix
9 | pybloomfiltermmap>=0.3.15
10 | # windows
11 | pybloom>=1.1
--------------------------------------------------------------------------------
/pystream/executor/event.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | 
5 | __author__ = 'tong'
6 | 
7 | 
8 | class Enum(object):
9 |     def __init__(self, name, value):
10 |         self.name = name
11 |         self.value = value
12 | 
13 |     def __eq__(self, other):
14 |         return self.value == other.value
15 | 
16 |     def __str__(self):
17 |         return self.name
18 | 
19 |     def __repr__(self):
20 |         return '<%s>' % self.name
21 | 
22 | 
23 | class Event(object):
24 |     SKIP = Enum('SKIP', 1)
25 |     IDLE = Enum('IDLE', 2)
26 | 
27 | 
28 | def is_event(obj):
29 |     return isinstance(obj, Enum)
30 | 
--------------------------------------------------------------------------------
/pystream/logparser/exception.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | 
5 | __author__ = 'tong'
6 | 
7 | 
8 | class LogParserException(Exception):
9 |     def __init__(self, msg):
10 |         self.message = msg
11 | 
12 |     def __str__(self):
13 |         return '%s: %s' % (self.__class__.__name__, self.message)
14 | 
15 | 
16 | class RuleException(LogParserException):
17 |     pass
18 | 
19 | 
20 | class ParseException(LogParserException):
21 |     def __init__(self, msg, line=None, type=None, rule=None):
22 |         super(ParseException, self).__init__(msg)
23 |         self.line = line
24 |         self.type = type
25 |         self.rule = rule
26 | 
27 | 
28 | class ClassException(LogParserException):
29 |     pass
--------------------------------------------------------------------------------
/pystream/logger.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import uuid
5 | import time
6 | import logging
7 | from random import randint
8 | from traceback import format_exc
9 | 
10 | __author__ = 'tong'
11 | 
12 | logger = logging.getLogger('stream.logger')
13 | tracer = logging.getLogger('stream.tracer')
14 | 
15 | 
16 | class LogTracer(logging.Filter):
17 |     def filter(self, record):
18 |         if record.levelno >= logging.WARN:
19 |             extra = getattr(record, 'extra', {})
20 |             trace_id = str(uuid.uuid3(uuid.NAMESPACE_DNS, '%s%s' % (time.time(), randint(0, 100000))))
21 |             record.trace_id = trace_id
22 |             msg = '\n'.join(['[%s]\n%s' % (k.strip(), str(v).strip()) for k, v in extra.items()])
23 |             tracer.error('[%s]: %s[format_exc]\n%s' % (trace_id, msg and ('\n%s\n' % msg), format_exc()))
24 |         return True
25 | 
26 | logger.addFilter(LogTracer())
27 | 
--------------------------------------------------------------------------------
/pystream/rule:
--------------------------------------------------------------------------------
1 | [nginx]
2 | type=regex
3 | # group names restored from the fields/subrules mappings below; "ident" is an assumed name for the unmapped second field
4 | rule=(?P<remote_addr>(((25[0-5]|2[0-4]\d|((1\d{2})|([1-9]?\d)))\.){3}(25[0-5]|2[0-4]\d|((1\d{2})|([1-9]?\d))))) (?P<ident>.*?) (?P<remote_user>.*?) \[(?P<time_local>\d{2}/(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)/\d{4}:\d{2}:\d{2}:\d{2})( \+\d{4})?\] "(?P<request>(?P<method>(OPTIONS|HEAD|GET|POST|PUT|DELETE|TRACE|CONNECT)) (?P<path>.*?) (?P<version>.*?)|.*?)" (?P<status>\d+?) (?P<body_bytes_sent>.*?) "(?P<http_referer>.*?)" "(?P<http_user_agent>.*?)"
5 | fields={"status": "status", "remote_user": "remote_user", "http_referer": "http_referer", "remote_addr": "remote_addr", "request": "request", "version": "version", "http_user_agent": "http_user_agent", "path": "path", "method": "method"}
6 | subrules={"body_bytes_sent": "nginx_bytes_sent", "time_local": "nginx_time"}
7 | 
8 | [nginx_bytes_sent]
9 | type=type
10 | rule=number
11 | fields={"body_bytes_sent": "0"}
12 | 
13 | [nginx_time]
14 | type=type
15 | rule=date
16 | fields={"time_local": "0"}
17 | 
--------------------------------------------------------------------------------
/pystream/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import json
5 | from datetime import datetime, date, time
6 | 
7 | __author__ = 'tong'
8 | 
9 | 
10 | class JSONCls(json.JSONEncoder):
11 |     def default(self, obj):
12 |         from sqlalchemy.engine.result import RowProxy
13 |         if isinstance(obj, RowProxy):
14 |             return list(obj)
15 |         if isinstance(obj, datetime):
16 |             return '%04d-%02d-%02d %02d:%02d:%02d' % (obj.year, obj.month, obj.day, obj.hour, obj.minute, obj.second)
17 |         if isinstance(obj, date):
18 |             return '%04d-%02d-%02d' % (obj.year, obj.month, obj.day)
19 |         if isinstance(obj, time):
20 |             return obj.strftime('%H:%M:%S')
21 |         return json.JSONEncoder.default(self, obj)
22 | 
23 | 
24 | class DefaultParser(object):
25 |     def parse(self, data):
26 |         from .logparser.logparser import ParserResult
27 |         return ParserResult(data, data, data)
28 | 
29 |     @property
30 |     def rule(self):
31 |         return None
32 | 
33 |     @property
34 |     def fieldnames(self):
35 |         return []
36 | 
37 |     @property
38 |     def fieldtypes(self):
39 |         return {}
40 | 
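`JSONCls` above teaches `json.JSONEncoder` to serialize SQLAlchemy rows and date/time values; `logic.py` relies on it when rendering expressions. A minimal usage sketch, not part of the source tree (the sample record is illustrative):

```python
# Hedged sketch: serializing datetime values with pystream.utils.JSONCls.
import json
from datetime import datetime
from pystream.utils import JSONCls

record = {'ts': datetime(2017, 2, 15, 13, 11, 3), 'status': 400}
print json.dumps(record, cls=JSONCls)  # e.g. {"status": 400, "ts": "2017-02-15 13:11:03"}
```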
--------------------------------------------------------------------------------
/pystream/logparser/datatype.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import re
5 | from dateutil.parser import parse
6 | from exception import ParseException
7 | 
8 | __author__ = 'tong'
9 | 
10 | 
11 | class Datatype(object):
12 |     def __init__(self, data):
13 |         self._data = data
14 | 
15 |     def __str__(self):
16 |         return str(self._data)
17 | 
18 |     __repr__ = __str__
19 | 
20 |     @classmethod
21 |     def get(cls, item):
22 |         item = item.lower()
23 |         for subcls in cls.__subclasses__():
24 |             if item == subcls.__name__.lower():
25 |                 return subcls
26 |         raise Exception(u'Unknown type: %s' % item)
27 | 
28 |     @property
29 |     def data(self):
30 |         return self._data
31 | 
32 | 
33 | class Number(Datatype):
34 |     pattern = re.compile(r"(\d*\.)?\d+")
35 | 
36 |     def __init__(self, data):
37 |         data = str(data)
38 |         # check the hex prefix first: the pattern would otherwise match the leading "0"
39 |         if data.lower().startswith('0x'):
40 |             data = str(int(data, 16))
41 |         else:
42 |             res = re.match(Number.pattern, data)
43 |             if res:
44 |                 data = res.group()
45 |             else:
46 |                 raise ParseException('%s is not a number' % data)
47 |         super(Number, self).__init__(data)
48 | 
49 |     @property
50 |     def data(self):
51 |         if '.' in self._data:
52 |             return float(self._data)
53 |         return int(self._data)
54 | 
55 | 
56 | class Date(Datatype):
57 |     def __init__(self, data):
58 |         data = parse(data, fuzzy=True)
59 |         super(Date, self).__init__(data)
60 | 
61 |     def __str__(self):
62 |         return self._data.strftime('%Y-%m-%d %H:%M:%S')
63 | 
64 | 
65 | class String(Datatype):
66 |     def __init__(self, data):
67 |         super(String, self).__init__(data)
68 | 
--------------------------------------------------------------------------------
/pystream/executor/async.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import logging
5 | from asyncore import dispatcher
6 | 
7 | from .event import is_event
8 | 
9 | __author__ = 'tong'
10 | 
11 | logger = logging.getLogger('stream.logger')
12 | 
13 | 
14 | class TCPClient(dispatcher):
15 |     def __init__(self, address):
16 |         dispatcher.__init__(self)
17 |         self.message = None
18 |         self.iterator = None
19 |         self._source = None
20 |         self.address = address
21 | 
22 |     def handle_connect(self):
23 |         pass
24 | 
25 |     def handle_read(self):
26 |         pass
27 | 
28 |     def handle_error(self):
29 |         logger.error('client[TCP] socket %s error' % str(self.addr))
30 |         self.handle_close()
31 | 
32 |     def handle_expt(self):
33 |         logger.error('client[TCP] socket %s error: unhandled incoming priority event' % self.addr)
34 | 
35 |     def handle_close(self):
36 |         logger.info('client[TCP] socket %s close' % str(self.addr))
37 |         self.close()
38 | 
39 |     def writable(self):
40 |         try:
41 |             self.message = next(self.iterator)
42 |             return not is_event(self.message)
43 |         except StopIteration:
44 |             self.handle_close()
45 |             return False
46 | 
47 |     def handle_write(self):
48 |         sent = self.send(self.message+'\n')
49 |         logger.debug('OUTPUT socket a message(%s)' % sent)
50 | 
51 |     def start(self):
52 |         import socket
53 |         import asyncore
54 |         socket_af = socket.AF_UNIX if isinstance(self.address, basestring) else socket.AF_INET
55 |         self.create_socket(socket_af, socket.SOCK_STREAM)
56 |         self.connect(self.address)
57 |         self.iterator = iter(self.source)
58 |         asyncore.loop(use_poll=True)
59 | 
60 |     @property
61 |     def source(self):
62 |         if not self._source:
63 |             raise Exception('Lack of data source!')
64 |         return self._source
65 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import os
5 | import re
6 | import sys
7 | 
8 | from setuptools import find_packages, setup
9 | 
10 | 
11 | POSIX = os.name == "posix"
12 | WINDOWS = os.name == "nt"
13 | LINUX = sys.platform.startswith("linux")
14 | OSX = sys.platform.startswith("darwin")
15 | FREEBSD = sys.platform.startswith("freebsd")
16 | OPENBSD = sys.platform.startswith("openbsd")
17 | NETBSD = sys.platform.startswith("netbsd")
18 | BSD = FREEBSD or OPENBSD or NETBSD
19 | SUNOS = sys.platform.startswith("sunos") or sys.platform.startswith("solaris")
20 | 
21 | 
22 | def text_of(relpath):
23 |     thisdir = os.path.dirname(__file__)
24 |     file_path = os.path.join(thisdir, os.path.normpath(relpath))
25 |     with open(file_path) as f:
26 |         text = f.read()
27 |     return text
28 | 
29 | version = re.search(
30 |     "__version__ = '([^']+)'", text_of('pystream/__init__.py')
31 | ).group(1)
32 | 
33 | 
34 | NAME = 'python-stream'
35 | VERSION = version
36 | DESCRIPTION = 'Data collection and processing framework'
37 | KEYWORDS = 'File Log Structure Stream'
38 | AUTHOR = 'tong'
39 | AUTHOR_EMAIL = 'g_tongbin@foxmail.com'
40 | URL = 'http://t.navan.cc'
41 | LICENSE = ''
42 | PACKAGES = find_packages(exclude=['tests', 'tests.*'])
43 | PACKAGE_DATA = {'pystream': ['rule']}
44 | 
45 | INSTALL_REQUIRES = ['pyparsing>=2.1.5', 'pytz', 'python-dateutil>=2.5.3', 'psutil>=4.3.0', 'pybloom>=1.1', 'toolz']
46 | 
47 | if not WINDOWS:
48 |     INSTALL_REQUIRES.append('pybloomfiltermmap>=0.3.15')
49 | 
50 | CLASSIFIERS = [
51 |     'Operating System :: OS Independent',
52 |     'Programming Language :: Python',
53 |     'Programming Language :: Python :: 2',
54 |     'Programming Language :: Python :: 2.7',
55 |     'Topic :: structure :: Agent :: Log :: File :: Csv',
56 |     'Topic :: Software Development :: Libraries'
57 | ]
58 | 
59 | LONG_DESCRIPTION = text_of('README.md')
60 | 
61 | 
62 | params = {
63 |     'name': NAME,
64 |     'version': VERSION,
65 |     'description': DESCRIPTION,
66 |     'keywords': KEYWORDS,
67 |     'long_description': LONG_DESCRIPTION,
68 |     'author': AUTHOR,
69 |     'author_email': AUTHOR_EMAIL,
70 |     'url': URL,
71 |     'license': LICENSE,
72 |     'packages': PACKAGES,
73 |     'package_data': PACKAGE_DATA,
74 |     'install_requires': INSTALL_REQUIRES,
75 |     'classifiers': CLASSIFIERS,
76 | }
77 | 
78 | setup(**params)
79 | 
--------------------------------------------------------------------------------
/pystream/logic.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import json
5 | from utils import JSONCls
6 | 
7 | __author__ = 'tong'
8 | 
9 | 
10 | class Expr(object):
11 |     def __init__(self, name=None):
12 |         self.name = name or '_'
13 |         self.operator = lambda x, y: x == y
14 |         self.value = None
15 |         self.linker = '='
16 | 
17 |     def set(self, o, v, l):
18 |         self.operator = o
19 |         self.value = v
20 |         self.linker = l
21 |         return self
22 | 
23 |     def data(self, data):
24 |         pass
25 | 
26 |     def result(self, data):
27 |         return self.operator(self.data(data), self.value)
28 | 
29 |     def __eq__(self, other):
30 |         return self.set(lambda x, y: x == y, other, '==')
31 | 
32 |     def __ne__(self, other):
33 |         return self.set(lambda x, y: x != y, other, '!=')
34 | 
35 |     def __lt__(self, other):
36 |         return self.set(lambda x, y: x < y, other, '<')
37 | 
38 |     def __gt__(self, other):
39 |         return self.set(lambda x, y: x > y, other, '>')
40 | 
41 |     def __le__(self, other):
42 |         return self.set(lambda x, y: x <= y, other, '<=')
43 | 
44 |     def __ge__(self, other):
45 |         return self.set(lambda x, y: x >= y, other, '>=')
46 | 
47 |     def contain(self, other):
48 |         return self.set(lambda x, y: y in x, other, 'in')
49 | 
50 |     def In(self, other):
51 |         return self.set(lambda x, y: x in y, other, 'in')
52 | 
53 |     def __or__(self, other):
54 |         return Or(self, other)
55 | 
56 |     def __and__(self, other):
57 |         return And(self, other)
58 | 
59 |     def __str__(self):
60 |         return '`%s` %s %s' % (self.name, self.linker, json.dumps(self.value, cls=JSONCls))
61 | 
62 | 
63 | class Text(Expr):
64 |     def data(self, data):
65 |         return data
66 | 
67 | 
68 | class Key(Expr):
69 |     def data(self, data):
70 |         if not isinstance(data, dict):
71 |             raise Exception('data should be a dict')
72 |         return data.get(self.name)
73 | 
74 | 
75 | class And(object):
76 |     def __init__(self, *args):
77 |         self.args = args
78 | 
79 |     def result(self, data):
80 |         return all([v.result(data) for v in self.args])
81 | 
82 |     def __str__(self):
83 |         return ' And '.join(['(%s)' % i for i in self.args])
84 | 
85 | 
86 | class Or(object):
87 |     def __init__(self, *args):
88 |         self.args = args
89 | 
90 |     def result(self, data):
91 |         return any([v.result(data) for v in self.args])
92 | 
93 |     def __str__(self):
94 |         return ' Or '.join(['(%s)' % i for i in self.args])
95 | 
96 | _ = Text()
97 | 
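The expression objects above are the predicate language consumed by `Filter` (in `executor.py`) and `config.parse`: comparing a `Key` configures the `Expr` in place and returns it, `And`/`Or` combine expressions, and `result(data)` evaluates them against a record. A minimal sketch, not part of the source tree (sample values are illustrative; `And(...)` is used rather than `&` because Python binds `&` tighter than `==`):

```python
# Hedged sketch: composing and evaluating pystream.logic expressions by hand.
from pystream.logic import Key, And

# `Key('status') == '400'` returns the configured Expr itself.
expr = And(Key('status') == '400', Key('method') != 'POST')

print expr.result({'status': '400', 'method': 'GET'})  # True
print expr.result({'status': '200', 'method': 'GET'})  # False
print expr  # (`status` == "400") And (`method` != "POST")
```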
--------------------------------------------------------------------------------
/pystream/executor/wraps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import logging
5 | import traceback
6 | 
7 | from output import Output
8 | from executor import Executor
9 | from event import Event, is_event
10 | from utils import start_process, IterableError
11 | 
12 | 
13 | __author__ = 'tong'
14 | 
15 | logger = logging.getLogger('stream.logger')
16 | 
17 | 
18 | class Wraps(Executor):
19 |     def __or__(self, other):
20 |         raise Exception('please use it as `source | %s(exe)`' % self.__class__.__name__)
21 | 
22 | 
23 | class Batch(Wraps):
24 |     def __init__(self, sender, **kwargs):
25 |         if not isinstance(sender, Output):
26 |             raise Exception('sender only accepts `Output` type')
27 |         self.sender = sender
28 |         super(Batch, self).__init__(**kwargs)
29 | 
30 |     def __iter__(self):
31 |         iterator = super(Batch, self).__iter__()
32 |         for items in iterator:
33 |             if is_event(items) and self._output:
34 |                 yield items
35 |                 continue
36 |             for item in items:
37 |                 yield item
38 | 
39 |     def handle(self, item):
40 |         try:
41 |             self.sender.outputmany(item)
42 |         except IterableError, e:
43 |             return [_ for _ in e.args]
44 |         except Exception, e:
45 |             return [{'data': _, 'exception': e, 'traceback': traceback.format_exc()} for _ in item]
46 | 
47 | 
48 | class Combiner(Wraps):
49 |     def __init__(self, *args, **kwargs):
50 |         super(Combiner, self).__init__(**kwargs)
51 |         self._source = args
52 |         if not args:
53 |             raise Exception('no source')
54 | 
55 |     @property
56 |     def source(self):
57 |         sources = [iter(s) for s in self._source]
58 |         while True:
59 |             if not sources:
60 |                 break
61 |             source = sources.pop(0)
62 |             try:
63 |                 while True:
64 |                     item = next(source)
65 |                     yield item
66 |                     if is_event(item) and item == Event.IDLE:
67 |                         break
68 |                 sources.append(source)
69 |             except StopIteration:
70 |                 pass
71 | 
72 | 
73 | class Daemonic(Wraps):
74 |     def __init__(self, exe, handler=None, **kwargs):
75 |         super(Daemonic, self).__init__(**kwargs)
76 |         self.exe = exe
77 |         self.handler = handler
78 | 
79 |     def __iter__(self):
80 |         raise Exception('please use `start` method')
81 | 
82 |     def _run(self):
83 |         if isinstance(self.exe, Output):
84 |             handle = self.handler or (lambda x: self.handle_exception(x['data'], x['exception']))
85 |         else:
86 |             handle = self.handler or (lambda x: x)
87 |         for item in self.exe:
88 |             handle(item)
89 | 
90 |     def start(self):
91 |         if hasattr(self.exe, 'start'):
92 |             return start_process(self.exe.start)
93 |         else:
94 |             return start_process(self._run)
95 | 
--------------------------------------------------------------------------------
/pystream/logparser/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import ruleparser
5 | from exception import RuleException, ParseException
6 | 
7 | __author__ = 'tong'
8 | 
9 | 
10 | class Rule(object):
11 |     
def __init__(self, rule=None): 12 | self._type = None 13 | self._rule = None 14 | self._subrules = {} 15 | self._parser = None 16 | self._ruleparser = None 17 | rule = rule or {} 18 | if not isinstance(rule, dict): 19 | raise RuleException('Input rule should be dict!') 20 | 21 | if rule: 22 | self.type = rule.get('type') 23 | self.rule = rule.get('rule') 24 | self.subrules = rule.get('subrules', {}) 25 | 26 | def __str__(self): 27 | return '[TYPE] %s [RULE] %s [SUBRULES] %s' % (self.type, self.rule, self.subrules) 28 | 29 | def parse(self, text): 30 | parser = self.parser 31 | try: 32 | return parser.parse(text) 33 | except Exception, e: 34 | raise ParseException(e, text, self.type, self.rule) 35 | 36 | def __call__(self): 37 | return { 38 | 'type': self.type.lower(), 39 | 'rule': self.rule, 40 | 'subrules': self.subrules 41 | } 42 | 43 | @property 44 | def parser(self): 45 | if not self._parser: 46 | try: 47 | self._parser = self._ruleparser(self.rule) 48 | except Exception, e: 49 | raise RuleException('%s rule error: %s (%s)' % (self.type, self.rule, e)) 50 | return self._parser 51 | 52 | @property 53 | def type(self): 54 | if self._type is None: 55 | raise RuleException('Lack of type') 56 | return self._type 57 | 58 | @type.setter 59 | def type(self, value): 60 | value = str(value).lower().capitalize() 61 | if not hasattr(ruleparser, value): 62 | raise RuleException('Unsupported rule (%s)!' % value) 63 | self._type = value 64 | self._parser = None 65 | self._ruleparser = getattr(ruleparser, self.type) 66 | 67 | @property 68 | def ruleparser(self): 69 | if not self._ruleparser: 70 | raise RuleException('Type not setting') 71 | return self._ruleparser 72 | 73 | @property 74 | def rule(self): 75 | if isinstance(self._rule, self.ruleparser.TYPE): 76 | return self._rule 77 | raise RuleException('Lack of rule') 78 | 79 | @rule.setter 80 | def rule(self, value): 81 | rule_type = self.ruleparser.TYPE 82 | if not isinstance(value, rule_type): 83 | raise RuleException('The context of rule (%s) must be %s ! 
(%s: %s)' 84 | % (self.type, rule_type, type(value), value)) 85 | self._rule = value 86 | self._parser = None 87 | 88 | @property 89 | def subrules(self): 90 | return self._subrules 91 | 92 | @subrules.setter 93 | def subrules(self, value): 94 | if not isinstance(value, dict): 95 | raise RuleException('Sub rule should be dict!') 96 | self._subrules = value 97 | -------------------------------------------------------------------------------- /pystream/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import os 5 | import json 6 | import pytz 7 | import ConfigParser 8 | from datetime import datetime 9 | 10 | from logic import Key, And, Or, Text, _ 11 | 12 | __author__ = 'tong' 13 | 14 | 15 | ROOT_PATH = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) 16 | 17 | PATH = { 18 | 'ROOT': ROOT_PATH, 19 | 'RULE': [os.path.join(ROOT_PATH, 'rule')] 20 | } 21 | 22 | 23 | def config(filename): 24 | if not os.path.exists(filename): 25 | raise Exception('No such config file %s' % filename) 26 | cfg = ConfigParser.ConfigParser() 27 | cfg.read(filename) 28 | return cfg 29 | 30 | 31 | def parse(value): 32 | class FieldDict(object): 33 | def __init__(self, e): 34 | self.e = e 35 | 36 | def __getitem__(self, item): 37 | return self.e.get(item, Key(item)) 38 | 39 | env = { 40 | 'Text': Text, 41 | 'Key': Key, 42 | 'And': And, 43 | 'Or': Or, 44 | 'pytz': pytz, 45 | 'datetime': datetime, 46 | '_': _ 47 | } 48 | result = eval(value, env, FieldDict(env)) 49 | if isinstance(result, (tuple, list)): 50 | result = And(*result) 51 | return result 52 | 53 | 54 | def rule(name='root', rulebase=None): 55 | def _decode_list(data): 56 | rv = [] 57 | for item in data: 58 | if isinstance(item, unicode): 59 | item = item.encode('utf-8') 60 | elif isinstance(item, list): 61 | item = _decode_list(item) 62 | elif isinstance(item, dict): 63 | item = _decode_dict(item) 64 | rv.append(item) 65 | return rv 66 | 67 | def _decode_dict(data): 68 | rv = {} 69 | for key, value in data.iteritems(): 70 | if isinstance(key, unicode): 71 | key = key.encode('utf-8') 72 | if isinstance(value, unicode): 73 | value = value.encode('utf-8') 74 | elif isinstance(value, list): 75 | value = _decode_list(value) 76 | elif isinstance(value, dict): 77 | value = _decode_dict(value) 78 | rv[key] = value 79 | return rv 80 | 81 | bools = {'1': True, 'yes': True, 'true': True, 'on': True, 82 | '0': False, 'no': False, 'false': False, 'off': False} 83 | 84 | def rule_type(rl): 85 | from logparser import Rule 86 | r = Rule() 87 | r.type = rl['type'] 88 | value = rl.get('rule') 89 | if r.ruleparser.TYPE is dict: 90 | value = json.loads(value, object_hook=_decode_dict) 91 | if r.ruleparser.TYPE is bool: 92 | value = bools.get(value.lower(), False) 93 | return value 94 | for rulebase in ([rulebase] if rulebase else PATH['RULE']): 95 | try: 96 | rules = dict(config(rulebase).items(name)) 97 | break 98 | except Exception: 99 | pass 100 | else: 101 | raise Exception('No such rule %s' % name) 102 | 103 | ret = {'type': rules['type'], 'rule': rule_type(rules)} 104 | if rules.get('fields'): 105 | try: 106 | fields = json.loads(rules['fields'], object_hook=_decode_dict) 107 | except: 108 | fields = [i.strip() for i in rules['fields'].split(',')] 109 | if isinstance(fields, list): 110 | fields = dict(zip(fields, [str(i) for i in range(len(fields))])) 111 | fields.pop(None, None) 112 | fields.pop('', None) 113 | ret['fields'] = fields 114 | if 
rules.get('subrules'):
115 |         ret['subrules'] = {k: rule(v, rulebase) for k, v in
116 |                            json.loads(rules['subrules'], object_hook=_decode_dict).items()}
117 |     return ret
118 | 
--------------------------------------------------------------------------------
/pystream/executor/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import os
5 | import time
6 | import multiprocessing
7 | 
8 | __author__ = 'tong'
9 | 
10 | 
11 | def start_process(func):
12 |     proc = multiprocessing.Process(target=func)
13 |     proc.start()
14 |     return proc
15 | 
16 | 
17 | def endpoint(fp):
18 |     pos = fp.tell()
19 |     fp.seek(0, 2)
20 |     end = fp.tell()
21 |     fp.seek(pos)
22 |     return end
23 | 
24 | 
25 | class Window(object):
26 |     def __init__(self, size=None, timeout=None):
27 |         self.size = size
28 |         self.timer = time.time()
29 |         self.buffer = []
30 |         self.timeout = timeout
31 | 
32 |     @property
33 |     def empty(self):
34 |         return not bool(self.buffer)
35 | 
36 |     @property
37 |     def data(self):
38 |         data = self.buffer
39 |         self.buffer = []
40 |         return data
41 | 
42 |     def append(self, item):
43 |         if not self.buffer:
44 |             self.timer = time.time()
45 |         self.buffer.append(item)
46 | 
47 |     @property
48 |     def fulled(self):
49 |         if not self.size and not self.timeout:
50 |             return False
51 |         if not self.buffer:
52 |             return False
53 |         if self.size:
54 |             if len(self.buffer) >= self.size:
55 |                 return True
56 |         if not self.timeout:
57 |             return False
58 |         return time.time() - self.timer >= self.timeout
59 | 
60 | 
61 | def ifilter(name, path, **kwargs):
62 |     if name == 'bloom':
63 |         return BloomFilter(path, **kwargs)
64 |     if name == 'max':
65 |         return MaxFilter(path, **kwargs)
66 | 
67 | 
68 | def gzip(item):
69 |     import gzip
70 |     import StringIO
71 |     s = StringIO.StringIO()
72 |     data = gzip.GzipFile(fileobj=s, mode='w')
73 |     data.write(item)
74 |     data.close()
75 |     return s.getvalue()
76 | 
77 | 
78 | def ungzip(item):
79 |     import gzip
80 |     import StringIO
81 |     s = StringIO.StringIO(item)
82 |     data = gzip.GzipFile(fileobj=s, mode='r')
83 |     ret = data.read()
84 |     data.close()
85 |     return ret
86 | 
87 | 
88 | class IterableError(Exception):
89 |     pass
90 | 
91 | 
92 | class BloomFilter(object):
93 |     def __init__(self, cachefile, capacity=1000000, error_rate=0.001):
94 |         self.cachefile = cachefile
95 |         if os.name == 'nt' or not cachefile:
96 |             from pybloom import BloomFilter
97 |             if self.cache():
98 |                 with open(cachefile, 'r') as fp:
99 |                     self.filter = BloomFilter.fromfile(fp)
100 |             else:
101 |                 self.filter = BloomFilter(capacity=capacity, error_rate=error_rate)
102 |         elif os.name == 'posix':
103 |             from pybloomfilter import BloomFilter
104 |             if self.cache():
105 |                 self.filter = BloomFilter.open(self.cachefile)
106 |             else:
107 |                 self.filter = BloomFilter(capacity, error_rate, cachefile)
108 | 
109 |     def __contains__(self, key):
110 |         return key in self.filter
111 | 
112 |     def add(self, obj):
113 |         self.filter.add(obj)
114 |         if os.name == 'nt' and self.cachefile:
115 |             with open(self.cachefile, 'w') as fp:
116 |                 self.filter.tofile(fp)
117 | 
118 |     def cache(self):
119 |         return os.path.exists(self.cachefile or '')
120 | 
121 | 
122 | class MaxFilter(object):
123 |     def __init__(self, cachefile, is_number=False):
124 |         self.cachefile = cachefile
125 |         self.is_number = is_number
126 |         if os.path.exists(self.cachefile or ''):
127 |             with open(self.cachefile, 'r') as fp:
128 |                 text = fp.read()
129 |             self.max_value = float(text) if self.is_number else text
130 |         else:
131 |             self.max_value = None
132 | 
133 |     def __contains__(self, key):
134 |         if self.is_number:
135 |             key = float(key)
136 |         return key <= self.max_value
137 | 
138 |     def add(self, obj):
139 |         self.max_value = obj
140 |         with open(self.cachefile, 'w') as fp:
141 |             fp.write(str(self.max_value))
142 | 
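`Window` above is the buffer behind `Group` in `executor.py`: `fulled` trips once `size` items have accumulated or the oldest buffered item is `timeout` seconds old, and reading `data` drains the buffer. A minimal sketch, not part of the source tree (the sizes are illustrative):

```python
# Hedged sketch: a Window fills by count or by age, whichever comes first.
from pystream.executor.utils import Window

w = Window(size=3, timeout=60)
for n in range(3):
    w.append(n)
print w.fulled  # True: three items buffered before the timeout
print w.data    # [0, 1, 2] -- reading the property empties the buffer
print w.empty   # True
```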
--------------------------------------------------------------------------------
/pystream/logparser/logparser.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import json
5 | from . import Rule
6 | from exception import RuleException
7 | 
8 | __author__ = 'tong'
9 | 
10 | 
11 | class ParserResult(object):
12 |     def __init__(self, line, trace, result):
13 |         self._line = line
14 |         self._trace = trace
15 |         self._result = result
16 | 
17 |     def line(self):
18 |         return self._line
19 | 
20 |     def trace(self):
21 |         return self._trace
22 | 
23 |     def result(self):
24 |         return self._result
25 | 
26 | 
27 | class LogParser(object):
28 |     def __init__(self, rule):
29 |         self._rule = Rule(rule)
30 |         self._fields = rule.get('fields', {})
31 |         self._parser = {}
32 | 
33 |         for name, subrule in self._rule.subrules.iteritems():
34 |             self._parser[name] = LogParser(subrule)
35 | 
36 |     @property
37 |     def rule(self):
38 |         return self._rule
39 | 
40 |     @property
41 |     def fieldnames(self):
42 |         ret = self._fields.keys()
43 |         for name, parser in self._parser.items():
44 |             ret += parser.fieldnames
45 |         return list(set(ret))
46 | 
47 |     @property
48 |     def fieldtypes(self):
49 |         keys = self._fields.keys()
50 |         datatype = 'string'
51 |         if self.rule.type == 'Type':
52 |             datatype = self.rule.rule.lower()
53 | 
54 |         ret = {}.fromkeys(keys, datatype)
55 |         for name, parser in self._parser.items():
56 |             ret.update(parser.fieldtypes)
57 |         return ret
58 | 
59 |     def parse(self, log):
60 |         ret = self._rule.parse(log.strip('\n'))
61 |         if not ret:
62 |             return ParserResult(log, {}, {})
63 | 
64 |         trace = ret
65 |         result = {k: ret[v] for k, v in self._fields.iteritems() if v in ret}
66 |         for key in self._rule.subrules:
67 |             if key in ret:
68 |                 res = self._parser[key].parse(ret[key])
69 |                 trace[key] = res.trace()
70 |                 result.update(res.result())
71 |         return ParserResult(log, self.trace(trace), result)
72 | 
73 |     def trace(self, trace):
74 |         if not trace:
75 |             return {}
76 |         if self.rule.ruleparser.SINGLE_RET:
77 |             return trace[trace.keys()[0]]
78 |         return trace
79 | 
80 | 
81 | class RuleEditor(object):
82 |     def __init__(self, text=''):
83 |         self._text = text
84 |         self._name = None
85 |         self._rule = None
86 |         self._subrules = {}
87 |         self._results = {}
88 | 
89 |     def __str__(self):
90 |         if self._results:
91 |             ret = {subrule(): key for key, subrule in self._subrules.items() if subrule()}
92 |             return json.dumps(self._results, indent=2)+'\n'+json.dumps(ret, indent=2)
93 |         return self._text
94 | 
95 |     __repr__ = __str__
96 | 
97 |     def __call__(self, name=None):
98 |         if name:
99 |             self._name = name
100 |         return self._name
101 | 
102 |     def __getattr__(self, key):
103 |         try:
104 |             if key not in self._subrules:
105 |                 self._subrules[key] = RuleEditor(self._results.get(key))
106 |             return self._subrules[key]
107 |         except Exception, e:
108 |             raise RuleException('key must be in %s [%s: %s]' %
109 |                                 (self._results.keys(), key, e))
110 | 
111 |     __getitem__ = __getattr__
112 | 
113 |     def load(self, rule):
114 |         self._subrules = {}
115 |         self.parse(rule['type'], rule['rule'])
116 |         for key, value in rule.get('fields', {}).items():
117 |             self.__getattr__(value)(key)
118 |         for key, subrule in rule.get('subrules', 
{}).items(): 119 | self.__getattr__(key).load(subrule) 120 | 121 | def parse(self, type, rule=None, **kwargs): 122 | self._rule = Rule() 123 | self._rule.type = type 124 | self._rule.rule = rule or json.dumps(kwargs) 125 | 126 | self._subrules = {} 127 | if self._text is not None: 128 | self._results = self._rule.parse(self._text) 129 | return self._results 130 | 131 | def rule(self): 132 | if self._rule is None: 133 | return None 134 | 135 | ret = self._rule() 136 | ret['fields'] = {} 137 | for key, subrule in self._subrules.items(): 138 | r = subrule.rule() 139 | if r: 140 | ret['subrules'][key] = r 141 | 142 | if subrule(): 143 | ret['fields'][subrule()] = key 144 | return ret 145 | -------------------------------------------------------------------------------- /pystream/executor/output.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import json 5 | import logging 6 | import traceback 7 | 8 | from toolz import merge 9 | from executor import Executor 10 | 11 | from utils import IterableError 12 | 13 | 14 | __author__ = 'tong' 15 | 16 | logger = logging.getLogger('stream.logger') 17 | 18 | 19 | class Output(Executor): 20 | def handle(self, item): 21 | try: 22 | self.output(item) 23 | except Exception, e: 24 | logger.error('OUTPUT %s %s error: %s' % (self.__class__.__name__, self.name, e)) 25 | return {'data': item, 'exception': e, 'traceback': traceback.format_exc()} 26 | 27 | def output(self, item): 28 | pass 29 | 30 | def outputmany(self, items): 31 | for item in items: 32 | self.output(item) 33 | 34 | 35 | class Kafka(Output): 36 | def __init__(self, topic, server, client=None, name=None, ignore_exc=None, **kwargs): 37 | try: 38 | import kafka 39 | except ImportError: 40 | raise Exception('Lack of kafka module, try to execute `pip install kafka-python>=1.3.1` install it') 41 | 42 | client = client or kafka.SimpleClient 43 | self._producer = None 44 | self._topic = topic 45 | try: 46 | self._kafka = client(server, **kwargs) 47 | except Exception, e: 48 | raise Exception('kafka client init failed: %s' % e) 49 | self.producer(kafka.SimpleProducer) 50 | super(Kafka, self).__init__(name, ignore_exc) 51 | 52 | def producer(self, producer, **kwargs): 53 | try: 54 | self._producer = producer(self._kafka, **kwargs) 55 | except Exception, e: 56 | raise Exception('kafka producer init failed: %s' % e) 57 | 58 | def output(self, item): 59 | if not self._producer: 60 | raise Exception('No producer init') 61 | logger.info('OUTPUT INSERT Kafka 1: %s' % self._producer.send_messages(self._topic, item)) 62 | 63 | def outputmany(self, items): 64 | if not self._producer: 65 | raise Exception('No producer init') 66 | logger.info('OUTPUT INSERT Kafka %s: %s' % (len(items), self._producer.send_messages(self._topic, *items))) 67 | 68 | def close(self): 69 | if self._producer: 70 | del self._producer 71 | self._producer = None 72 | 73 | 74 | class HTTPRequest(Output): 75 | def __init__(self, server, headers=None, method='GET', request_args=None, timeout=None, 76 | catch_exc=None, **kwargs): 77 | from .. 
import __version__ 78 | self.server = server 79 | self.method = method.upper() 80 | self.headers = headers or {} 81 | self.timeout = timeout 82 | self.request_args = request_args or {} 83 | self.catch_exc = catch_exc or (lambda x: not bool(x)) 84 | self.headers.setdefault('User-Agent', 'python-stream %s HTTPRequest' % __version__) 85 | super(HTTPRequest, self).__init__(**kwargs) 86 | 87 | def output(self, item): 88 | import requests 89 | timeout = (self.timeout, self.timeout) if self.timeout else None 90 | ret = requests.request(self.method, self.server, headers=self.headers, 91 | timeout=timeout, **merge(self.arguments(item), self.request_args)) 92 | if self.catch_exc(ret): 93 | raise Exception(ret) 94 | logger.info('OUTPUT INSERT Request 1: %s' % ret) 95 | 96 | def outputmany(self, items): 97 | import grequests 98 | tasks = [grequests.request( 99 | self.method, self.server, headers=self.headers, **self.arguments(item) 100 | ) for item in items] 101 | ret = grequests.map(tasks, gtimeout=self.timeout) 102 | if not any([self.catch_exc(_) for _ in ret]): 103 | logger.info('OUTPUT INSERT Request %s' % len(ret)) 104 | return 105 | errors = [] 106 | for i, _ in enumerate(ret): 107 | if _ is None: 108 | errors.append({'data': items[i], 'exception': tasks[i].exception, 'traceback': tasks[i].traceback}) 109 | elif self.catch_exc(_): 110 | errors.append({'data': items[i], 'exception': Exception(_), 'traceback': None}) 111 | raise IterableError(*errors) 112 | 113 | def arguments(self, item): 114 | if self.method == 'GET': 115 | return {'params': item} 116 | if self.method == 'POST': 117 | return {'data': self.data(item)} 118 | 119 | def data(self, data): 120 | ctype = self.headers.get('Content-Type') 121 | if ctype == 'application/json': 122 | return json.dumps(data, separators=(',', ':')) 123 | return data 124 | 125 | 126 | class File(Output): 127 | def __init__(self, filename, **kwargs): 128 | self.filename = filename 129 | self.stream = open(self.filename, 'a') 130 | super(File, self).__init__(**kwargs) 131 | 132 | def __del__(self): 133 | self.stream.close() 134 | 135 | def output(self, item): 136 | self.stream.write(item+'\n') 137 | self.stream.flush() 138 | 139 | def outputmany(self, items): 140 | self.stream.writelines('\n'.join(items)+'\n') 141 | self.stream.flush() 142 | 143 | 144 | class Csv(File): 145 | def __init__(self, filename, name=None, ignore_exc=True, **kwargs): 146 | import csv 147 | super(Csv, self).__init__(filename, name=name, ignore_exc=ignore_exc, **kwargs) 148 | self.writer = csv.writer(self.stream, **kwargs) 149 | 150 | def output(self, item): 151 | self.writer.writerow(item) 152 | 153 | def outputmany(self, items): 154 | self.writer.writerows(items) 155 | 156 | 157 | class Screen(Output): 158 | def output(self, item): 159 | print item 160 | 161 | def outputmany(self, items): 162 | print '\n'.join(items)+'\n' 163 | 164 | 165 | Stdout = Screen 166 | 167 | 168 | class Null(Output): 169 | pass 170 | -------------------------------------------------------------------------------- /pystream/logparser/ruleparser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import re 5 | import sys 6 | import json 7 | import csv 8 | import urllib 9 | from pyparsing import nestedExpr, OneOrMore, CharsNotIn, Or, Empty 10 | from datatype import Datatype 11 | 12 | __author__ = 'tong' 13 | 14 | reload(sys) 15 | sys.setdefaultencoding("utf-8") 16 | 17 | 18 | class Rulebase(object): 19 | TYPE = 
basestring 20 | SINGLE_RET = False 21 | 22 | 23 | class Regex(Rulebase): 24 | def __init__(self, rule): 25 | self.reg = rule 26 | self.rule = re.compile(rule) 27 | 28 | def parse(self, log): 29 | res = re.match(self.rule, log) 30 | ret = res.groupdict() 31 | if not ret: 32 | ret = {str(i): value for i, value in enumerate(res.groups())} 33 | return ret 34 | 35 | 36 | class Form(Rulebase): 37 | TYPE = dict 38 | 39 | def __init__(self, rule): 40 | self.rule = rule 41 | self.opener = self.rule['opener'] 42 | self.closer = self.rule['closer'] 43 | self.columns = self.rule.get('columns', -1) 44 | 45 | nested = nestedExpr(opener=self.opener, closer=self.closer, 46 | content=CharsNotIn(self.opener + self.closer)) 47 | if self.columns < 0: 48 | self.nested = OneOrMore(nested) 49 | else: 50 | self.nested = nested * self.columns + Or([CharsNotIn('\n'), Empty()]) 51 | 52 | def merge(self, items): 53 | return '%s%s%s' % (self.opener, 54 | ''.join([self.merge(item) 55 | if isinstance(item, list) 56 | else str(item) 57 | for item in items]), 58 | self.closer) 59 | 60 | def parse(self, log): 61 | ret = self.nested.parseString(log).asList() 62 | start = len(self.opener) 63 | end = -len(self.closer) 64 | return {str(i): self.merge(value)[start:end].strip() 65 | for i, value in enumerate(ret)} 66 | 67 | 68 | class Split(Rulebase): 69 | TYPE = dict 70 | 71 | def __init__(self, rule): 72 | self.rule = rule 73 | self.separator = self.rule['separator'] 74 | self.maxsplit = self.rule.get('maxsplit', -1) 75 | 76 | def parse(self, log): 77 | ret = log.split(self.separator, self.maxsplit) 78 | return {str(i): value.strip() for i, value in enumerate(ret)} 79 | 80 | 81 | class Type(Rulebase): 82 | SINGLE_RET = True 83 | 84 | def __init__(self, rule): 85 | self.rule = Datatype.get(rule) 86 | 87 | def parse(self, log): 88 | return {'0': self.rule(log).data} 89 | 90 | 91 | class Kv(Rulebase): 92 | TYPE = dict 93 | 94 | def __init__(self, rule): 95 | self.rule = rule 96 | self.separator = self.rule['separator'] 97 | self.linker = self.rule.get('linker', '=') 98 | self.pattern = re.compile(r'^[a-zA-Z_]\w*$') 99 | self.not_check = not self.rule.get('strict', False) 100 | 101 | def parse(self, log): 102 | ret = log.split(self.separator) 103 | ret = [value.split(self.linker, 1) for value in ret if self.linker in value] 104 | return {key.strip(): value.strip() for key, value in ret 105 | if self.not_check or re.match(self.pattern, key.strip())} 106 | 107 | 108 | class Macro(Rulebase): 109 | TYPE = dict 110 | SINGLE_RET = True 111 | 112 | def __init__(self, rule): 113 | self.rule = rule 114 | 115 | def parse(self, log): 116 | return {'0': self.rule.get(log, log)} 117 | 118 | 119 | class Csv(Rulebase): 120 | TYPE = dict 121 | 122 | def __init__(self, rule): 123 | self.data = Csv.Iterator() 124 | self.rule = rule 125 | self.csv = csv.reader(self.data, **rule) 126 | 127 | def parse(self, log): 128 | self.data.append(log) 129 | ret = self.csv.next() 130 | return {str(i): item for i, item in enumerate(ret)} 131 | 132 | class Iterator(object): 133 | def __init__(self): 134 | self.data = '' 135 | 136 | def append(self, data): 137 | self.data = data 138 | 139 | def __iter__(self): 140 | return self 141 | 142 | def next(self): 143 | data = self.data 144 | if data is None: 145 | raise StopIteration() 146 | self.data = None 147 | return data 148 | 149 | 150 | class Encode(Rulebase): 151 | SINGLE_RET = True 152 | 153 | def __init__(self, rule): 154 | self.rule = rule 155 | 156 | def parse(self, log): 157 | if self.rule == 'urlquote': 158 | 
return {'0': urllib.quote(log)}
159 |         if self.rule == 'urlquote_plus':
160 |             return {'0': urllib.quote_plus(log)}
161 |         return {'0': log.encode(self.rule)}
162 | 
163 | 
164 | class Decode(Rulebase):
165 |     SINGLE_RET = True
166 | 
167 |     def __init__(self, rule):
168 |         self.rule = rule
169 | 
170 |     def parse(self, log):
171 |         if self.rule == 'urlquote':
172 |             return {'0': urllib.unquote(log)}
173 |         if self.rule == 'urlquote_plus':
174 |             return {'0': urllib.unquote_plus(log)}
175 |         return {'0': log.decode(self.rule)}
176 | 
177 | 
178 | class Json(Rulebase):
179 |     TYPE = bool
180 | 
181 |     def __init__(self, rule):
182 |         self.rule = rule
183 | 
184 |     def parse(self, log):
185 |         ret = json.loads(log)
186 |         if self.rule:
187 |             for key, value in ret.items():
188 |                 if isinstance(value, (list, dict)):
189 |                     ret[key] = json.dumps(value)
190 |         return ret
191 | 
192 | 
193 | class Endswith(Rulebase):
194 |     TYPE = dict
195 |     SINGLE_RET = True
196 | 
197 |     def __init__(self, rule):
198 |         self.rule = rule
199 |         self.suffix = self.rule['suffix']
200 |         self.start = self.rule.get('start')
201 |         self.end = self.rule.get('end')
202 | 
203 |     def parse(self, log):
204 |         ret = log.endswith(self.suffix, self.start, self.end)
205 |         return {str(ret): log}
206 | 
207 | 
208 | class Startswith(Rulebase):
209 |     TYPE = dict
210 |     SINGLE_RET = True
211 | 
212 |     def __init__(self, rule):
213 |         self.rule = rule
214 |         self.suffix = self.rule['suffix']
215 |         self.start = self.rule.get('start')
216 |         self.end = self.rule.get('end')
217 | 
218 |     def parse(self, log):
219 |         ret = log.startswith(self.suffix, self.start, self.end)
220 |         return {str(ret): log}
221 | 
222 | 
223 | class Contain(Rulebase):
224 |     TYPE = dict
225 |     SINGLE_RET = True
226 | 
227 |     def __init__(self, rule):
228 |         self.rule = rule
229 |         self.suffix = self.rule['suffix']
230 |         self.start = self.rule.get('start')
231 |         self.end = self.rule.get('end')
232 | 
233 |     def parse(self, log):
234 |         ret = self.suffix in log[self.start: self.end]
235 |         return {str(ret): log}
236 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # python-stream
2 | 
3 | ### About
4 | A streaming data framework for data cleaning, preprocessing, migration and similar scenarios.
5 | 
6 | A more elegant way to process streaming data.
7 | 
8 | ### Installation
9 | -----------
10 | ```shell
11 | pip install git+https://github.com/sandabuliu/python-stream.git
12 | ```
13 | or
14 | 
15 | ```shell
16 | git clone https://github.com/sandabuliu/python-stream.git
17 | cd python-stream
18 | python setup.py install
19 | ```
20 | 
21 | 
22 | ### QuickStart
23 | ---------------
24 | #### Examples
25 | 
26 | ##### Word Count
27 | 
28 | ```python
29 | from pystream.executor.source import Memory
30 | from pystream.executor.executor import Map, Iterator, ReducebyKey
31 | 
32 | data = Memory([
33 |     'Wikipedia is a free online encyclopedia, created and edited by volunteers around the world and hosted by the Wikimedia Foundation.',
34 |     'Search thousands of wikis, start a free wiki, compare wiki software.',
35 |     'The official Wikipedia Android app is designed to help you find, discover, and explore knowledge on Wikipedia.'
36 | ])
37 | p = data | Map(lambda x: x.split(' ')) | Iterator(lambda x: (x.strip('.,'), 1)) | ReducebyKey(lambda x, y: x+y)
38 | result = {}
39 | for key, value in p:
40 |     result[key] = value
41 | print result.items()
42 | ```
43 | 
44 | Output
45 | 
46 | ```python
47 | [('and', 3), ('wiki', 2), ('compare', 1), ('help', 1), ('is', 2), ('Wikipedia', 3), ('discover', 1), ('hosted', 1), ('Android', 1), ('find', 1), ('Foundation', 1), ('knowledge', 1), ('to', 1), ('by', 2), ('start', 1), ('online', 1), ('you', 1), ('thousands', 1), ('app', 1), ('edited', 1), ('Search', 1), ('around', 1), ('free', 2), ('explore', 1), ('designed', 1), ('world', 1), ('The', 1), ('the', 2), ('a', 2), ('on', 1), ('created', 1), ('Wikimedia', 1), ('official', 1), ('encyclopedia', 1), ('of', 1), ('wikis', 1), ('volunteers', 1), ('software', 1)]
48 | ```
49 | 
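Pipelines like the one above are assembled with `|`: `Executor.__or__` plugs the left-hand stage in as the right-hand stage's source and returns the right-most stage, which is then iterated (or run with `start()`). Any `Executor` subclass with a `handle` method can act as a stage — a hedged sketch (the `Double` stage is illustrative, not part of the library):

```python
# Hedged sketch: a custom pipeline stage is an Executor with a handle() method.
from pystream.executor.source import Memory
from pystream.executor.executor import Executor

class Double(Executor):
    def handle(self, item):
        return item * 2  # returning None instead would drop the item

for n in Memory([1, 2, 3]) | Double():
    print n  # 2, 4, 6
```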
50 | ##### Computing π
51 | 
52 | ```python
53 | from random import random
54 | from pystream.executor.source import Faker
55 | from pystream.executor.executor import Executor, Map, Group
56 | 
57 | class Pi(Executor):
58 |     def __init__(self, **kwargs):
59 |         super(Pi, self).__init__(**kwargs)
60 |         self.counter = 0
61 |         self.result = 0
62 | 
63 |     def handle(self, item):
64 |         self.counter += 1
65 |         self.result += item
66 |         return 4.0*self.result/self.counter
67 | 
68 | s = Faker(lambda: random(), 100000) | Map(lambda x: x*2-1) | Group(size=2) | Map(lambda x: 1 if x[0]**2+x[1]**2 <= 1 else 0) | Pi()
69 | 
70 | res = None
71 | for _ in s:
72 |     res = _
73 | print res
74 | ```
75 | 
76 | Output
77 | 
78 | ```python
79 | 3.14728
80 | ```
81 | 
82 | ##### Sorting
83 | 
84 | ```python
85 | from random import randint
86 | from pystream.executor.source import Memory
87 | from pystream.executor.executor import Sort
88 | m = Memory([randint(0, 100) for i in range(10)]) | Sort()
89 | 
90 | for i in m:
91 |     print list(i)
92 | ```
93 | 
94 | Output
95 | 
96 | ```
97 | [94]
98 | [94, 99]
99 | [18, 94, 99]
100 | [18, 40, 94, 99]
101 | [18, 26, 40, 94, 99]
102 | [18, 26, 40, 63, 94, 99]
103 | [18, 26, 40, 63, 83, 94, 99]
104 | [3, 18, 26, 40, 63, 83, 94, 99]
105 | [3, 18, 26, 40, 63, 83, 83, 94, 99]
106 | [3, 16, 18, 26, 40, 63, 83, 83, 94, 99]
107 | ```
108 | 
109 | ##### Using with Hadoop
110 | ###### wordcount
111 | 
112 | ###### mapper.py
113 | ```python
114 | from pystream.executor.source import Stdin
115 | from pystream.executor.executor import Map, Iterator
116 | from pystream.executor.output import Stdout
117 | 
118 | s = Stdin() | Map(lambda x: x.strip().split()) | Iterator(lambda x: "%s\t1" % x) | Stdout()
119 | s.start()
120 | ```
121 | 
122 | ###### reducer.py
123 | ```python
124 | from pystream.executor.source import Stdin
125 | from pystream.executor.executor import Map, ReducebySortedKey
126 | from pystream.executor.output import Stdout
127 | 
128 | s = Stdin() | Map(lambda x: x.strip().split('\t')) | ReducebySortedKey(lambda x, y: int(x)+int(y)) | Map(lambda x: '%s\t%s' % x) | Stdout()
129 | s.start()
130 | ```
131 | 
132 | ##### Parsing NGINX logs
133 | ```python
134 | from pystream.config import rule
135 | from pystream.executor.source import File
136 | from pystream.executor.executor import Parser
137 | s = File('/var/log/nginx/access.log') | Parser(rule('nginx'))
138 | 
139 | for item in s:
140 |     print item
141 | 
142 | ```
143 | 
144 | Output
145 | 
146 | ```
147 | {'status': '400', 'body_bytes_sent': 173, 'remote_user': '-', 'http_referer': '-', 'remote_addr': '198.35.46.20', 'request': '\\x05\\x01\\x00', 'version': None, 'http_user_agent': '-', 'time_local': datetime.datetime(2017, 2, 15, 13, 11, 3), 'path': None, 'method': None}
148 | {'status': '400', 'body_bytes_sent': 173, 'remote_user': '-', 'http_referer': '-', 'remote_addr': '198.35.46.20', 'request': '\\x05\\x01\\x00', 'version': None, 'http_user_agent': '-', 'time_local': datetime.datetime(2017, 2, 15, 13, 11, 3), 'path': None, 'method': None}
149 | {'status': '400', 'body_bytes_sent': 173, 'remote_user': '-', 'http_referer': '-', 'remote_addr': '198.35.46.20', 'request': '\\x05\\x01\\x00', 'version': None, 'http_user_agent': '-', 'time_local': datetime.datetime(2017, 2, 15, 13, 11, 3), 'path': None, 'method': None}
150 | {'status': '400', 'body_bytes_sent': 173, 'remote_user': '-', 'http_referer': '-', 'remote_addr': '198.35.46.20', 'request': '\\x05\\x01\\x00', 'version': None, 'http_user_agent': '-', 'time_local': datetime.datetime(2017, 2, 15, 13, 11, 3), 'path': None, 'method': None}
151 | {'status': '400', 'body_bytes_sent': 173, 'remote_user': '-', 'http_referer': '-', 'remote_addr': '198.35.46.20', 'request': '\\x05\\x01\\x00', 'version': None, 'http_user_agent': '-', 'time_local': datetime.datetime(2017, 2, 15, 13, 11, 3), 'path': None, 'method': None}
152 | ```
153 | 
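`rule('nginx')` above resolves the `[nginx]` section of the bundled `pystream/rule` file via `pystream.config.rule`. `Parser` also accepts a plain rule dict, so a one-off format needs no rule file — a hedged sketch using the `split` rule type from `ruleparser.py` (the field names are illustrative):

```python
# Hedged sketch: an inline rule dict instead of a named rule from pystream/rule.
from pystream.executor.source import Memory
from pystream.executor.executor import Parser

inline_rule = {
    'type': 'split',
    'rule': {'separator': ' ', 'maxsplit': 2},  # Split yields {'0': ..., '1': ..., '2': ...}
    'fields': {'level': '0', 'code': '1', 'message': '2'},
}
s = Memory(['ERROR 500 upstream timed out']) | Parser(inline_rule)
for item in s:
    print item  # {'code': '500', 'message': 'upstream timed out', 'level': 'ERROR'}
```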
154 | ##### Exporting database data
155 | 
156 | ```python
157 | from sqlalchemy import create_engine
158 | from pystream.executor.source import SQL
159 | from pystream.executor.output import Csv
160 | from pystream.executor.wraps import Batch
161 | 
162 | engine = create_engine('mysql://root:123456@127.0.0.1:3306/test')
163 | conn = engine.connect()
164 | s = SQL(conn, 'select * from faker') | Batch(Csv('/tmp/output'))
165 | 
166 | for item in s:
167 |     print item['data']
168 |     print item['exception']
169 | conn.close()
170 | ```
171 | 
172 | #### Data sources
173 | ##### Reading from files
174 | 
175 | ```python
176 | from pystream.executor.source import Tail, File, Csv
177 | Tail('/var/log/nginx/access.log')
178 | File('/var/log/nginx/*.log')
179 | Csv('/tmp/test*.csv')
180 | ```
181 | 
182 | ##### Reading from TCP streams
183 | 
184 | ```python
185 | from pystream.executor.source import TCPClient
186 | TCPClient('/tmp/pystream.sock')
187 | TCPClient(('127.0.0.1', 10000))
188 | ```
189 | 
190 | ##### Reading from Python objects
191 | 
192 | ```python
193 | from Queue import Queue as Q
194 | from random import randint
195 | from pystream.executor.source import Memory, Faker, Queue
196 | queue = Q(10)
197 | 
198 | Memory([1, 2, 3, 4])
199 | Faker(randint, 1000)
200 | Queue(queue)
201 | ```
202 | 
203 | ##### Reading from common systems
204 | 
205 | ```python
206 | from pystream.executor.source import SQL, Kafka
207 | SQL(conn, 'select * from faker')   # read from a database
208 | Kafka('topic1', '127.0.0.1:9092')  # read from kafka
209 | ```
210 | 
211 | #### Data outputs
212 | ##### Writing to files
213 | 
214 | ```python
215 | from pystream.executor.output import File, Csv
216 | File('/tmp/output')
217 | Csv('/tmp/output.csv')
218 | ```
219 | 
220 | ##### Writing via HTTP
221 | 
222 | ```python
223 | from pystream.executor.output import HTTPRequest
224 | HTTPRequest('http://127.0.0.1/api/data')
225 | ```
226 | 
227 | ##### Writing to Kafka
228 | 
229 | ```python
230 | from pystream.executor.output import Kafka
231 | Kafka('topic', '127.0.0.1:9092')
232 | ```
233 | 
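Before the middleware examples, one convention shared by every output: when a write fails, `Output.handle` catches the exception and emits a `{'data': ..., 'exception': ..., 'traceback': ...}` dict downstream instead of raising — which is what the database-export example above unpacks. A hedged sketch (the integer item is deliberately unwritable, and the path is illustrative):

```python
# Hedged sketch: failed writes surface as error dicts on the pipeline.
from pystream.executor.source import Memory
from pystream.executor.output import File

s = Memory(['ok-line', 12345]) | File('/tmp/pystream-demo.out')
for error in s:  # only the failed item is yielded
    print error['data'], error['exception']  # 12345 TypeError(...)
```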
234 | #### Middleware
235 | ##### Queue
236 | ```python
237 | from pystream.executor.source import Tail
238 | from pystream.executor.output import Stdout
239 | from pystream.executor.middleware import Queue
240 | 
241 | s = Tail('/Users/tongbin01/PycharmProjects/python-stream/README.md') | Queue() | Stdout()
242 | s.start()
243 | ```
244 | 
245 | ##### Subscribe
246 | 
247 | ```python
248 | from random import randint
249 | from pystream.executor.source import Tail
250 | from pystream.executor.executor import Map
251 | from pystream.executor.output import Stdout
252 | 
253 | from pystream.executor.middleware import Subscribe
254 | from pystream.executor.wraps import Daemonic
255 | 
256 | sub = Tail('/var/log/messages') | Map(lambda x: (str(randint(1, 2)), x.strip())) | Subscribe()
257 | Daemonic(sub).start()
258 | 
259 | s = sub['1'] | Map(lambda x: x.strip()) | Stdout()
260 | s.start()
261 | ```
262 | 
263 | ### TodoList
264 | 
265 | * Subscribe: handle client timeouts in the subscriber
266 | * Parallel computation
267 | * Async HTTP output / async sources
268 | * Additional basic outputs and sources
269 | * Support for more common systems, e.g. redis, kafka, flume, log-stash and various databases
270 | 
271 | Copyright © 2017 [g_tongbin@foxmail.com](mailto:g_tongbin@foxmail.com)
--------------------------------------------------------------------------------
/pystream/executor/executor.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import os
5 | import re
6 | import time
7 | import json
8 | import msgpack
9 | import logging
10 | 
11 | from .event import Event, is_event
12 | from .utils import Window, gzip, ungzip
13 | from ..logic import Key, Or, And
14 | from ..utils import DefaultParser
15 | from ..logparser.logparser import LogParser
16 | 
17 | __author__ = 'tong'
18 | 
19 | logger = logging.getLogger('stream.logger')
20 | 
21 | 
22 | class Executor(object):
23 |     def __init__(self, name=None, ignore_exc=True, **kwargs):
24 |         self.name = name or ('%s-%s' % (self.__class__.__name__, id(self)))
25 |         self.ignore_exc = ignore_exc
26 |         self._source = None
27 |         self._output = None
28 |         self.kwargs = kwargs
29 | 
30 |     def __iter__(self):
31 |         for item in self.source:
32 |             try:
33 |                 if is_event(item):
34 |                     if item == Event.SKIP:
35 |                         continue
36 |                     result = self.handle_event(item)
37 |                     if result is not None:
38 |                         yield result
39 |                     if self._output:
40 |                         yield item
41 |                     continue
42 | 
43 |                 result = self.handle(item)
44 |                 if result is not None:
45 |                     yield result
46 |             except BaseException, e:
47 |                 if self.ignore_exc:
48 |                     self.handle_exception(item, e)
49 |                 else:
50 |                     raise
51 | 
52 |     def handle(self, item):
53 |         return item
54 | 
55 |     def handle_exception(self, item, e):
56 |         logger.warn('%s (%s) handle failed, cause: %s, data: %s' % (self.name, self.__class__.__name__, e, [item]))
57 | 
58 |     def handle_event(self, event):
59 |         return None
60 | 
61 |     def start(self):
62 |         for _ in self:
63 |             continue
64 | 
65 |     def __or__(self, executor):
66 |         source = executor  # type: Executor
67 |         while source._source:
68 |             source = source._source
69 |         source._source = self
70 |         self._output = source
71 |         return executor
72 | 
73 |     @property
74 |     def source(self):
75 |         if not self._source:
76 |             raise Exception('Lack of data source!')
77 |         return self._source
78 | 
79 | 
80 | class Parser(Executor):
81 |     def __init__(self, rule=None, trace=False, **kwargs):
82 |         super(Parser, self).__init__(**kwargs)
83 |         self.trace = trace
84 |         self.parser = LogParser(rule) if rule else DefaultParser()
85 | 
86 |     def handle(self, item):
87 |         if not item:
88 |             return item
89 |         result = self.parser.parse(item)
90 |         if not result:
91 |             return result
92 |         return result.trace() if self.trace else result.result()
93 | 
94 |     def handle_exception(self, item, e):
95 |         logger.warn(
96 |             'PARSER PASS %s %s error: %s' % (self.name, type(e).__name__, e),
97 |             extra={'extra': dict(line=item.strip(), type=e.type, rule=e.rule, data=e.line)}
98 |         )
99 | 
100 | 
101 | class Filter(Executor):
102 |     def __init__(self, *args, **kwargs):
103 |         super(Filter, 
self).__init__(**kwargs) 104 | if any([not isinstance(i, (Key, Or, And)) for i in args]): 105 | raise Exception('Filter args should be in (`Field`, `Or`, `And`)') 106 | if not args: 107 | self.filter = type('name'.encode('utf8'), (object, ), {'result': lambda s, x: True})() 108 | elif len(args) == 1: 109 | self.filter = args[0] 110 | else: 111 | self.filter = And(*args) 112 | 113 | def handle(self, item): 114 | if self.filter.result(item): 115 | return item 116 | else: 117 | return None 118 | 119 | def handle_exception(self, item, e): 120 | logger.warn('Filter %s( %s ) Failed, cause: %s' % (self.name, str(self.filter), e)) 121 | 122 | 123 | class Map(Executor): 124 | def __init__(self, function, **kwargs): 125 | super(Map, self).__init__(**kwargs) 126 | self.func = function 127 | 128 | def handle(self, item): 129 | return self.func(item) 130 | 131 | 132 | class Sort(Executor): 133 | def __init__(self, maxlen=None, key=None, desc=False, maxsize=None, cache_path=None, **kwargs): 134 | super(Sort, self).__init__(**kwargs) 135 | self.maxlen = maxlen 136 | self.maxsize = maxsize 137 | self.items = [] 138 | self.files = [] 139 | self.length = 0 140 | self.key = key or (lambda x: x) 141 | self.desc = desc 142 | self.cache_path = cache_path or '/tmp/pystream_sort_%s' % time.time() 143 | 144 | def handle(self, item): 145 | self.insert(item) 146 | if self.maxlen: 147 | if len(self.items) > self.maxlen: 148 | self.items = self.items[:self.maxlen] 149 | elif self.maxsize and self.length > self.maxsize: 150 | if not os.path.exists(self.cache_path): 151 | os.makedirs(self.cache_path) 152 | filename = os.path.join(self.cache_path, str(time.time())) 153 | fp = open(filename, 'w') 154 | fp.write('\n'.join([_ for _ in self.items])) 155 | fp.close() 156 | self.files.append(filename) 157 | self.length = 0 158 | self.items = [] 159 | return self.Iterable(self) 160 | 161 | def insert(self, item): 162 | l = 0 163 | r = len(self.items)-1 164 | compare = (lambda x, y: x > y) if self.desc else (lambda x, y: x < y) 165 | while l <= r: 166 | m = (l + r)/2 167 | value1 = self.key(self.items[m]) 168 | value2 = self.key(item) 169 | if value1 == value2: 170 | break 171 | elif compare(value1, value2): 172 | l = m + 1 173 | else: 174 | r = m - 1 175 | self.items.insert((l + r)/2+1, item) 176 | if self.maxsize: 177 | self.length += len(item) 178 | 179 | class Iterable(object): 180 | def __init__(self, exe): 181 | self.exe = exe 182 | self.func = max if exe.desc else min 183 | 184 | def __iter__(self): 185 | if not self.exe.maxsize: 186 | for item in self.exe.items: 187 | yield item 188 | return 189 | fps = [iter(self.exe.items)] if self.exe.items else [] 190 | fps += [open(_) for _ in self.exe.files] 191 | fps = [[_, next(_).strip('\n')] for _ in fps] 192 | while fps: 193 | item = self.func(fps, key=(lambda x: self.exe.key(x[1]))) 194 | yield item[1] 195 | try: 196 | item[1] = next(item[0]).strip('\n') 197 | except StopIteration: 198 | fps.remove(item) 199 | 200 | 201 | class Reduce(Executor): 202 | def __init__(self, function, **kwargs): 203 | super(Reduce, self).__init__(**kwargs) 204 | self.func = function 205 | self.data = None 206 | 207 | def handle(self, item): 208 | if self.data is None: 209 | self.data = item 210 | else: 211 | self.data = self.func(self.data, item) 212 | return self.data 213 | 214 | 215 | class ReducebyKey(Executor): 216 | def __init__(self, function, **kwargs): 217 | super(ReducebyKey, self).__init__(**kwargs) 218 | self.func = function 219 | self.data = {} 220 | 221 | def handle(self, item): 222 | 
key, item = item
223 |         if key not in self.data:
224 |             self.data[key] = item
225 |         else:
226 |             self.data[key] = self.func(self.data[key], item)
227 |         return key, self.data[key]
228 | 
229 | 
230 | class ReducebySortedKey(Executor):
231 |     def __init__(self, function, **kwargs):
232 |         super(ReducebySortedKey, self).__init__(**kwargs)
233 |         self.func = function
234 |         self.key = None
235 |         self.value = None
236 | 
237 |     def handle(self, item):
238 |         key, value = item
239 |         if key == self.key:
240 |             self.value = self.func(self.value, value)
241 |             return None
242 |         result = (self.key, self.value)
243 |         self.key = key
244 |         self.value = value
245 |         if result[0] is not None:  # a falsy key ('' or 0) is still a real key
246 |             return result
247 | 
248 |     def __iter__(self):
249 |         for item in super(ReducebySortedKey, self).__iter__():
250 |             yield item
251 |         if self.key is not None:  # flush the final group, falsy keys included
252 |             yield self.key, self.value
253 | 
254 | 
255 | class Group(Executor):
256 |     def __init__(self, function=None, size=None, timeout=None, window=None, **kwargs):
257 |         super(Group, self).__init__(**kwargs)
258 |         self.func = function or (lambda x: x)
259 |         if window:
260 |             self.window = window
261 |         else:
262 |             self.window = Window(size, timeout)
263 | 
264 |     def __iter__(self):
265 |         for item in super(Group, self).__iter__():
266 |             yield item
267 |         if not self.window.empty:
268 |             yield self.func(self.window.data)
269 | 
270 |     def handle_event(self, event):
271 |         if event == Event.IDLE:
272 |             if self.window.fulled:
273 |                 return self.func(self.window.data)
274 | 
275 |     def handle(self, item):
276 |         self.window.append(item)
277 |         if self.window.fulled:
278 |             return self.func(self.window.data)
279 |         return None
280 | 
281 | 
282 | class JsonDumps(Executor):
283 |     def handle(self, item):
284 |         return json.dumps(item, **self.kwargs)
285 | 
286 | 
287 | class JsonLoads(Executor):
288 |     def handle(self, item):
289 |         return json.loads(item, **self.kwargs)
290 | 
291 | 
292 | class MsgpackDumps(Executor):
293 |     def handle(self, item):
294 |         return msgpack.dumps(item, **self.kwargs)
295 | 
296 | 
297 | class MsgpackLoads(Executor):
298 |     def handle(self, item):
299 |         return msgpack.loads(item, **self.kwargs)
300 | 
301 | 
302 | class GZip(Executor):
303 |     def handle(self, item):
304 |         return gzip(item)
305 | 
306 | 
307 | class UnGZip(Executor):
308 |     def handle(self, item):
309 |         return ungzip(item)
310 | 
311 | 
312 | class Encode(Executor):
313 |     def __init__(self, encoding, **kwargs):
314 |         super(Encode, self).__init__(**kwargs)
315 |         self.encoding = encoding
316 | 
317 |     def handle(self, item):
318 |         return item.encode(self.encoding)
319 | 
320 | 
321 | class Decode(Executor):
322 |     def __init__(self, encoding, **kwargs):
323 |         super(Decode, self).__init__(**kwargs)
324 |         self.encoding = encoding
325 | 
326 |     def handle(self, item):
327 |         return item.decode(self.encoding)
328 | 
329 | 
330 | class Iterator(Executor):
331 |     def __init__(self, function=None, **kwargs):
332 |         super(Iterator, self).__init__(**kwargs)
333 |         self.func = function or (lambda x: x)
334 | 
335 |     def __iter__(self):
336 |         iterator = super(Iterator, self).__iter__()
337 |         for items in iterator:
338 |             if is_event(items):  # events must never fall through to the item loop below
339 |                 if self._output: yield items
340 |                 continue
341 |             for item in items:
342 |                 yield self.func(item)
343 | 
344 | 
345 | class Regex(Executor):
346 |     def __init__(self, pattern, **kwargs):
347 |         super(Regex, self).__init__(**kwargs)
348 |         self.regex = pattern
349 |         self.pattern = re.compile(pattern)
350 | 
351 |     def handle(self, item):
352 |         res = self.pattern.match(item)
353 |         ret = res.groupdict()
354 |         if not ret:
355 |             return
res.groups() 356 | return ret 357 | -------------------------------------------------------------------------------- /pystream/executor/source.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import os 5 | import sys 6 | import csv 7 | import time 8 | import glob 9 | import logging 10 | 11 | from executor import Executor 12 | from event import Event, is_event 13 | from utils import ifilter, endpoint 14 | 15 | __author__ = 'tong' 16 | 17 | logger = logging.getLogger('stream.logger') 18 | 19 | 20 | def sleep(delta): 21 | time.sleep(0 if delta <= 0 else delta) 22 | 23 | 24 | class Tail(Executor): 25 | def __init__(self, path, wait=1, times=3, startline=None, position=None, **kwargs): 26 | super(Tail, self).__init__(**kwargs) 27 | path = os.path.abspath(path) 28 | if not os.path.exists(path): 29 | raise Exception('logsource init failed, cause: No such file %s' % path) 30 | self.process = None 31 | self.path = path 32 | self.stream = None 33 | self.pos = 0 34 | self.count = 0 35 | self.lineno = 0 36 | self.wait = wait 37 | self.times = times 38 | self.startline = startline or 0 39 | self.position = position or 0 40 | 41 | def open(self, filename): 42 | if self.stream and not self.stream.closed: 43 | self.stream.close() 44 | self.stream = open(filename) 45 | self.pos = self.stream.tell() 46 | self.lineno = 0 47 | 48 | def seek(self): 49 | if self.position > 0: 50 | self.stream.seek(self.position) 51 | return 52 | if self.position < 0: 53 | self.stream.seek(self.position, 2) 54 | return 55 | for i in range(self.startline): 56 | self.lineno += 1 57 | self.stream.readline() 58 | if self.startline < 0: 59 | self.stream.seek(0, 2) 60 | 61 | def __iter__(self): 62 | import psutil 63 | self.process = psutil.Process() 64 | for event in self.catch(): 65 | yield event 66 | self.seek() 67 | while True: 68 | self.pos = self.stream.tell() 69 | line = self.stream.readline() 70 | if not line: 71 | self.count += 1 72 | if self.count > self.times: 73 | for event in self.redirect(): 74 | yield event 75 | continue 76 | timer = time.time() 77 | yield Event.IDLE 78 | sleep(self.wait - (time.time() - timer)) 79 | self.stream.seek(self.pos) 80 | else: 81 | self.count = 0 82 | self.lineno += 1 83 | yield line 84 | 85 | def redirect(self): 86 | self.count = 0 87 | files = self.process.open_files() 88 | for fo in files: 89 | if fo.fd == self.stream.fileno(): 90 | if fo.path == self.path: 91 | return 92 | if not os.path.exists(self.path): 93 | logger.info('SOURCE LOG %s does not exist, become: %s' % 94 | (self.path, fo.path)) 95 | return 96 | logger.info('SOURCE LOG redirect to %s, archive to %s (line count: %s)' % 97 | (self.path, fo.path, self.lineno)) 98 | self.open(self.path) 99 | return 100 | logger.error('SOURCE LOG fd: %s does not exist? what happened?' 101 | % self.stream.fileno()) 102 | for event in self.catch(): 103 | yield event 104 | 105 | def catch(self): 106 | logger.info('SOURCE LOG try to catch %s...' 
% self.path) 107 | while True: 108 | if os.path.exists(self.path): 109 | self.open(self.path) 110 | logger.info('SOURCE LOG catch %s successful' % self.path) 111 | break 112 | timer = time.time() 113 | yield Event.IDLE 114 | sleep(self.wait - (time.time() - timer)) 115 | 116 | 117 | class File(Executor): 118 | def __init__(self, path, filewait=None, confirmwait=None, cachefile=None, 119 | position=None, startline=None, **kwargs): 120 | super(File, self).__init__(**kwargs) 121 | self.path = os.path.abspath(path) 122 | self.filename = None 123 | self.file_wait = filewait 124 | self.confirm_wait = confirmwait 125 | self.lineno = 0 126 | self.filter = ifilter('bloom', cachefile) 127 | self.position = position or 0 128 | self.startline = startline or 0 129 | self.stream = None 130 | logger.info('SOURCE FILE FILTER: %s: %s' % (self.path, cachefile)) 131 | 132 | def seek(self): 133 | if self.position > 0: 134 | self.stream.seek(self.position) 135 | return 136 | if self.position < 0: 137 | self.stream.seek(self.position, 2) 138 | return 139 | for i in range(self.startline): 140 | self.lineno += 1 141 | self.stream.readline() 142 | if self.startline < 0: 143 | self.stream.seek(0, 2) 144 | 145 | def open(self, filename): 146 | self.filename = filename 147 | logger.info('SOURCE FILE dumping %s' % filename) 148 | try: 149 | self.stream = open(filename) 150 | return self.stream 151 | except Exception, e: 152 | logger.error('SOURCE FILE open %s failed, cause: %s' % (filename, e)) 153 | return None 154 | 155 | def fetch(self, fp): 156 | endpos = -1 157 | self.lineno = 0 158 | 159 | while True: 160 | pos = fp.tell() 161 | fp.seek(pos) 162 | for line in fp: 163 | if line[-1] == '\n': 164 | self.lineno += 1 165 | yield line 166 | else: 167 | endpos = fp.tell() 168 | fp.seek(-len(line), 1) 169 | break 170 | if not self.confirm_wait: 171 | break 172 | else: 173 | timer = time.time() 174 | yield Event.IDLE 175 | sleep(self.confirm_wait - (time.time() - timer)) 176 | ep = endpoint(fp) 177 | if ep <= fp.tell() or ep <= endpos: 178 | break 179 | fp.close() 180 | 181 | def __iter__(self): 182 | while True: 183 | files = [_ for _ in glob.glob(self.path) 184 | if _ not in self.filter] 185 | 186 | if not files: 187 | if not self.file_wait: 188 | break 189 | else: 190 | timer = time.time() 191 | yield Event.IDLE 192 | sleep(self.file_wait - (time.time() - timer)) 193 | continue 194 | 195 | logger.info('SOURCE FILE new file %s' % files) 196 | for filename in sorted(files): 197 | self.open(filename) 198 | self.seek() 199 | if not self.stream: 200 | continue 201 | 202 | try: 203 | for line in self.fetch(self.stream): 204 | yield line 205 | except Exception, e: 206 | logger.error('SOURCE FILE dumping %s failed, cause: %s' % (filename, e)) 207 | try: 208 | self.stream.close() 209 | self.stream = None 210 | except: 211 | pass 212 | continue 213 | 214 | if self.stream and not self.stream.closed: 215 | self.stream.close() 216 | self.filter.add(filename) 217 | logger.info('SOURCE FILE dumping %s End %s' % (filename, self.lineno)) 218 | 219 | 220 | class Stdin(Executor): 221 | def __iter__(self): 222 | while True: 223 | try: 224 | line = sys.stdin.readline() 225 | if not line: 226 | break 227 | yield line.strip() 228 | except KeyboardInterrupt: 229 | break 230 | 231 | 232 | class Csv(File): 233 | def __init__(self, path, filewait=None, confirmwait=None, cachefile=None, name=None, ignore_exc=True, **kwargs): 234 | super(Csv, self).__init__(path, filewait, confirmwait, cachefile, 235 | name=name, ignore_exc=ignore_exc, **kwargs) 
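    # A minimal usage sketch for Csv (the glob path and the Map callback are
    # hypothetical; pipelines compose with `|` as used in middleware.py below).
    # Csv reuses File's globbing, bloom-filter de-duplication and wait loop,
    # but parses each file with csv.reader, so items are lists of columns
    # rather than raw lines:
    #
    #   from pystream.executor.source import Csv
    #   from pystream.executor.executor import Map
    #
    #   pipeline = Csv('/var/log/*.csv', filewait=1) | Map(lambda row: row[0])
    #   for first_column in pipeline:
    #       print first_column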
236 | 237 | def fetch(self, fp): 238 | reader = csv.reader(fp, **self.kwargs) 239 | for line in reader: 240 | self.lineno += 1 241 | yield line 242 | 243 | 244 | class TCPClient(Executor): 245 | def __init__(self, address, **kwargs): 246 | super(TCPClient, self).__init__(**kwargs) 247 | self.address = address 248 | 249 | def initialize(self): 250 | import socket 251 | for _ in range(3): 252 | try: 253 | socket_af = socket.AF_UNIX if isinstance(self.address, basestring) else socket.AF_INET 254 | sock = socket.socket(socket_af, socket.SOCK_STREAM) 255 | sock.connect(self.address) 256 | sock.setblocking(False) 257 | return sock 258 | except socket.error: 259 | time.sleep(0.5) 260 | raise socket.error 261 | 262 | def read(self, sock): 263 | import socket 264 | try: 265 | msg = sock.recv(1024) 266 | if not msg: 267 | return None 268 | return msg 269 | except socket.error, e: 270 | if e.errno == 35: 271 | return Event.IDLE 272 | else: 273 | raise Exception('socket %s(%s) error' % (self.address, sock.fileno())) 274 | 275 | def handle(self, message): 276 | if not message: 277 | return '', [] 278 | if '\n' not in message: 279 | data = '' 280 | else: 281 | data, message = message.rsplit('\n', 1) 282 | return message, data.split('\n') if data else [] 283 | 284 | def __iter__(self): 285 | sock = self.initialize() 286 | message = '' 287 | while True: 288 | data = self.read(sock) 289 | if data is None and not message: 290 | break 291 | if is_event(data): 292 | yield data 293 | if not message: 294 | time.sleep(1) 295 | continue 296 | else: 297 | message += data 298 | message, items = self.handle(message) 299 | for item in items: 300 | yield item 301 | 302 | 303 | class Kafka(Executor): 304 | def __init__(self, topic, servers, EOF=None, **kwargs): 305 | super(Kafka, self).__init__(**kwargs) 306 | try: 307 | from kafka import KafkaConsumer 308 | except ImportError: 309 | raise Exception('Lack of kafka module, try to execute `pip install kafka-python>=1.3.1` install it') 310 | self.consumer = KafkaConsumer(topic, bootstrap_servers=servers, **kwargs) 311 | self.topic = topic 312 | self.servers = servers 313 | self.EOF = EOF 314 | 315 | def __iter__(self): 316 | while True: 317 | try: 318 | for msg in self.consumer: 319 | if msg.value == self.EOF: 320 | break 321 | yield msg.value 322 | except Exception, e: 323 | logger.error('SOURCE KAFKA fetch failed: topic: %s, ' 324 | 'err: %s' % (self.topic, e)) 325 | 326 | 327 | class Memory(Executor): 328 | def __init__(self, data, **kwargs): 329 | super(Memory, self).__init__(**kwargs) 330 | self.data = data 331 | 332 | def __iter__(self): 333 | for item in self.data: 334 | yield item 335 | 336 | 337 | class Faker(Executor): 338 | def __init__(self, function, maxsize=None, **kwargs): 339 | super(Faker, self).__init__(**kwargs) 340 | self.func = function 341 | self.maxsize = maxsize 342 | 343 | def __iter__(self): 344 | i = 0 345 | while not self.maxsize or i < self.maxsize: 346 | yield self.func() 347 | i += 1 348 | 349 | 350 | class Queue(Executor): 351 | def __init__(self, queue, EOF=None, wait=None, **kwargs): 352 | super(Queue, self).__init__(**kwargs) 353 | self.queue = queue 354 | self.EOF = EOF 355 | self.wait = wait or 0 356 | 357 | def __iter__(self): 358 | from Queue import Empty 359 | while True: 360 | try: 361 | item = self.queue.get_nowait() 362 | if item == self.EOF: 363 | break 364 | yield item 365 | except Empty: 366 | timer = time.time() 367 | yield Event.IDLE 368 | sleep(self.wait - (time.time() - timer)) 369 | 370 | 371 | class SQL(Executor): 372 | 
def __init__(self, conn, sql, batch=10000, **kwargs): 373 | super(SQL, self).__init__(**kwargs) 374 | self.conn = conn 375 | self.sql = sql 376 | self.batch = batch 377 | 378 | def __iter__(self): 379 | cursor = self.conn.execute(self.sql) 380 | items = cursor.fetchmany(self.batch) 381 | while items: 382 | yield items 383 | items = cursor.fetchmany(self.batch) 384 | -------------------------------------------------------------------------------- /pystream/executor/middleware.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import os 5 | import sys 6 | import time 7 | import json 8 | import socket 9 | import logging 10 | import multiprocessing 11 | from random import randint 12 | from asyncore import dispatcher 13 | 14 | from async import TCPClient 15 | from utils import Window, start_process, endpoint 16 | from executor import Executor, Group, Iterator 17 | 18 | 19 | __author__ = 'tong' 20 | __all__ = ['Queue', 'Subscribe'] 21 | 22 | logger = logging.getLogger('stream.logger') 23 | 24 | 25 | class Queue(Executor): 26 | EOF = type('EOF', (object, ), {})() 27 | 28 | def __init__(self, batch=None, timeout=None, qsize=1000, **kwargs): 29 | super(Queue, self).__init__(**kwargs) 30 | self.qsize = qsize 31 | self.timeout = timeout or 2 32 | self.batch = batch 33 | 34 | def run(self, *args): 35 | exe, callback = args 36 | for item in exe: 37 | callback(item) 38 | callback(self.EOF) 39 | 40 | @property 41 | def source(self): 42 | import source 43 | pipe = multiprocessing.Queue(self.qsize) 44 | sc = super(Queue, self).source 45 | rc = Group(window=Window(self.batch, self.timeout)) 46 | p = multiprocessing.Process(target=self.run, args=(sc | rc, pipe.put)) 47 | p.daemon = True 48 | p.start() 49 | iterator = Iterator() 50 | iterator._output = self._output 51 | return source.Queue(pipe, EOF=self.EOF) | iterator 52 | 53 | 54 | class Subscribe(Executor): 55 | def __init__(self, address=None, cache_path=None, maxsize=1024*1024, 56 | listen_num=5, archive_size=1024*1024*1024, **kwargs): 57 | self.mutex = multiprocessing.Lock() 58 | self.sensor = None 59 | self.server = None 60 | self.maxsize = maxsize 61 | self.listen_num = listen_num 62 | self.archive_size = archive_size or 1024*1024*1024 63 | self.cache_path = cache_path or '/tmp/pystream_data_%s' % time.time() 64 | if hasattr(socket, 'AF_UNIX'): 65 | self.address = address or '/tmp/pystream_sock_%s' % time.time() 66 | else: 67 | self.address = address or ('127.0.0.1', randint(20000, 50000)) 68 | if not os.path.exists(self.cache_path): 69 | os.mkdir(self.cache_path) 70 | super(Subscribe, self).__init__(**kwargs) 71 | 72 | def init_sensor(self): 73 | try: 74 | server = self._source | self.producer 75 | server.start() 76 | except Exception, e: 77 | logger.error('sensor error: %s' % e) 78 | 79 | @property 80 | def producer(self): 81 | return Sensor(self.address) 82 | 83 | def start(self): 84 | import asyncore 85 | if self._source and not self.sensor: 86 | self.sensor = start_process(self.init_sensor) 87 | TCPServer(self.address, self.cache_path, self.maxsize, self.listen_num, self.archive_size) 88 | asyncore.loop() 89 | 90 | def status(self, topic): 91 | return self._get('topic %s' % topic) 92 | 93 | def stop(self): 94 | return self._get('stop') 95 | 96 | @property 97 | def topics(self): 98 | return self._get('topics') 99 | 100 | def __iter__(self): 101 | raise Exception('please use `[]` to choose topic') 102 | 103 | def __getitem__(self, name): 104 | import 
source 105 | 106 | class Receiver(source.TCPClient): 107 | def initialize(self): 108 | sock = super(Receiver, self).initialize() 109 | if isinstance(name, basestring): 110 | sock.send('0{"topic": "%s"}\n' % name) 111 | elif len(name) == 2: 112 | sock.send('0{"topic": "%s", "number": %s}\n' % name) 113 | else: 114 | sock.send('0{"topic": "%s", "number": %s, "offset": %s}\n' % name) 115 | return sock 116 | 117 | class Mapper(Executor): 118 | def __init__(self, **kwargs): 119 | super(Mapper, self).__init__(**kwargs) 120 | self.file = None 121 | self.position = None 122 | 123 | def handle(self, item): 124 | if not item: 125 | return None 126 | self.file, self.position, item = item.split('#', 2) 127 | return item 128 | 129 | s = Receiver(self.address) 130 | u = Mapper() 131 | return s | u 132 | 133 | def _get(self, text): 134 | socket_af = socket.AF_UNIX if isinstance(self.address, basestring) else socket.AF_INET 135 | sock = socket.socket(socket_af, socket.SOCK_STREAM) 136 | sock.connect(self.address) 137 | sock.send('2%s\n' % text.strip()) 138 | fp = sock.makefile('r') 139 | res = fp.readline().strip() 140 | while not res: 141 | res = fp.readline().strip() 142 | sock.close() 143 | if res: 144 | return json.loads(res) 145 | return None 146 | 147 | 148 | class Sensor(TCPClient): 149 | def handle_connect(self): 150 | self.send('1') 151 | 152 | def handle_read(self): 153 | self.recv(3) 154 | 155 | def handle_write(self): 156 | self.message = ','.join(self.message) 157 | TCPClient.handle_write(self) 158 | 159 | 160 | class TCPServer(dispatcher): 161 | def __init__(self, address, path, maxsize=1024*1024, listen_num=5, archive_size=1024*1024*1024): 162 | dispatcher.__init__(self) 163 | socket_af = socket.AF_UNIX if isinstance(address, basestring) else socket.AF_INET 164 | self.create_socket(socket_af, socket.SOCK_STREAM) 165 | self.set_reuse_addr() 166 | self.bind(address) 167 | self.listen(listen_num) 168 | self.size = maxsize 169 | self.archive_size = archive_size 170 | self.path = path 171 | self.data = {} 172 | self.files = {} 173 | 174 | def location(self, topic): 175 | filenum, fp = self.files[topic] 176 | path = os.path.join(self.path, topic) 177 | items = self.data[topic] 178 | filesize = os.path.getsize(os.path.join(path, str(filenum))) 179 | size = 0 180 | if filesize + sum([len(_) for _ in items]) > self.archive_size: 181 | fp.close() 182 | fp = open(os.path.join(path, str(filenum+1)), 'a') 183 | self.files[topic] = [filenum+1, fp] 184 | for item in items: 185 | pos = fp.tell() 186 | fp.write('%s#%s#%s\n' % (self.files[topic][0], pos, item)) 187 | size += fp.tell() - pos 188 | self.data[topic] = [] 189 | logger.info('SUBSCRIBE topic [%s] location %s successfully' % (topic, size)) 190 | 191 | def topic(self, name): 192 | if name in self.data: 193 | return self.data[name] 194 | self.data[name] = [] 195 | path = os.path.join(self.path, name) 196 | if not os.path.exists(path): 197 | os.makedirs(path) 198 | open(os.path.join(path, '0'), 'w').close() 199 | filenum = max([int(_) for _ in os.listdir(path)]) 200 | self.files[name] = (filenum, open(os.path.join(path, str(filenum)), 'a')) 201 | return self.data[name] 202 | 203 | def handle_accept(self): 204 | pair = self.accept() 205 | if pair is not None: 206 | sock, addr = pair 207 | htype = None 208 | logger.info('server connect to %s(%s), pid: %s' % (addr, sock.fileno(), os.getpid())) 209 | counter = 0 210 | while not htype: 211 | try: 212 | sock.send('\n') 213 | htype = sock.recv(1) 214 | except socket.error: 215 | time.sleep(1) 216 | counter 
+= 1 217 | if counter > 5: 218 | break 219 | logger.info('server connect to %s(%s), type: %s' % (addr, sock.fileno(), htype)) 220 | if htype == '0': 221 | GetHandler(self, sock) 222 | elif htype == '1': 223 | PutHandler(self, sock) 224 | elif htype == '2': 225 | StaHandler(self, sock) 226 | else: 227 | sock.close() 228 | 229 | def handle_error(self): 230 | logger.error('server socket %s error' % str(self.addr)) 231 | self.handle_close() 232 | 233 | def handle_expt(self): 234 | logger.error('server socket %s error: unhandled incoming priority event' % str(self.addr)) 235 | 236 | def handle_close(self): 237 | logger.info('server socket %s close' % str(self.addr)) 238 | self.close() 239 | 240 | 241 | class Handler(dispatcher): 242 | def __init__(self, server, *args): 243 | dispatcher.__init__(self, *args) 244 | self.server = server 245 | self.message = '' 246 | 247 | def handle_error(self, e=None): 248 | logger.error('server handler socket %s error: %s' % (str(self.addr), e)) 249 | self.handle_close() 250 | 251 | def handle_expt(self): 252 | logger.error('server handler socket %s error: unhandled incoming priority event' % str(self.addr)) 253 | 254 | def handle_close(self): 255 | logger.info('server(%s) socket %s close' % (self.__class__.__name__, self.addr)) 256 | self.close() 257 | 258 | def handle_read(self): 259 | while True: 260 | try: 261 | msg = self.recv(512) 262 | if not msg: 263 | break 264 | self.message += msg 265 | except socket.error, e: 266 | if e.errno == 35: 267 | break 268 | else: 269 | raise 270 | 271 | if '\n' not in self.message: 272 | data = '' 273 | else: 274 | data, self.message = self.message.rsplit('\n', 1) 275 | 276 | try: 277 | for item in data.split('\n'): 278 | if item: 279 | self.send(self.handle(item) + '\n') 280 | except Exception, e: 281 | self.handle_error(e) 282 | 283 | 284 | class PutHandler(Handler): 285 | def handle(self, data): 286 | topic, data = data.split(',', 1) 287 | self.server.topic(topic).append(data) 288 | if sys.getsizeof(self.server.data[topic]) >= self.server.size: 289 | self.server.location(topic) 290 | return '200' 291 | 292 | 293 | class StaHandler(Handler): 294 | def topics(self): 295 | keys = self.server.data.keys() 296 | if os.path.exists(self.server.path): 297 | keys += os.listdir(self.server.path) 298 | return json.dumps(list(set(keys))) 299 | 300 | def status(self, name): 301 | ret = {} 302 | if os.path.exists(self.server.path): 303 | filepath = os.path.join(self.server.path, name) 304 | if os.path.exists(filepath): 305 | files = os.listdir(filepath) 306 | ret['filenum'] = len(files) 307 | ret['filesize'] = sum([os.path.getsize(os.path.join(filepath, _)) for _ in files]) 308 | items = self.server.data.get(name, []) 309 | ret['memsize'] = sum([len(_) for _ in items]) 310 | return json.dumps(ret) 311 | 312 | def stop(self): 313 | for name in self.server.data: 314 | self.server.location(name) 315 | self.server.close() 316 | for handler in self._map.values(): 317 | if isinstance(handler, PutHandler): 318 | handler.close() 319 | return 'true' 320 | 321 | def handle(self, data): 322 | data = data.strip().lower() 323 | if data == 'topics': 324 | return self.topics() 325 | if data.startswith('topic '): 326 | name = data[6:] 327 | return self.status(name) 328 | if data == 'stop': 329 | return self.stop() 330 | return 'null' 331 | 332 | 333 | class GetHandler(Handler): 334 | def __init__(self, server, *args): 335 | Handler.__init__(self, server, *args) 336 | self.topic = None 337 | self.number = -1 338 | self.blocksize = 0 339 | self.offset = 
0
340 |         self.fp = None
341 | 
342 |     def handle(self, data):
343 |         data = json.loads(data)
344 |         self.topic = data['topic']
345 |         if data.get('number'):
346 |             self.use(data['number'])
347 |         if data.get('offset'):
348 |             self.offset = int(data['offset'])
349 |             if self.fp: self.fp.seek(self.offset)  # no file is open until use() runs
350 |         return ''
351 | 
352 |     def use(self, number):
353 |         self.number = int(number)
354 |         filename = os.path.join(self.server.path, self.topic, str(self.number))
355 |         if self.fp:
356 |             self.fp.close()
357 |         self.fp = open(filename)
358 |         self.blocksize = os.path.getsize(filename)
359 |         self.offset = 0
360 | 
361 |     def handle_write(self):
362 |         from sendfile import sendfile
363 |         self.offset += sendfile(self.socket.fileno(), self.fp.fileno(), self.offset, self.blocksize)
364 | 
365 |     def writable(self):
366 |         if not self.topic:
367 |             return False
368 |         if self.fp and self.blocksize > self.offset:
369 |             return True
370 |         if self.fp:
371 |             self.blocksize = endpoint(self.fp)
372 |             if self.blocksize > self.offset:
373 |                 return True
374 | 
375 |         pathname = os.path.join(self.server.path, str(self.topic))
376 |         if os.path.exists(pathname):
377 |             nums = sorted([int(_) for _ in os.listdir(pathname) if int(_) > self.number])
378 |         else:
379 |             nums = []
380 | 
381 |         if nums:
382 |             self.use(nums[0])
383 |             return True
384 |         if self.topic in self.server.data and self.server.data[self.topic]:
385 |             self.server.location(self.topic)
386 |         return False
387 | 
--------------------------------------------------------------------------------
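# A sketch of the consumer side of the Subscribe middleware above, based on
# the handler protocol in this file (the socket path and topic name are
# hypothetical):
#
#   from pystream.executor.middleware import Subscribe
#
#   sub = Subscribe(address='/tmp/pystream.sock')
#   print sub.topics            # StaHandler, '2topics\n'      -> JSON list of topics
#   print sub.status('nginx')   # StaHandler, '2topic nginx\n' -> filenum/filesize/memsize
#
#   # `[]` opens a GetHandler stream: each record is sent as
#   # '<filenum>#<offset>#<data>\n' and Mapper strips the file/position prefix
#   for line in sub['nginx']:
#       print line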