├── blender ├── __pycache__ │ ├── feedparser.cpython-34.pyc │ └── asyncio_bridge.cpython-34.pyc ├── aiohttp │ ├── __pycache__ │ │ ├── abc.cpython-34.pyc │ │ ├── hdrs.cpython-34.pyc │ │ ├── log.cpython-34.pyc │ │ ├── web.cpython-34.pyc │ │ ├── wsgi.cpython-34.pyc │ │ ├── client.cpython-34.pyc │ │ ├── errors.cpython-34.pyc │ │ ├── helpers.cpython-34.pyc │ │ ├── parsers.cpython-34.pyc │ │ ├── server.cpython-34.pyc │ │ ├── streams.cpython-34.pyc │ │ ├── web_ws.cpython-34.pyc │ │ ├── worker.cpython-34.pyc │ │ ├── __init__.cpython-34.pyc │ │ ├── connector.cpython-34.pyc │ │ ├── multidict.cpython-34.pyc │ │ ├── multipart.cpython-34.pyc │ │ ├── protocol.cpython-34.pyc │ │ ├── test_utils.cpython-34.pyc │ │ ├── web_reqrep.cpython-34.pyc │ │ ├── websocket.cpython-34.pyc │ │ ├── client_reqrep.cpython-34.pyc │ │ ├── web_exceptions.cpython-34.pyc │ │ ├── web_urldispatcher.cpython-34.pyc │ │ └── websocket_client.cpython-34.pyc │ ├── log.py │ ├── abc.py │ ├── _websocket.pyx │ ├── __init__.py │ ├── hdrs.py │ ├── worker.py │ ├── errors.py │ ├── websocket_client.py │ ├── wsgi.py │ ├── web_exceptions.py │ ├── web.py │ ├── web_ws.py │ ├── test_utils.py │ └── helpers.py ├── chardet │ ├── __pycache__ │ │ ├── escsm.cpython-34.pyc │ │ ├── compat.cpython-34.pyc │ │ ├── jisfreq.cpython-34.pyc │ │ ├── jpcntx.cpython-34.pyc │ │ ├── mbcssm.cpython-34.pyc │ │ ├── __init__.cpython-34.pyc │ │ ├── big5freq.cpython-34.pyc │ │ ├── big5prober.cpython-34.pyc │ │ ├── chardetect.cpython-34.pyc │ │ ├── constants.cpython-34.pyc │ │ ├── escprober.cpython-34.pyc │ │ ├── euckrfreq.cpython-34.pyc │ │ ├── euctwfreq.cpython-34.pyc │ │ ├── gb2312freq.cpython-34.pyc │ │ ├── sjisprober.cpython-34.pyc │ │ ├── utf8prober.cpython-34.pyc │ │ ├── cp949prober.cpython-34.pyc │ │ ├── eucjpprober.cpython-34.pyc │ │ ├── euckrprober.cpython-34.pyc │ │ ├── euctwprober.cpython-34.pyc │ │ ├── gb2312prober.cpython-34.pyc │ │ ├── hebrewprober.cpython-34.pyc │ │ ├── latin1prober.cpython-34.pyc │ │ ├── charsetprober.cpython-34.pyc │ 
│ ├── langgreekmodel.cpython-34.pyc │ │ ├── langhebrewmodel.cpython-34.pyc │ │ ├── langthaimodel.cpython-34.pyc │ │ ├── mbcharsetprober.cpython-34.pyc │ │ ├── mbcsgroupprober.cpython-34.pyc │ │ ├── sbcharsetprober.cpython-34.pyc │ │ ├── sbcsgroupprober.cpython-34.pyc │ │ ├── chardistribution.cpython-34.pyc │ │ ├── langcyrillicmodel.cpython-34.pyc │ │ ├── universaldetector.cpython-34.pyc │ │ ├── charsetgroupprober.cpython-34.pyc │ │ ├── codingstatemachine.cpython-34.pyc │ │ ├── langbulgarianmodel.cpython-34.pyc │ │ └── langhungarianmodel.cpython-34.pyc │ ├── compat.py │ ├── chardetect.py │ ├── __init__.py │ ├── constants.py │ ├── euctwprober.py │ ├── euckrprober.py │ ├── gb2312prober.py │ ├── big5prober.py │ ├── cp949prober.py │ ├── charsetprober.py │ ├── mbcsgroupprober.py │ ├── codingstatemachine.py │ ├── utf8prober.py │ ├── escprober.py │ ├── sbcsgroupprober.py │ ├── mbcharsetprober.py │ ├── eucjpprober.py │ ├── sjisprober.py │ ├── charsetgroupprober.py │ ├── sbcharsetprober.py │ ├── latin1prober.py │ ├── universaldetector.py │ ├── escsm.py │ └── chardistribution.py ├── asyncio_bridge.py └── blenderServer.py ├── README.md └── torch ├── Mask.lua ├── Adam.lua ├── base.lua ├── get_data.lua ├── model_conv.lua ├── model_flat.lua └── CAE.lua /blender/__pycache__/feedparser.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/__pycache__/feedparser.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/abc.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/abc.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/hdrs.cpython-34.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/hdrs.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/log.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/log.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/web.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/web.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/wsgi.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/wsgi.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/escsm.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/escsm.cpython-34.pyc -------------------------------------------------------------------------------- /blender/__pycache__/asyncio_bridge.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/__pycache__/asyncio_bridge.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/client.cpython-34.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/client.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/errors.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/errors.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/helpers.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/helpers.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/parsers.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/parsers.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/server.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/server.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/streams.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/streams.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/web_ws.cpython-34.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/web_ws.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/worker.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/worker.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/compat.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/compat.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/jisfreq.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/jisfreq.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/jpcntx.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/jpcntx.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/mbcssm.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/mbcssm.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/__init__.cpython-34.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/__init__.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/connector.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/connector.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/multidict.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/multidict.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/multipart.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/multipart.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/protocol.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/protocol.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/test_utils.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/test_utils.cpython-34.pyc -------------------------------------------------------------------------------- 
/blender/aiohttp/__pycache__/web_reqrep.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/web_reqrep.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/websocket.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/websocket.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/__init__.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/__init__.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/big5freq.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/big5freq.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/big5prober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/big5prober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/chardetect.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/chardetect.cpython-34.pyc 
-------------------------------------------------------------------------------- /blender/chardet/__pycache__/constants.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/constants.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/escprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/escprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/euckrfreq.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/euckrfreq.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/euctwfreq.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/euctwfreq.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/gb2312freq.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/gb2312freq.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/sjisprober.cpython-34.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/sjisprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/utf8prober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/utf8prober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/cp949prober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/cp949prober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/eucjpprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/eucjpprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/euckrprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/euckrprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/euctwprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/euctwprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/gb2312prober.cpython-34.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/gb2312prober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/hebrewprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/hebrewprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/latin1prober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/latin1prober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/client_reqrep.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/client_reqrep.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/web_exceptions.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/web_exceptions.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/charsetprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/charsetprober.cpython-34.pyc 
-------------------------------------------------------------------------------- /blender/chardet/__pycache__/langgreekmodel.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/langgreekmodel.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/langhebrewmodel.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/langhebrewmodel.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/langthaimodel.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/langthaimodel.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/mbcharsetprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/mbcharsetprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/mbcsgroupprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/mbcsgroupprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/sbcharsetprober.cpython-34.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/sbcharsetprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/sbcsgroupprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/sbcsgroupprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/web_urldispatcher.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/web_urldispatcher.cpython-34.pyc -------------------------------------------------------------------------------- /blender/aiohttp/__pycache__/websocket_client.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/aiohttp/__pycache__/websocket_client.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/chardistribution.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/chardistribution.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/langcyrillicmodel.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/langcyrillicmodel.cpython-34.pyc -------------------------------------------------------------------------------- 
/blender/chardet/__pycache__/universaldetector.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/universaldetector.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/charsetgroupprober.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/charsetgroupprober.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/codingstatemachine.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/codingstatemachine.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/langbulgarianmodel.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/langbulgarianmodel.cpython-34.pyc -------------------------------------------------------------------------------- /blender/chardet/__pycache__/langhungarianmodel.cpython-34.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/danfischetti/shape-encoder/HEAD/blender/chardet/__pycache__/langhungarianmodel.cpython-34.pyc -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # shape-encoder 2 | Encodes multiple viewpoints of a 3D object into a single tensor, which can be decoded with a viewpoint dependent 
transformation. 3 | 4 | train_shape_conv is the main file which loads the model from model_conv. 5 | 6 | Relevant blog post: http://danfischetti.github.io/jekyll/update/2016/01/07/convolutional-shape-encoder.html 7 | -------------------------------------------------------------------------------- /blender/aiohttp/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | 4 | access_logger = logging.getLogger('aiohttp.access') 5 | client_logger = logging.getLogger('aiohttp.client') 6 | internal_logger = logging.getLogger('aiohttp.internal') 7 | server_logger = logging.getLogger('aiohttp.server') 8 | web_logger = logging.getLogger('aiohttp.web') 9 | ws_logger = logging.getLogger('aiohttp.websocket') 10 | -------------------------------------------------------------------------------- /blender/aiohttp/abc.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from abc import ABCMeta, abstractmethod 3 | 4 | 5 | class AbstractRouter(metaclass=ABCMeta): 6 | 7 | @asyncio.coroutine # pragma: no branch 8 | @abstractmethod 9 | def resolve(self, request): 10 | """Return MATCH_INFO for given request""" 11 | 12 | 13 | class AbstractMatchInfo(metaclass=ABCMeta): 14 | 15 | @property # pragma: no branch 16 | @abstractmethod 17 | def handler(self): 18 | """Return handler for match info""" 19 | 20 | @property # pragma: no branch 21 | @abstractmethod 22 | def route(self): 23 | """Return route for match info""" 24 | -------------------------------------------------------------------------------- /blender/aiohttp/_websocket.pyx: -------------------------------------------------------------------------------- 1 | from cpython cimport PyBytes_FromStringAndSize, PyBytes_AsString 2 | from cpython.ref cimport PyObject 3 | 4 | cdef extern from "Python.h": 5 | char* PyByteArray_AsString(object bytearray) except NULL 6 | 7 | 8 | def _websocket_mask_cython(bytes mask, bytearray data): 9 
| cdef Py_ssize_t mask_len, data_len, i 10 | cdef char * in_buf 11 | cdef char * out_buf 12 | cdef char * mask_buf 13 | cdef bytes ret 14 | mask_len = len(mask) 15 | data_len = len(data) 16 | in_buf = PyByteArray_AsString(data) 17 | mask_buf = PyBytes_AsString(mask) 18 | for i in range(0, data_len): 19 | in_buf[i] = in_buf[i] ^ mask_buf[i % 4] 20 | return data 21 | -------------------------------------------------------------------------------- /torch/Mask.lua: -------------------------------------------------------------------------------- 1 | local Mask, parent = torch.class('nn.Mask','nn.Module') 2 | 3 | function Mask:__init(rows,cols) 4 | parent.__init(self) 5 | 6 | self.rows,self.cols = rows,cols 7 | 8 | self.output = torch.zeros(1,1,rows,cols) 9 | self.gradInput = torch.zeros(1,1,rows,cols) 10 | 11 | for i = 1,rows do 12 | for j = 1,cols do 13 | if (i%2==0 and j%2==0) then 14 | self.output[{1,1,i,j}] = 1 15 | end 16 | end 17 | end 18 | 19 | end 20 | 21 | function Mask:updateOutput(input) 22 | local size = input:size() 23 | return torch.expand(self.output,size[1],size[2],self.rows,self.cols) 24 | end 25 | 26 | function Mask:updateGradInput(input, gradOutput) 27 | local size = input:size() 28 | return torch.expand(self.gradInput,size[1],size[2],self.rows,self.cols) 29 | end -------------------------------------------------------------------------------- /torch/Adam.lua: -------------------------------------------------------------------------------- 1 | local Adam = torch.class('nn.Adam') 2 | 3 | function Adam:__init(params,gparams,alpha,beta1,beta2,epsilon,l) 4 | self.params = params 5 | self.gparams = gparams 6 | self.alpha = alpha or 0.0002 7 | self.beta1 = beta1 or 0.1 8 | self.beta2 = beta2 or 0.001 9 | self.epsilon = epsilon or 1e-8 10 | self.l = l or 1e-8 11 | self.t = 0 12 | self.m = params:clone():zero() 13 | self.v = params:clone():zero() 14 | end 15 | 16 | function Adam:step() 17 | self.t = self.t+1 18 | local b1_t = 
self.beta1*math.pow(self.l,self.t-1) 19 | self.m:mul(b1_t) 20 | self.m:add(1-b1_t,self.gparams) 21 | self.v:mul(self.beta2) 22 | self.v:addcmul(1-self.beta2,self.gparams,self.gparams) 23 | local m_bias = torch.div(self.m,1-b1_t) 24 | local v_bias = torch.div(self.v,1-self.beta2) 25 | self.params:addcdiv(-self.alpha,m_bias,torch.sqrt(v_bias) + self.epsilon) 26 | --self.params:add(-self.gparams*self.alpha) 27 | end 28 | 29 | -------------------------------------------------------------------------------- /blender/aiohttp/__init__.py: -------------------------------------------------------------------------------- 1 | # This relies on each of the submodules having an __all__ variable. 2 | 3 | __version__ = '0.17.2' 4 | 5 | 6 | from . import hdrs # noqa 7 | from .protocol import * # noqa 8 | from .connector import * # noqa 9 | from .client import * # noqa 10 | from .errors import * # noqa 11 | from .helpers import * # noqa 12 | from .parsers import * # noqa 13 | from .streams import * # noqa 14 | from .multidict import * # noqa 15 | from .multipart import * # noqa 16 | from .websocket_client import * # noqa 17 | 18 | 19 | __all__ = (client.__all__ + 20 | errors.__all__ + 21 | helpers.__all__ + 22 | parsers.__all__ + 23 | protocol.__all__ + 24 | connector.__all__ + 25 | streams.__all__ + 26 | multidict.__all__ + 27 | multipart.__all__ + 28 | websocket_client.__all__ + 29 | ('hdrs', '__version__')) 30 | -------------------------------------------------------------------------------- /blender/chardet/compat.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # Contributor(s): 3 | # Ian Cordasco - port to Python 4 | # 5 | # This library is free software; you can redistribute it and/or 6 | # modify it under the terms of the GNU Lesser General Public 7 | # License as published by the Free Software Foundation; either 8 | # version 2.1 of the License, or (at your 
option) any later version. 9 | # 10 | # This library is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 13 | # Lesser General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Lesser General Public 16 | # License along with this library; if not, write to the Free Software 17 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 18 | # 02110-1301 USA 19 | ######################### END LICENSE BLOCK ######################### 20 | 21 | import sys 22 | 23 | 24 | if sys.version_info < (3, 0): 25 | base_str = (str, unicode) 26 | else: 27 | base_str = (bytes, str) 28 | 29 | 30 | def wrap_ord(a): 31 | if sys.version_info < (3, 0) and isinstance(a, base_str): 32 | return ord(a) 33 | else: 34 | return a 35 | -------------------------------------------------------------------------------- /blender/chardet/chardetect.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Script which takes one or more file paths and reports on their detected 4 | encodings 5 | 6 | Example:: 7 | 8 | % chardetect somefile someotherfile 9 | somefile: windows-1252 with confidence 0.5 10 | someotherfile: ascii with confidence 1.0 11 | 12 | If no paths are provided, it takes its input from stdin. 
def description_of(file, name='stdin'):
    """Return a one-line string describing the probable encoding of *file*.

    *file* must yield byte strings when iterated; each chunk is fed to a
    fresh UniversalDetector and the final result dict is formatted as
    "name: encoding with confidence c" (or "name: no result").
    """
    u = UniversalDetector()
    for line in file:
        u.feed(line)
    u.close()
    result = u.result
    if result['encoding']:
        return '%s: %s with confidence %s' % (name,
                                              result['encoding'],
                                              result['confidence'])
    return '%s: no result' % name


def main():
    """Report the detected encoding of each path in argv, or of stdin."""
    if len(argv) <= 1:
        # BUG FIX: on Python 3, sys.stdin is a text stream and iterating
        # it yields str, which the detector cannot feed.  Read from the
        # underlying binary buffer when it exists (Python 2 stdin has no
        # .buffer and is already bytes).
        print(description_of(getattr(stdin, 'buffer', stdin)))
    else:
        for path in argv[1:]:
            with open(path, 'rb') as f:
                print(description_of(f, path))


if __name__ == '__main__':
    main()
11 | # 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with this library; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 15 | # 02110-1301 USA 16 | ######################### END LICENSE BLOCK ######################### 17 | 18 | __version__ = "2.2.1" 19 | from sys import version_info 20 | 21 | 22 | def detect(aBuf): 23 | if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or 24 | (version_info >= (3, 0) and not isinstance(aBuf, bytes))): 25 | raise ValueError('Expected a bytes object, not a unicode object') 26 | 27 | from . import universaldetector 28 | u = universaldetector.UniversalDetector() 29 | u.reset() 30 | u.feed(aBuf) 31 | u.close() 32 | return u.result 33 | -------------------------------------------------------------------------------- /blender/chardet/constants.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 
# Module-wide debug flag (0 = off).
_debug = 0

# Prober result states, as returned by CharSetProber.get_state().
eDetecting = 0  # still gathering evidence
eFoundIt = 1    # charset positively identified
eNotMe = 2      # input ruled out for this charset

# Coding state machine states (see codingstatemachine.py).
eStart = 0
eError = 1
eItsMe = 2

# Confidence above which a prober may stop early and report eFoundIt.
SHORTCUT_THRESHOLD = 0.95
class EUCTWProber(MultiByteCharSetProber):
    """Multi-byte prober for the EUC-TW (Taiwan) encoding."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Character-distribution analysis scores how plausible the
        # observed multi-byte characters are for EUC-TW.
        self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
        # The state machine validates EUC-TW byte-sequence structure.
        self._mCodingSM = CodingStateMachine(EUCTWSMModel)
        self.reset()

    def get_charset_name(self):
        """Canonical name reported when this prober wins."""
        return "EUC-TW"
class EUCKRProber(MultiByteCharSetProber):
    """Multi-byte prober for the EUC-KR (Korean) encoding."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Character-distribution analysis scores how plausible the
        # observed multi-byte characters are for EUC-KR.
        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
        # The state machine validates EUC-KR byte-sequence structure.
        self._mCodingSM = CodingStateMachine(EUCKRSMModel)
        self.reset()

    def get_charset_name(self):
        """Canonical name reported when this prober wins."""
        return "EUC-KR"
class GB2312Prober(MultiByteCharSetProber):
    """Multi-byte prober for the GB2312 (simplified Chinese) encoding."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Character-distribution analysis scores how plausible the
        # observed multi-byte characters are for GB2312.
        self._mDistributionAnalyzer = GB2312DistributionAnalysis()
        # The state machine validates GB2312 byte-sequence structure.
        self._mCodingSM = CodingStateMachine(GB2312SMModel)
        self.reset()

    def get_charset_name(self):
        """Canonical name reported when this prober wins."""
        return "GB2312"
class Big5Prober(MultiByteCharSetProber):
    """Multi-byte prober for the Big5 (traditional Chinese) encoding."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Character-distribution analysis scores how plausible the
        # observed multi-byte characters are for Big5.
        self._mDistributionAnalyzer = Big5DistributionAnalysis()
        # The state machine validates Big5 byte-sequence structure.
        self._mCodingSM = CodingStateMachine(Big5SMModel)
        self.reset()

    def get_charset_name(self):
        """Canonical name reported when this prober wins."""
        return "Big5"
class CP949Prober(MultiByteCharSetProber):
    """Multi-byte prober for the CP949 (Korean, Windows) encoding."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # NOTE: CP949 is a superset of EUC-KR, so the same distribution
        # analyzer is reused; only the state machine differs.
        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
        self._mCodingSM = CodingStateMachine(CP949SMModel)
        self.reset()

    def get_charset_name(self):
        """Canonical name reported when this prober wins."""
        return "CP949"
3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from . 
class CharSetProber:
    """Base class for all charset probers.

    Subclasses feed() chunks of bytes and expose a detection state plus
    a confidence score; this base provides neutral defaults and two
    byte-filtering helpers shared by the frequency-based probers.
    """

    def __init__(self):
        pass

    def reset(self):
        # A fresh prober is still gathering evidence.
        self._mState = constants.eDetecting

    def get_charset_name(self):
        """Subclasses return their canonical charset name; base has none."""
        return None

    def feed(self, aBuf):
        """Consume a chunk of bytes; no-op in the base class."""
        pass

    def get_state(self):
        return self._mState

    def get_confidence(self):
        return 0.0

    def filter_high_bit_only(self, aBuf):
        """Collapse every run of ASCII bytes into a single space."""
        return re.sub(b'([\x00-\x7F])+', b' ', aBuf)

    def filter_without_english_letters(self, aBuf):
        """Collapse every run of ASCII letters into a single space."""
        return re.sub(b'([A-Za-z])+', b' ', aBuf)

    def filter_with_english_letters(self, aBuf):
        # TODO: never implemented upstream; passes input through unchanged.
        return aBuf
class MBCSGroupProber(CharSetGroupProber):
    """Group prober that tries every supported multi-byte charset."""

    def __init__(self):
        CharSetGroupProber.__init__(self)
        # NOTE(review): list order presumably reflects detection
        # priority inside CharSetGroupProber — confirm before reordering.
        prober_classes = (
            UTF8Prober,
            SJISProber,
            EUCJPProber,
            GB2312Prober,
            EUCKRProber,
            CP949Prober,
            Big5Prober,
            EUCTWProber,
        )
        self._mProbers = [klass() for klass in prober_classes]
        self.reset()
--

-- Recursively walk `node` (a plain table of modules, or a single nn
-- module) and turn training mode OFF for every Dropout layer found.
function g_disable_dropout(node)
   if type(node) == "table" and node.__typename == nil then
      for i = 1, #node do
         node[i]:apply(g_disable_dropout)
      end
      return
   end
   -- Any module whose type name contains "Dropout" is switched to eval.
   if string.match(node.__typename, "Dropout") then
      node.train = false
   end
end

-- Mirror of g_disable_dropout: re-enable training mode on Dropout layers.
function g_enable_dropout(node)
   if type(node) == "table" and node.__typename == nil then
      for i = 1, #node do
         node[i]:apply(g_enable_dropout)
      end
      return
   end
   if string.match(node.__typename, "Dropout") then
      node.train = true
   end
end

-- Clone `net` T times via in-memory serialization.  Each clone gets its
-- own module structure, but its parameter/gradient tensors are :set()
-- to the master's, so all clones share (and update) one set of weights.
function g_cloneManyTimes(net, T)
   local clones = {}
   local params, gradParams = net:parameters()
   local mem = torch.MemoryFile("w"):binary()
   mem:writeObject(net)
   for t = 1, T do
      -- We need to use a new reader for each clone.
      -- We don't want to use the pointers to already read objects.
      local reader = torch.MemoryFile(mem:storage(), "r"):binary()
      local clone = reader:readObject()
      reader:close()
      local cloneParams, cloneGradParams = clone:parameters()
      for i = 1, #params do
         -- :set() points the clone's tensors at the master's storage.
         cloneParams[i]:set(params[i])
         cloneGradParams[i]:set(gradParams[i])
      end
      clones[t] = clone
      collectgarbage()
   end
   mem:close()
   return clones
end

-- Select the GPU given in `args` (array-like; defaults to device 1)
-- and make all RNGs deterministic.
function g_init_gpu(args)
   local gpuidx = args
   gpuidx = gpuidx[1] or 1
   print(string.format("Using %s-th gpu", gpuidx))
   cutorch.setDevice(gpuidx)
   g_make_deterministic(1)
end

-- Seed both CPU and GPU RNGs.  The throwaway uniform() call presumably
-- forces cutorch's RNG state to initialize eagerly -- TODO confirm.
function g_make_deterministic(seed)
   torch.manualSeed(seed)
   cutorch.manualSeed(seed)
   torch.zeros(1, 1):cuda():uniform()
end

-- Element-wise copy of the tensors in `from` into the tensors in `to`;
-- the two tables must have the same length.
function g_replace_table(to, from)
   assert(#to == #from)
   for i = 1, #to do
      to[i]:copy(from[i])
   end
end

-- Format a number with three decimal places (for logging).
function g_f3(f)
   return string.format("%.3f", f)
end

-- Format a number as a rounded integer (for logging).
function g_d(f)
   return string.format("%d", torch.round(f))
end
-------------------------------------------------------------------------------- /blender/chardet/codingstatemachine.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
class CodingStateMachine:
    """Table-driven validator for one encoding's byte-sequence structure.

    `sm` is a model dict with 'classTable', 'charLenTable',
    'classFactor', 'stateTable' and 'name' entries; next_state() walks
    the table one byte at a time and reports eStart / eError / eItsMe.
    """

    def __init__(self, sm):
        self._mModel = sm
        self._mCurrentBytePos = 0
        self._mCurrentCharLen = 0
        self.reset()

    def reset(self):
        self._mCurrentState = eStart

    def next_state(self, c):
        # Classify the byte; wrap_ord handles the Python 2 case where
        # iterating a byte string yields one-character strings.
        byte_class = self._mModel['classTable'][wrap_ord(c)]
        if self._mCurrentState == eStart:
            # First byte of a character: record its expected length.
            self._mCurrentBytePos = 0
            self._mCurrentCharLen = self._mModel['charLenTable'][byte_class]
        # The flat state table is indexed by (state, byte class).
        table_index = (self._mCurrentState * self._mModel['classFactor']
                       + byte_class)
        self._mCurrentState = self._mModel['stateTable'][table_index]
        self._mCurrentBytePos += 1
        return self._mCurrentState

    def get_current_charlen(self):
        """Byte length of the character currently being consumed."""
        return self._mCurrentCharLen

    def get_coding_state_machine(self):
        """Name of the encoding model this machine runs."""
        return self._mModel['name']
   -- (tail of get_view(l, index, n)) Ask the blender RPC server to
   -- render view `l` of object `index`, then load the image it wrote
   -- to the shared ramdisk.
   json.rpc.call('http://localhost:9090','get_view',{l[1],l[2],l[3],l[4],index,n})
   return image.load(filepath .. 'm' .. n .. '.png')
end

-- Tell the render server to load object batch `n` into its scene.
function loadBatch(n)
   json.rpc.call('http://localhost:9090','loadObjects',{n})
end

-- x_in: fixed-glimpse renders; x / y: sampled target views + locations.
x_in = {}
x = {}
y = {}


-- Build the dataset: for each batch of objects, render the fixed
-- glimpse views plus n_samples quasi-random (Halton) target views,
-- then join everything into batched tensors and save to disk.
function main()
   for i = 1,params.n_glimpses do
      x_in[i] = {}
   end
   for i = 1,params.n_samples do
      x[i] = {}
      y[i] = {}
   end
   for a = 1,16 do
      for i = 1,params.n_batches do
         print(i)
         local indices = {}
         local i2 = i
         -- NOTE(review): batch 16 is remapped to 25 -- presumably
         -- skipping a bad batch on disk; confirm against render data.
         if i == 16 then
            i2 = 25
         end
         for k = 1,params.batch_size do
            -- NOTE(review): `index` is missing `local`, so it leaks
            -- into the global scope.
            index=4*(i2-1)+((k-1)%14)*100+math.floor((k-1)/14)
            indices[k]=index
         end
         loadBatch(i2)
         for j = 1,params.batch_size do
            for k = 1,params.n_glimpses do
               local loc = params.loc[k]
               local img = get_view(loc,indices[j],j):view(1,1,64,64)
               table.insert(x_in[k],img)
            end
            for k = 1,params.n_samples do
               local loc = torch.Tensor({0,0,0,0})
               local h = halton2()
               -- Map unit-square Halton samples into view-parameter
               -- ranges: loc[2] in [-1,1], loc[3] in [-0.2,0.2].
               loc[2]=h[1]*2-1
               loc[3]=(h[2]*2-1)*.2
               local img = get_view(loc,indices[j],j):view(1,1,64,64)
               table.insert(x[k],img)
               table.insert(y[k],loc:view(1,4))
            end
         end
      end
   end
   -- Concatenate each per-slot list of 1x1x64x64 images along dim 1.
   module = nn.JoinTable(1)
   for n = 1,params.n_samples do
      x[n] = module:forward(x[n]):clone()
      y[n] = module:forward(y[n]):clone()
   end
   for n = 1,params.n_glimpses do
      x_in[n] = module:forward(x_in[n]):clone()
   end
   torch.save('targets.dat',x)
   torch.save('renders.dat',x_in)
   torch.save('target_locs.dat',y)
end

-- Global Halton sequence counter (advanced by halton2).
IND = 0

-- Radical-inverse Halton sequence value for `index` in the given base.
function halton(index,base)
   local result = 0
   local i = index
   local f = 1
   while i > 0 do
      f = f/base
      result = result+f*(i%base)
      i = math.floor(i/base)
   end
   return result
end

-- Next 2-D low-discrepancy sample (Halton bases 2 and 3).
function halton2()
   IND=IND+1
   return {halton(IND,2),halton(IND,3)}
end

main()
-------------------------------------------------------------------------------- /blender/chardet/utf8prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from . 
import constants 29 | from .charsetprober import CharSetProber 30 | from .codingstatemachine import CodingStateMachine 31 | from .mbcssm import UTF8SMModel 32 | 33 | ONE_CHAR_PROB = 0.5 34 | 35 | 36 | class UTF8Prober(CharSetProber): 37 | def __init__(self): 38 | CharSetProber.__init__(self) 39 | self._mCodingSM = CodingStateMachine(UTF8SMModel) 40 | self.reset() 41 | 42 | def reset(self): 43 | CharSetProber.reset(self) 44 | self._mCodingSM.reset() 45 | self._mNumOfMBChar = 0 46 | 47 | def get_charset_name(self): 48 | return "utf-8" 49 | 50 | def feed(self, aBuf): 51 | for c in aBuf: 52 | codingState = self._mCodingSM.next_state(c) 53 | if codingState == constants.eError: 54 | self._mState = constants.eNotMe 55 | break 56 | elif codingState == constants.eItsMe: 57 | self._mState = constants.eFoundIt 58 | break 59 | elif codingState == constants.eStart: 60 | if self._mCodingSM.get_current_charlen() >= 2: 61 | self._mNumOfMBChar += 1 62 | 63 | if self.get_state() == constants.eDetecting: 64 | if self.get_confidence() > constants.SHORTCUT_THRESHOLD: 65 | self._mState = constants.eFoundIt 66 | 67 | return self.get_state() 68 | 69 | def get_confidence(self): 70 | unlike = 0.99 71 | if self._mNumOfMBChar < 6: 72 | for i in range(0, self._mNumOfMBChar): 73 | unlike = unlike * ONE_CHAR_PROB 74 | return 1.0 - unlike 75 | else: 76 | return unlike 77 | -------------------------------------------------------------------------------- /blender/aiohttp/hdrs.py: -------------------------------------------------------------------------------- 1 | """HTTP Headers constants.""" 2 | from .multidict import upstr 3 | 4 | METH_ANY = upstr('*') 5 | METH_CONNECT = upstr('CONNECT') 6 | METH_HEAD = upstr('HEAD') 7 | METH_GET = upstr('GET') 8 | METH_DELETE = upstr('DELETE') 9 | METH_OPTIONS = upstr('OPTIONS') 10 | METH_PATCH = upstr('PATCH') 11 | METH_POST = upstr('POST') 12 | METH_PUT = upstr('PUT') 13 | METH_TRACE = upstr('TRACE') 14 | 15 | ACCEPT = upstr('ACCEPT') 16 | ACCEPT_CHARSET = 
upstr('ACCEPT-CHARSET') 17 | ACCEPT_ENCODING = upstr('ACCEPT-ENCODING') 18 | ACCEPT_LANGUAGE = upstr('ACCEPT-LANGUAGE') 19 | ACCEPT_RANGES = upstr('ACCEPT-RANGES') 20 | ACCESS_CONTROL_MAX_AGE = upstr('ACCESS-CONTROL-MAX-AGE') 21 | ACCESS_CONTROL_ALLOW_CREDENTIALS = upstr('ACCESS-CONTROL-ALLOW-CREDENTIALS') 22 | ACCESS_CONTROL_ALLOW_HEADERS = upstr('ACCESS-CONTROL-ALLOW-HEADERS') 23 | ACCESS_CONTROL_ALLOW_METHODS = upstr('ACCESS-CONTROL-ALLOW-METHODS') 24 | ACCESS_CONTROL_ALLOW_ORIGIN = upstr('ACCESS-CONTROL-ALLOW-ORIGIN') 25 | ACCESS_CONTROL_EXPOSE_HEADERS = upstr('ACCESS-CONTROL-EXPOSE-HEADERS') 26 | ACCESS_CONTROL_REQUEST_HEADERS = upstr('ACCESS-CONTROL-REQUEST-HEADERS') 27 | ACCESS_CONTROL_REQUEST_METHOD = upstr('ACCESS-CONTROL-REQUEST-METHOD') 28 | AGE = upstr('AGE') 29 | ALLOW = upstr('ALLOW') 30 | AUTHORIZATION = upstr('AUTHORIZATION') 31 | CACHE_CONTROL = upstr('CACHE-CONTROL') 32 | CONNECTION = upstr('CONNECTION') 33 | CONTENT_DISPOSITION = upstr('CONTENT-DISPOSITION') 34 | CONTENT_ENCODING = upstr('CONTENT-ENCODING') 35 | CONTENT_LANGUAGE = upstr('CONTENT-LANGUAGE') 36 | CONTENT_LENGTH = upstr('CONTENT-LENGTH') 37 | CONTENT_LOCATION = upstr('CONTENT-LOCATION') 38 | CONTENT_MD5 = upstr('CONTENT-MD5') 39 | CONTENT_RANGE = upstr('CONTENT-RANGE') 40 | CONTENT_TRANSFER_ENCODING = upstr('CONTENT-TRANSFER-ENCODING') 41 | CONTENT_TYPE = upstr('CONTENT-TYPE') 42 | COOKIE = upstr('COOKIE') 43 | DATE = upstr('DATE') 44 | DESTINATION = upstr('DESTINATION') 45 | DIGEST = upstr('DIGEST') 46 | ETAG = upstr('ETAG') 47 | EXPECT = upstr('EXPECT') 48 | EXPIRES = upstr('EXPIRES') 49 | FROM = upstr('FROM') 50 | HOST = upstr('HOST') 51 | IF_MATCH = upstr('IF-MATCH') 52 | IF_MODIFIED_SINCE = upstr('IF-MODIFIED-SINCE') 53 | IF_NONE_MATCH = upstr('IF-NONE-MATCH') 54 | IF_RANGE = upstr('IF-RANGE') 55 | IF_UNMODIFIED_SINCE = upstr('IF-UNMODIFIED-SINCE') 56 | KEEP_ALIVE = upstr('KEEP-ALIVE') 57 | LAST_EVENT_ID = upstr('LAST-EVENT-ID') 58 | LAST_MODIFIED = upstr('LAST-MODIFIED') 59 | 
LINK = upstr('LINK') 60 | LOCATION = upstr('LOCATION') 61 | MAX_FORWARDS = upstr('MAX-FORWARDS') 62 | ORIGIN = upstr('ORIGIN') 63 | PRAGMA = upstr('PRAGMA') 64 | PROXY_AUTHENTICATE = upstr('PROXY_AUTHENTICATE') 65 | PROXY_AUTHORIZATION = upstr('PROXY-AUTHORIZATION') 66 | RANGE = upstr('RANGE') 67 | REFERER = upstr('REFERER') 68 | RETRY_AFTER = upstr('RETRY-AFTER') 69 | SEC_WEBSOCKET_ACCEPT = upstr('SEC-WEBSOCKET-ACCEPT') 70 | SEC_WEBSOCKET_VERSION = upstr('SEC-WEBSOCKET-VERSION') 71 | SEC_WEBSOCKET_PROTOCOL = upstr('SEC-WEBSOCKET-PROTOCOL') 72 | SEC_WEBSOCKET_KEY = upstr('SEC-WEBSOCKET-KEY') 73 | SEC_WEBSOCKET_KEY1 = upstr('SEC-WEBSOCKET-KEY1') 74 | SERVER = upstr('SERVER') 75 | SET_COOKIE = upstr('SET-COOKIE') 76 | TE = upstr('TE') 77 | TRAILER = upstr('TRAILER') 78 | TRANSFER_ENCODING = upstr('TRANSFER-ENCODING') 79 | UPGRADE = upstr('UPGRADE') 80 | WEBSOCKET = upstr('WEBSOCKET') 81 | URI = upstr('URI') 82 | USER_AGENT = upstr('USER-AGENT') 83 | VARY = upstr('VARY') 84 | VIA = upstr('VIA') 85 | WANT_DIGEST = upstr('WANT-DIGEST') 86 | WARNING = upstr('WARNING') 87 | WWW_AUTHENTICATE = upstr('WWW-AUTHENTICATE') 88 | -------------------------------------------------------------------------------- /blender/chardet/escprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 
8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from . import constants 29 | from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, 30 | ISO2022KRSMModel) 31 | from .charsetprober import CharSetProber 32 | from .codingstatemachine import CodingStateMachine 33 | from .compat import wrap_ord 34 | 35 | 36 | class EscCharSetProber(CharSetProber): 37 | def __init__(self): 38 | CharSetProber.__init__(self) 39 | self._mCodingSM = [ 40 | CodingStateMachine(HZSMModel), 41 | CodingStateMachine(ISO2022CNSMModel), 42 | CodingStateMachine(ISO2022JPSMModel), 43 | CodingStateMachine(ISO2022KRSMModel) 44 | ] 45 | self.reset() 46 | 47 | def reset(self): 48 | CharSetProber.reset(self) 49 | for codingSM in self._mCodingSM: 50 | if not codingSM: 51 | continue 52 | codingSM.active = True 53 | codingSM.reset() 54 | self._mActiveSM = len(self._mCodingSM) 55 | self._mDetectedCharset = None 56 | 57 | def get_charset_name(self): 58 | return self._mDetectedCharset 59 | 60 | def get_confidence(self): 61 | if self._mDetectedCharset: 62 | return 0.99 63 | else: 64 | return 0.00 65 | 66 | def 
feed(self, aBuf): 67 | for c in aBuf: 68 | # PY3K: aBuf is a byte array, so c is an int, not a byte 69 | for codingSM in self._mCodingSM: 70 | if not codingSM: 71 | continue 72 | if not codingSM.active: 73 | continue 74 | codingState = codingSM.next_state(wrap_ord(c)) 75 | if codingState == constants.eError: 76 | codingSM.active = False 77 | self._mActiveSM -= 1 78 | if self._mActiveSM <= 0: 79 | self._mState = constants.eNotMe 80 | return self.get_state() 81 | elif codingState == constants.eItsMe: 82 | self._mState = constants.eFoundIt 83 | self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8 84 | return self.get_state() 85 | 86 | return self.get_state() 87 | -------------------------------------------------------------------------------- /blender/chardet/sbcsgroupprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 
22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from .charsetgroupprober import CharSetGroupProber 30 | from .sbcharsetprober import SingleByteCharSetProber 31 | from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, 32 | Latin5CyrillicModel, MacCyrillicModel, 33 | Ibm866Model, Ibm855Model) 34 | from .langgreekmodel import Latin7GreekModel, Win1253GreekModel 35 | from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel 36 | from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel 37 | from .langthaimodel import TIS620ThaiModel 38 | from .langhebrewmodel import Win1255HebrewModel 39 | from .hebrewprober import HebrewProber 40 | 41 | 42 | class SBCSGroupProber(CharSetGroupProber): 43 | def __init__(self): 44 | CharSetGroupProber.__init__(self) 45 | self._mProbers = [ 46 | SingleByteCharSetProber(Win1251CyrillicModel), 47 | SingleByteCharSetProber(Koi8rModel), 48 | SingleByteCharSetProber(Latin5CyrillicModel), 49 | SingleByteCharSetProber(MacCyrillicModel), 50 | SingleByteCharSetProber(Ibm866Model), 51 | SingleByteCharSetProber(Ibm855Model), 52 | SingleByteCharSetProber(Latin7GreekModel), 53 | SingleByteCharSetProber(Win1253GreekModel), 54 | SingleByteCharSetProber(Latin5BulgarianModel), 55 | SingleByteCharSetProber(Win1251BulgarianModel), 56 | SingleByteCharSetProber(Latin2HungarianModel), 57 | SingleByteCharSetProber(Win1250HungarianModel), 58 | SingleByteCharSetProber(TIS620ThaiModel), 59 | ] 60 | hebrewProber = HebrewProber() 61 | logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, 62 | False, hebrewProber) 63 | visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True, 64 | hebrewProber) 65 | 
hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber) 66 | self._mProbers.extend([hebrewProber, logicalHebrewProber, 67 | visualHebrewProber]) 68 | 69 | self.reset() 70 | -------------------------------------------------------------------------------- /blender/chardet/mbcharsetprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # Proofpoint, Inc. 13 | # 14 | # This library is free software; you can redistribute it and/or 15 | # modify it under the terms of the GNU Lesser General Public 16 | # License as published by the Free Software Foundation; either 17 | # version 2.1 of the License, or (at your option) any later version. 18 | # 19 | # This library is distributed in the hope that it will be useful, 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 22 | # Lesser General Public License for more details. 23 | # 24 | # You should have received a copy of the GNU Lesser General Public 25 | # License along with this library; if not, write to the Free Software 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 27 | # 02110-1301 USA 28 | ######################### END LICENSE BLOCK ######################### 29 | 30 | import sys 31 | from . 
import constants 32 | from .charsetprober import CharSetProber 33 | 34 | 35 | class MultiByteCharSetProber(CharSetProber): 36 | def __init__(self): 37 | CharSetProber.__init__(self) 38 | self._mDistributionAnalyzer = None 39 | self._mCodingSM = None 40 | self._mLastChar = [0, 0] 41 | 42 | def reset(self): 43 | CharSetProber.reset(self) 44 | if self._mCodingSM: 45 | self._mCodingSM.reset() 46 | if self._mDistributionAnalyzer: 47 | self._mDistributionAnalyzer.reset() 48 | self._mLastChar = [0, 0] 49 | 50 | def get_charset_name(self): 51 | pass 52 | 53 | def feed(self, aBuf): 54 | aLen = len(aBuf) 55 | for i in range(0, aLen): 56 | codingState = self._mCodingSM.next_state(aBuf[i]) 57 | if codingState == constants.eError: 58 | if constants._debug: 59 | sys.stderr.write(self.get_charset_name() 60 | + ' prober hit error at byte ' + str(i) 61 | + '\n') 62 | self._mState = constants.eNotMe 63 | break 64 | elif codingState == constants.eItsMe: 65 | self._mState = constants.eFoundIt 66 | break 67 | elif codingState == constants.eStart: 68 | charLen = self._mCodingSM.get_current_charlen() 69 | if i == 0: 70 | self._mLastChar[1] = aBuf[0] 71 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen) 72 | else: 73 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], 74 | charLen) 75 | 76 | self._mLastChar[0] = aBuf[aLen - 1] 77 | 78 | if self.get_state() == constants.eDetecting: 79 | if (self._mDistributionAnalyzer.got_enough_data() and 80 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): 81 | self._mState = constants.eFoundIt 82 | 83 | return self.get_state() 84 | 85 | def get_confidence(self): 86 | return self._mDistributionAnalyzer.get_confidence() 87 | -------------------------------------------------------------------------------- /blender/chardet/eucjpprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 
3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | import sys 29 | from . 
import constants 30 | from .mbcharsetprober import MultiByteCharSetProber 31 | from .codingstatemachine import CodingStateMachine 32 | from .chardistribution import EUCJPDistributionAnalysis 33 | from .jpcntx import EUCJPContextAnalysis 34 | from .mbcssm import EUCJPSMModel 35 | 36 | 37 | class EUCJPProber(MultiByteCharSetProber): 38 | def __init__(self): 39 | MultiByteCharSetProber.__init__(self) 40 | self._mCodingSM = CodingStateMachine(EUCJPSMModel) 41 | self._mDistributionAnalyzer = EUCJPDistributionAnalysis() 42 | self._mContextAnalyzer = EUCJPContextAnalysis() 43 | self.reset() 44 | 45 | def reset(self): 46 | MultiByteCharSetProber.reset(self) 47 | self._mContextAnalyzer.reset() 48 | 49 | def get_charset_name(self): 50 | return "EUC-JP" 51 | 52 | def feed(self, aBuf): 53 | aLen = len(aBuf) 54 | for i in range(0, aLen): 55 | # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte 56 | codingState = self._mCodingSM.next_state(aBuf[i]) 57 | if codingState == constants.eError: 58 | if constants._debug: 59 | sys.stderr.write(self.get_charset_name() 60 | + ' prober hit error at byte ' + str(i) 61 | + '\n') 62 | self._mState = constants.eNotMe 63 | break 64 | elif codingState == constants.eItsMe: 65 | self._mState = constants.eFoundIt 66 | break 67 | elif codingState == constants.eStart: 68 | charLen = self._mCodingSM.get_current_charlen() 69 | if i == 0: 70 | self._mLastChar[1] = aBuf[0] 71 | self._mContextAnalyzer.feed(self._mLastChar, charLen) 72 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen) 73 | else: 74 | self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen) 75 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], 76 | charLen) 77 | 78 | self._mLastChar[0] = aBuf[aLen - 1] 79 | 80 | if self.get_state() == constants.eDetecting: 81 | if (self._mContextAnalyzer.got_enough_data() and 82 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): 83 | self._mState = constants.eFoundIt 84 | 85 | return self.get_state() 86 | 87 | def 
get_confidence(self): 88 | contxtCf = self._mContextAnalyzer.get_confidence() 89 | distribCf = self._mDistributionAnalyzer.get_confidence() 90 | return max(contxtCf, distribCf) 91 | -------------------------------------------------------------------------------- /blender/chardet/sjisprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | import sys 29 | from .mbcharsetprober import MultiByteCharSetProber 30 | from .codingstatemachine import CodingStateMachine 31 | from .chardistribution import SJISDistributionAnalysis 32 | from .jpcntx import SJISContextAnalysis 33 | from .mbcssm import SJISSMModel 34 | from . 
import constants 35 | 36 | 37 | class SJISProber(MultiByteCharSetProber): 38 | def __init__(self): 39 | MultiByteCharSetProber.__init__(self) 40 | self._mCodingSM = CodingStateMachine(SJISSMModel) 41 | self._mDistributionAnalyzer = SJISDistributionAnalysis() 42 | self._mContextAnalyzer = SJISContextAnalysis() 43 | self.reset() 44 | 45 | def reset(self): 46 | MultiByteCharSetProber.reset(self) 47 | self._mContextAnalyzer.reset() 48 | 49 | def get_charset_name(self): 50 | return "SHIFT_JIS" 51 | 52 | def feed(self, aBuf): 53 | aLen = len(aBuf) 54 | for i in range(0, aLen): 55 | codingState = self._mCodingSM.next_state(aBuf[i]) 56 | if codingState == constants.eError: 57 | if constants._debug: 58 | sys.stderr.write(self.get_charset_name() 59 | + ' prober hit error at byte ' + str(i) 60 | + '\n') 61 | self._mState = constants.eNotMe 62 | break 63 | elif codingState == constants.eItsMe: 64 | self._mState = constants.eFoundIt 65 | break 66 | elif codingState == constants.eStart: 67 | charLen = self._mCodingSM.get_current_charlen() 68 | if i == 0: 69 | self._mLastChar[1] = aBuf[0] 70 | self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:], 71 | charLen) 72 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen) 73 | else: 74 | self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3 75 | - charLen], charLen) 76 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], 77 | charLen) 78 | 79 | self._mLastChar[0] = aBuf[aLen - 1] 80 | 81 | if self.get_state() == constants.eDetecting: 82 | if (self._mContextAnalyzer.got_enough_data() and 83 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): 84 | self._mState = constants.eFoundIt 85 | 86 | return self.get_state() 87 | 88 | def get_confidence(self): 89 | contxtCf = self._mContextAnalyzer.get_confidence() 90 | distribCf = self._mDistributionAnalyzer.get_confidence() 91 | return max(contxtCf, distribCf) 92 | -------------------------------------------------------------------------------- 
/blender/chardet/charsetgroupprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Communicator client code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from . 
import constants 29 | import sys 30 | from .charsetprober import CharSetProber 31 | 32 | 33 | class CharSetGroupProber(CharSetProber): 34 | def __init__(self): 35 | CharSetProber.__init__(self) 36 | self._mActiveNum = 0 37 | self._mProbers = [] 38 | self._mBestGuessProber = None 39 | 40 | def reset(self): 41 | CharSetProber.reset(self) 42 | self._mActiveNum = 0 43 | for prober in self._mProbers: 44 | if prober: 45 | prober.reset() 46 | prober.active = True 47 | self._mActiveNum += 1 48 | self._mBestGuessProber = None 49 | 50 | def get_charset_name(self): 51 | if not self._mBestGuessProber: 52 | self.get_confidence() 53 | if not self._mBestGuessProber: 54 | return None 55 | # self._mBestGuessProber = self._mProbers[0] 56 | return self._mBestGuessProber.get_charset_name() 57 | 58 | def feed(self, aBuf): 59 | for prober in self._mProbers: 60 | if not prober: 61 | continue 62 | if not prober.active: 63 | continue 64 | st = prober.feed(aBuf) 65 | if not st: 66 | continue 67 | if st == constants.eFoundIt: 68 | self._mBestGuessProber = prober 69 | return self.get_state() 70 | elif st == constants.eNotMe: 71 | prober.active = False 72 | self._mActiveNum -= 1 73 | if self._mActiveNum <= 0: 74 | self._mState = constants.eNotMe 75 | return self.get_state() 76 | return self.get_state() 77 | 78 | def get_confidence(self): 79 | st = self.get_state() 80 | if st == constants.eFoundIt: 81 | return 0.99 82 | elif st == constants.eNotMe: 83 | return 0.01 84 | bestConf = 0.0 85 | self._mBestGuessProber = None 86 | for prober in self._mProbers: 87 | if not prober: 88 | continue 89 | if not prober.active: 90 | if constants._debug: 91 | sys.stderr.write(prober.get_charset_name() 92 | + ' not active\n') 93 | continue 94 | cf = prober.get_confidence() 95 | if constants._debug: 96 | sys.stderr.write('%s confidence = %s\n' % 97 | (prober.get_charset_name(), cf)) 98 | if bestConf < cf: 99 | bestConf = cf 100 | self._mBestGuessProber = prober 101 | if not self._mBestGuessProber: 102 | return 
0.0 103 | return bestConf 104 | # else: 105 | # self._mBestGuessProber = self._mProbers[0] 106 | # return self._mBestGuessProber.get_confidence() 107 | -------------------------------------------------------------------------------- /blender/aiohttp/worker.py: -------------------------------------------------------------------------------- 1 | """Async gunicorn worker for aiohttp.web""" 2 | 3 | import asyncio 4 | import logging 5 | import os 6 | import signal 7 | import sys 8 | import gunicorn.workers.base as base 9 | 10 | __all__ = ('GunicornWebWorker',) 11 | 12 | 13 | class GunicornWebWorker(base.Worker): 14 | 15 | def __init__(self, *args, **kw): # pragma: no cover 16 | super().__init__(*args, **kw) 17 | 18 | self.servers = {} 19 | self.exit_code = 0 20 | 21 | def init_process(self): 22 | # create new event_loop after fork 23 | asyncio.get_event_loop().close() 24 | 25 | self.loop = asyncio.new_event_loop() 26 | asyncio.set_event_loop(self.loop) 27 | 28 | super().init_process() 29 | 30 | def run(self): 31 | self._runner = asyncio.async(self._run(), loop=self.loop) 32 | 33 | try: 34 | self.loop.run_until_complete(self._runner) 35 | finally: 36 | self.loop.close() 37 | 38 | sys.exit(self.exit_code) 39 | 40 | def make_handler(self, app, host, port): 41 | if hasattr(self.cfg, 'debug'): 42 | is_debug = self.cfg.debug 43 | else: 44 | is_debug = self.log.loglevel == logging.DEBUG 45 | 46 | return app.make_handler( 47 | host=host, 48 | port=port, 49 | logger=self.log, 50 | debug=is_debug, 51 | timeout=self.cfg.timeout, 52 | keep_alive=self.cfg.keepalive, 53 | access_log=self.log.access_log, 54 | access_log_format=self.cfg.access_log_format) 55 | 56 | @asyncio.coroutine 57 | def close(self): 58 | if self.servers: 59 | servers = self.servers 60 | self.servers = None 61 | 62 | # stop accepting connections 63 | for server, handler in servers.items(): 64 | self.log.info("Stopping server: %s, connections: %s", 65 | self.pid, len(handler.connections)) 66 | server.close() 67 | 
68 | # stop alive connections 69 | tasks = [ 70 | handler.finish_connections( 71 | timeout=self.cfg.graceful_timeout / 100 * 95) 72 | for handler in servers.values()] 73 | yield from asyncio.wait(tasks, loop=self.loop) 74 | 75 | # stop application 76 | yield from self.wsgi.finish() 77 | 78 | @asyncio.coroutine 79 | def _run(self): 80 | for sock in self.sockets: 81 | handler = self.make_handler(self.wsgi, *sock.cfg_addr) 82 | srv = yield from self.loop.create_server(handler, sock=sock.sock) 83 | self.servers[srv] = handler 84 | 85 | # If our parent changed then we shut down. 86 | pid = os.getpid() 87 | try: 88 | while self.alive: 89 | self.notify() 90 | 91 | if pid == os.getpid() and self.ppid != os.getppid(): 92 | self.alive = False 93 | self.log.info("Parent changed, shutting down: %s", self) 94 | else: 95 | yield from asyncio.sleep(1.0, loop=self.loop) 96 | except (Exception, BaseException, GeneratorExit, KeyboardInterrupt): 97 | pass 98 | 99 | yield from self.close() 100 | 101 | def init_signal(self): 102 | # init new signaling 103 | self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit) 104 | self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit) 105 | self.loop.add_signal_handler(signal.SIGINT, self.handle_quit) 106 | self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch) 107 | self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1) 108 | self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort) 109 | 110 | # Don't let SIGTERM and SIGUSR1 disturb active requests 111 | # by interrupting system calls 112 | signal.siginterrupt(signal.SIGTERM, False) 113 | signal.siginterrupt(signal.SIGUSR1, False) 114 | 115 | def handle_quit(self, sig, frame): 116 | self.alive = False 117 | 118 | def handle_abort(self, sig, frame): 119 | self.alive = False 120 | self.exit_code = 1 121 | -------------------------------------------------------------------------------- /torch/model_conv.lua: 
-------------------------------------------------------------------------------- 1 | require 'nn' 2 | require 'cunn' 3 | require 'cutorch' 4 | require 'cudnn' 5 | cudnn.benchmark = true 6 | require 'nngraph' 7 | require 'Adam' 8 | require 'Mask' 9 | require 'base' 10 | require 'Alert' 11 | 12 | params = { 13 | nfeats={32,64,64}, 14 | nfeats_decoder={32,64,64}, 15 | out_size = 64*8*8, 16 | out_size_decoder = 64*8*8, 17 | conv_layers = 3, 18 | filtsize = 5, 19 | poolsize = 2, 20 | img_size=64, 21 | rnn_layers=2, 22 | l_size=4, 23 | n_glimpses=4, 24 | init_weight=0.05, 25 | batch_size = 4, 26 | max_grad_norm = 0.7 27 | } 28 | 29 | 30 | 31 | local function CGRU(x, prev_h, input_size, output_size) 32 | -- Calculate 2 gates in one go 33 | local i2h = cudnn.SpatialConvolution(input_size, 2*output_size,5,5,1,1,2,2) 34 | local h2h = cudnn.SpatialConvolution(input_size, 2*output_size,5,5,1,1,2,2) 35 | local gates = nn.CAddTable()({i2h(x), h2h(prev_h)}) 36 | 37 | local i2h2 = cudnn.SpatialConvolution(input_size, output_size,5,5,1,1,2,2) 38 | local h2h2 = cudnn.SpatialConvolution(input_size, output_size,5,5,1,1,2,2) 39 | 40 | -- Use Narrrow to slice each gate and apply nonlinearity 41 | local u = nn.Sigmoid()(nn.Narrow(2,1,output_size)(gates)) 42 | local r = nn.Sigmoid()(nn.Narrow(2,output_size+1,output_size)(gates)) 43 | local update = nn.CMulTable()({u, prev_h}) 44 | local reset = nn.CMulTable()({r, prev_h}) 45 | 46 | local next_h = nn.CAddTable()({update, 47 | nn.CMulTable()({ 48 | nn.AddConstant(1,false)(nn.MulConstant(-1,false)(u)), 49 | nn.Tanh()(nn.CAddTable()({i2h2(x), h2h2(reset)})) 50 | }) 51 | }) 52 | 53 | return next_h 54 | end 55 | 56 | local function CGRU2(s, input_size) 57 | -- Calculate 2 gates in one go 58 | local i2h = cudnn.SpatialConvolution(input_size, 2*input_size,5,5,1,1,2,2) 59 | local gates = i2h(s) 60 | 61 | local i2h2 = cudnn.SpatialConvolution(input_size, input_size,5,5,1,1,2,2) 62 | 63 | -- Use Narrrow to slice each gate and apply nonlinearity 64 | 
local u = nn.Sigmoid()(nn.Narrow(2,1,input_size)(gates)) 65 | local r = nn.Sigmoid()(nn.Narrow(2,input_size+1,input_size)(gates)) 66 | local update = nn.CMulTable()({u, s}) 67 | local reset = nn.CMulTable()({r, s}) 68 | 69 | local next_s = nn.CAddTable()({update, 70 | nn.CMulTable()({ 71 | nn.AddConstant(1,false)(nn.MulConstant(-1,false)(u)), 72 | nn.Tanh()(i2h2(reset)) 73 | }) 74 | }) 75 | 76 | return next_s 77 | end 78 | 79 | function core_network() 80 | local x = nn.Identity()() 81 | local prev_s = nn.Identity()() 82 | local prev_l = nn.Identity()() 83 | 84 | local l_transform = nn.Linear(4,8*8*8) 85 | l_transform.weight:normal(0, params.init_weight) 86 | 87 | local g_x = x 88 | local g_l = nn.View(8,8,8):setNumInputDims(1)(l_transform(prev_l)) 89 | 90 | local g = nn.JoinTable(1,3)({g_x,g_l}) 91 | 92 | local h = {[0] = g} 93 | 94 | local next_s = {} 95 | local split = {prev_s:split(params.rnn_layers)} 96 | 97 | for i = 1, params.rnn_layers do 98 | local prev_h = split[i] 99 | local next_c, next_h 100 | next_h = CGRU(h[i - 1], prev_h, 72, 72) 101 | table.insert(next_s, next_h) 102 | h[i] = next_h 103 | end 104 | 105 | local module = nn.gModule({x, prev_l, prev_s}, 106 | {h[params.rnn_layers], nn.Identity()(next_s)}) 107 | module:getParameters():normal(0, params.init_weight) 108 | 109 | return module 110 | end 111 | 112 | function combine_network() 113 | 114 | local h = nn.Identity()() 115 | local l = nn.Identity()() 116 | local l_transform1 = nn.Linear(4,128) 117 | 118 | local l_transform2 = nn.Linear(128,8*8*8) 119 | 120 | local g_l = nn.Tanh()(l_transform1(l)) 121 | 122 | local g_l = nn.View(8,8,8):setNumInputDims(1)(l_transform2(g_l)) 123 | 124 | local g = nn.JoinTable(1,3)({h,g_l}) 125 | 126 | g = CGRU2(g,80) 127 | g = CGRU2(g,80) 128 | g = CGRU2(g,80) 129 | 130 | y = nn.Narrow(2,1,64)(g) 131 | 132 | local module = nn.gModule({h,l},{y}) 133 | 134 | module:getParameters():normal(0, params.init_weight) 135 | 136 | return module 137 | end 138 | 139 | function 
mask_module(batch,feats,rows,cols) 140 | local x = nn.Identity()() 141 | local mask = nn.Mask(batch,feats,rows,cols)(x) 142 | return nn.gModule({x},{nn.CMulTable(true)({x,mask})}) 143 | end 144 | -------------------------------------------------------------------------------- /blender/chardet/sbcharsetprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | import sys 30 | from . 
import constants 31 | from .charsetprober import CharSetProber 32 | from .compat import wrap_ord 33 | 34 | SAMPLE_SIZE = 64 35 | SB_ENOUGH_REL_THRESHOLD = 1024 36 | POSITIVE_SHORTCUT_THRESHOLD = 0.95 37 | NEGATIVE_SHORTCUT_THRESHOLD = 0.05 38 | SYMBOL_CAT_ORDER = 250 39 | NUMBER_OF_SEQ_CAT = 4 40 | POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1 41 | #NEGATIVE_CAT = 0 42 | 43 | 44 | class SingleByteCharSetProber(CharSetProber): 45 | def __init__(self, model, reversed=False, nameProber=None): 46 | CharSetProber.__init__(self) 47 | self._mModel = model 48 | # TRUE if we need to reverse every pair in the model lookup 49 | self._mReversed = reversed 50 | # Optional auxiliary prober for name decision 51 | self._mNameProber = nameProber 52 | self.reset() 53 | 54 | def reset(self): 55 | CharSetProber.reset(self) 56 | # char order of last character 57 | self._mLastOrder = 255 58 | self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT 59 | self._mTotalSeqs = 0 60 | self._mTotalChar = 0 61 | # characters that fall in our sampling range 62 | self._mFreqChar = 0 63 | 64 | def get_charset_name(self): 65 | if self._mNameProber: 66 | return self._mNameProber.get_charset_name() 67 | else: 68 | return self._mModel['charsetName'] 69 | 70 | def feed(self, aBuf): 71 | if not self._mModel['keepEnglishLetter']: 72 | aBuf = self.filter_without_english_letters(aBuf) 73 | aLen = len(aBuf) 74 | if not aLen: 75 | return self.get_state() 76 | for c in aBuf: 77 | order = self._mModel['charToOrderMap'][wrap_ord(c)] 78 | if order < SYMBOL_CAT_ORDER: 79 | self._mTotalChar += 1 80 | if order < SAMPLE_SIZE: 81 | self._mFreqChar += 1 82 | if self._mLastOrder < SAMPLE_SIZE: 83 | self._mTotalSeqs += 1 84 | if not self._mReversed: 85 | i = (self._mLastOrder * SAMPLE_SIZE) + order 86 | model = self._mModel['precedenceMatrix'][i] 87 | else: # reverse the order of the letters in the lookup 88 | i = (order * SAMPLE_SIZE) + self._mLastOrder 89 | model = self._mModel['precedenceMatrix'][i] 90 | self._mSeqCounters[model] += 1 91 
| self._mLastOrder = order 92 | 93 | if self.get_state() == constants.eDetecting: 94 | if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD: 95 | cf = self.get_confidence() 96 | if cf > POSITIVE_SHORTCUT_THRESHOLD: 97 | if constants._debug: 98 | sys.stderr.write('%s confidence = %s, we have a' 99 | 'winner\n' % 100 | (self._mModel['charsetName'], cf)) 101 | self._mState = constants.eFoundIt 102 | elif cf < NEGATIVE_SHORTCUT_THRESHOLD: 103 | if constants._debug: 104 | sys.stderr.write('%s confidence = %s, below negative' 105 | 'shortcut threshhold %s\n' % 106 | (self._mModel['charsetName'], cf, 107 | NEGATIVE_SHORTCUT_THRESHOLD)) 108 | self._mState = constants.eNotMe 109 | 110 | return self.get_state() 111 | 112 | def get_confidence(self): 113 | r = 0.01 114 | if self._mTotalSeqs > 0: 115 | r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs 116 | / self._mModel['mTypicalPositiveRatio']) 117 | r = r * self._mFreqChar / self._mTotalChar 118 | if r >= 1.0: 119 | r = 0.99 120 | return r 121 | -------------------------------------------------------------------------------- /blender/aiohttp/errors.py: -------------------------------------------------------------------------------- 1 | """http related errors.""" 2 | 3 | from asyncio import TimeoutError 4 | 5 | 6 | __all__ = ( 7 | 'DisconnectedError', 'ClientDisconnectedError', 'ServerDisconnectedError', 8 | 9 | 'HttpProcessingError', 'BadHttpMessage', 10 | 'HttpMethodNotAllowed', 'HttpBadRequest', 'HttpProxyError', 11 | 'BadStatusLine', 'LineTooLong', 'InvalidHeader', 12 | 13 | 'ClientError', 'ClientHttpProcessingError', 'ClientConnectionError', 14 | 'ClientOSError', 'ClientTimeoutError', 'ProxyConnectionError', 15 | 'ClientRequestError', 'ClientResponseError', 16 | 'FingerprintMismatch', 17 | 18 | 'WSServerHandshakeError', 'WSClientDisconnectedError') 19 | 20 | 21 | class DisconnectedError(Exception): 22 | """Disconnected.""" 23 | 24 | 25 | class ClientDisconnectedError(DisconnectedError): 26 | """Client 
disconnected.""" 27 | 28 | 29 | class ServerDisconnectedError(DisconnectedError): 30 | """Server disconnected.""" 31 | 32 | 33 | class WSClientDisconnectedError(ClientDisconnectedError): 34 | """Deprecated.""" 35 | 36 | 37 | class ClientError(Exception): 38 | """Base class for client connection errors.""" 39 | 40 | 41 | class ClientHttpProcessingError(ClientError): 42 | """Base class for client http processing errors.""" 43 | 44 | 45 | class ClientRequestError(ClientHttpProcessingError): 46 | """Connection error during sending request.""" 47 | 48 | 49 | class ClientResponseError(ClientHttpProcessingError): 50 | """Connection error during reading response.""" 51 | 52 | 53 | class ClientConnectionError(ClientError): 54 | """Base class for client socket errors.""" 55 | 56 | 57 | class ClientOSError(ClientConnectionError, OSError): 58 | """OSError error.""" 59 | 60 | 61 | class ClientTimeoutError(ClientConnectionError, TimeoutError): 62 | """Client connection timeout error.""" 63 | 64 | 65 | class ProxyConnectionError(ClientConnectionError): 66 | """Proxy connection error. 67 | 68 | Raised in :class:`aiohttp.connector.ProxyConnector` if 69 | connection to proxy can not be established. 70 | """ 71 | 72 | 73 | class HttpProcessingError(Exception): 74 | """Http error. 75 | 76 | Shortcut for raising http errors with custom code, message and headers. 77 | 78 | :param int code: HTTP Error code. 79 | :param str message: (optional) Error message. 80 | :param list of [tuple] headers: (optional) Headers to be sent in response. 
81 | """ 82 | 83 | code = 0 84 | message = '' 85 | headers = None 86 | 87 | def __init__(self, *, code=None, message='', headers=None): 88 | if code is not None: 89 | self.code = code 90 | self.headers = headers 91 | self.message = message 92 | 93 | super().__init__("%s, message='%s'" % (self.code, message)) 94 | 95 | 96 | class WSServerHandshakeError(HttpProcessingError): 97 | """websocket server handshake error.""" 98 | 99 | def __init__(self, message, *, headers=None): 100 | super().__init__(message=message, headers=headers) 101 | 102 | 103 | class HttpProxyError(HttpProcessingError): 104 | """Http proxy error. 105 | 106 | Raised in :class:`aiohttp.connector.ProxyConnector` if 107 | proxy responds with status other than ``200 OK`` 108 | on ``CONNECT`` request. 109 | """ 110 | 111 | 112 | class BadHttpMessage(HttpProcessingError): 113 | 114 | code = 400 115 | message = 'Bad Request' 116 | 117 | def __init__(self, message, *, headers=None): 118 | super().__init__(message=message, headers=headers) 119 | 120 | 121 | class HttpMethodNotAllowed(HttpProcessingError): 122 | 123 | code = 405 124 | message = 'Method Not Allowed' 125 | 126 | 127 | class HttpBadRequest(BadHttpMessage): 128 | 129 | code = 400 130 | message = 'Bad Request' 131 | 132 | 133 | class ContentEncodingError(BadHttpMessage): 134 | """Content encoding error.""" 135 | 136 | 137 | class TransferEncodingError(BadHttpMessage): 138 | """transfer encoding error.""" 139 | 140 | 141 | class LineTooLong(BadHttpMessage): 142 | 143 | def __init__(self, line, limit='Unknown'): 144 | super().__init__( 145 | "got more than %s bytes when reading %s" % (limit, line)) 146 | 147 | 148 | class InvalidHeader(BadHttpMessage): 149 | 150 | def __init__(self, hdr): 151 | super().__init__('Invalid HTTP Header: {}'.format(hdr)) 152 | self.hdr = hdr 153 | 154 | 155 | class BadStatusLine(BadHttpMessage): 156 | 157 | def __init__(self, line=''): 158 | if not line: 159 | line = repr(line) 160 | self.args = line, 161 | self.line = 
line 162 | 163 | 164 | class ParserError(Exception): 165 | """Base parser error.""" 166 | 167 | 168 | class LineLimitExceededParserError(ParserError): 169 | """Line is too long.""" 170 | 171 | def __init__(self, msg, limit): 172 | super().__init__(msg) 173 | self.limit = limit 174 | 175 | 176 | class FingerprintMismatch(ClientConnectionError): 177 | """SSL certificate does not match expected fingerprint.""" 178 | 179 | def __init__(self, expected, got, host, port): 180 | self.expected = expected 181 | self.got = got 182 | self.host = host 183 | self.port = port 184 | 185 | def __repr__(self): 186 | return '<{} expected={} got={} host={} port={}>'.format( 187 | self.__class__.__name__, self.expected, self.got, 188 | self.host, self.port) 189 | -------------------------------------------------------------------------------- /blender/chardet/latin1prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 
22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from .charsetprober import CharSetProber 30 | from .constants import eNotMe 31 | from .compat import wrap_ord 32 | 33 | FREQ_CAT_NUM = 4 34 | 35 | UDF = 0 # undefined 36 | OTH = 1 # other 37 | ASC = 2 # ascii capital letter 38 | ASS = 3 # ascii small letter 39 | ACV = 4 # accent capital vowel 40 | ACO = 5 # accent capital other 41 | ASV = 6 # accent small vowel 42 | ASO = 7 # accent small other 43 | CLASS_NUM = 8 # total classes 44 | 45 | Latin1_CharToClass = ( 46 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 47 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F 48 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 49 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F 50 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 51 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F 52 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 53 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F 54 | OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 55 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F 56 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 57 | ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F 58 | OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 59 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F 60 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 61 | ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F 62 | OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 63 | OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F 64 | UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 65 | OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F 66 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 67 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - 
AF 68 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 69 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF 70 | ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 71 | ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF 72 | ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 73 | ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF 74 | ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 75 | ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF 76 | ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 77 | ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF 78 | ) 79 | 80 | # 0 : illegal 81 | # 1 : very unlikely 82 | # 2 : normal 83 | # 3 : very likely 84 | Latin1ClassModel = ( 85 | # UDF OTH ASC ASS ACV ACO ASV ASO 86 | 0, 0, 0, 0, 0, 0, 0, 0, # UDF 87 | 0, 3, 3, 3, 3, 3, 3, 3, # OTH 88 | 0, 3, 3, 3, 3, 3, 3, 3, # ASC 89 | 0, 3, 3, 3, 1, 1, 3, 3, # ASS 90 | 0, 3, 3, 3, 1, 2, 1, 2, # ACV 91 | 0, 3, 3, 3, 3, 3, 3, 3, # ACO 92 | 0, 3, 1, 3, 1, 1, 1, 3, # ASV 93 | 0, 3, 1, 3, 1, 1, 3, 3, # ASO 94 | ) 95 | 96 | 97 | class Latin1Prober(CharSetProber): 98 | def __init__(self): 99 | CharSetProber.__init__(self) 100 | self.reset() 101 | 102 | def reset(self): 103 | self._mLastCharClass = OTH 104 | self._mFreqCounter = [0] * FREQ_CAT_NUM 105 | CharSetProber.reset(self) 106 | 107 | def get_charset_name(self): 108 | return "windows-1252" 109 | 110 | def feed(self, aBuf): 111 | aBuf = self.filter_with_english_letters(aBuf) 112 | for c in aBuf: 113 | charClass = Latin1_CharToClass[wrap_ord(c)] 114 | freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM) 115 | + charClass] 116 | if freq == 0: 117 | self._mState = eNotMe 118 | break 119 | self._mFreqCounter[freq] += 1 120 | self._mLastCharClass = charClass 121 | 122 | return self.get_state() 123 | 124 | def get_confidence(self): 125 | if self.get_state() == eNotMe: 126 | return 0.01 127 | 128 | total = sum(self._mFreqCounter) 129 | if total < 0.01: 130 | confidence = 0.0 131 | else: 132 | confidence = ((self._mFreqCounter[3] / total) 
133 | - (self._mFreqCounter[1] * 20.0 / total)) 134 | if confidence < 0.0: 135 | confidence = 0.0 136 | # lower the confidence of latin1 so that other more accurate 137 | # detector can take priority. 138 | confidence = confidence * 0.5 139 | return confidence 140 | -------------------------------------------------------------------------------- /blender/asyncio_bridge.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | import asyncio 3 | import heapq 4 | import socket 5 | import subprocess 6 | import time 7 | import os 8 | import sys 9 | 10 | bl_info = {"name": "AsyncIO Event Loop", "category": "Python", "author": "Andreas Klostermann"} 11 | 12 | # Global Event Queue 13 | event_queue = asyncio.Queue() 14 | 15 | 16 | def _run_once(self): 17 | """Run one full iteration of the event loop. 18 | 19 | This calls all currently ready callbacks, polls for I/O, 20 | schedules the resulting callbacks, and finally schedules 21 | 'call_later' callbacks. 22 | 23 | This is copied verbatim from the standard library code, with 24 | only one little change, namely the default timeout value. 25 | """ 26 | # Remove delayed calls that were cancelled from head of queue. 27 | while self._scheduled and self._scheduled[0]._cancelled: 28 | heapq.heappop(self._scheduled) 29 | 30 | # Set default timeout for call to "select" API. In the original 31 | # standard library code this timeout is 0, meaning select with block 32 | # until anything happens. Can't have that with foreign event loops! 33 | timeout = 1.0/30 34 | if self._ready: 35 | timeout = 0 36 | elif self._scheduled: 37 | # Compute the desired timeout. 38 | when = self._scheduled[0]._when 39 | deadline = max(0, when - self.time()) 40 | if timeout is None: 41 | timeout = deadline 42 | else: 43 | timeout = min(timeout, deadline) 44 | event_list = self._selector.select(timeout) 45 | self._process_events(event_list) 46 | 47 | # Handle 'later' callbacks that are ready. 
48 | end_time = self.time() + self._clock_resolution 49 | while self._scheduled: 50 | handle = self._scheduled[0] 51 | if handle._when >= end_time: 52 | break 53 | handle = heapq.heappop(self._scheduled) 54 | self._ready.append(handle) 55 | 56 | # This is the only place where callbacks are actually *called*. 57 | # All other places just add them to ready. 58 | # Note: We run all currently scheduled callbacks, but not any 59 | # callbacks scheduled by callbacks run this time around -- 60 | # they will be run the next time (after another I/O poll). 61 | # Use an idiom that is threadsafe without using locks. 62 | ntodo = len(self._ready) 63 | for i in range(ntodo): 64 | handle = self._ready.popleft() 65 | if not handle._cancelled: 66 | handle._run() 67 | handle = None # Needed to break cycles when an exception 68 | 69 | class AsyncioBridgeOperator(bpy.types.Operator): 70 | """Operator which runs its self from a timer""" 71 | bl_idname = "bpy.start_asyncio_bridge" 72 | bl_label = "Start Asyncio Modal Operator" 73 | 74 | _timer = None 75 | 76 | def modal(self, context, event): 77 | if event.type == 'TIMER': 78 | _run_once(self.loop) 79 | else: 80 | for listener_id, listener in self.listeners.items(): 81 | fire, catch = listener.check_event(event) 82 | if fire: 83 | listener.flag.set() 84 | # In the case of firing an event, it is important to 85 | # quit the listener processing in this loop iteration. 86 | # This assures that only one asyncio.Event flag is 87 | # set per iteration. 
88 | if catch: 89 | return {'RUNNING_MODAL'} 90 | else: 91 | return {'PASS_THROUGH'} 92 | 93 | return {'PASS_THROUGH'} 94 | 95 | def execute(self, context): 96 | self.types = {} 97 | self.listeners = {} 98 | self.listener_id = 0 99 | self.loop = asyncio.get_event_loop() 100 | self.loop.operator = self 101 | wm = context.window_manager 102 | self._timer = wm.event_timer_add(0.005, context.window) 103 | wm.modal_handler_add(self) 104 | return {'RUNNING_MODAL'} 105 | 106 | def cancel(self, context): 107 | wm = context.window_manager 108 | wm.event_timer_remove(self._timer) 109 | 110 | def add_listener(self, listener): 111 | self.listeners[self.listener_id] = listener 112 | listener.id = self.listener_id 113 | self.listener_id += 1 114 | 115 | def remove_listener(self, listener): 116 | del self.listeners[listener.id] 117 | 118 | 119 | class BlenderListener(object): 120 | def __init__(self, event_type=None, callback=None, catch=False): 121 | self.event_type = event_type 122 | self.callback = callback 123 | self.catch = catch 124 | self.event = None 125 | self.operator = asyncio.get_event_loop().operator 126 | self.operator.add_listener(self) 127 | self.flag = asyncio.Event() 128 | 129 | def check_event(self, event): 130 | self.event = event 131 | if self.event_type is not None: 132 | if event.type != self.event_type: 133 | return False, False 134 | if self.callback is not None: 135 | return self.callback(event), self.catch 136 | else: 137 | return True, self.catch 138 | def clear(self): 139 | self.flag.clear() 140 | 141 | @asyncio.coroutine 142 | def wait(self): 143 | yield from self.flag.wait() 144 | self.flag.clear() 145 | 146 | def remove(self): 147 | self.operator.remove_listener(self) 148 | 149 | 150 | def register(): 151 | bpy.utils.register_class(AsyncioBridgeOperator) 152 | 153 | def unregister(): 154 | bpy.utils.unregister_class(AsyncioBridgeOperator) 155 | 156 | asyncio.get_event_loop().operator = None 157 | 158 | if __name__ == "__main__": 159 | register() 
160 | bpy.ops.bpy.start_asyncio_bridge() 161 | -------------------------------------------------------------------------------- /blender/blenderServer.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | import json 3 | import math 4 | import gc 5 | import random 6 | import asyncio 7 | from asyncio import coroutine, sleep, Task, wait_for 8 | import sys 9 | import os 10 | sys.path.append(os.path.dirname(__file__)) 11 | import aiohttp 12 | import asyncio_bridge 13 | import feedparser 14 | from asyncio_bridge import BlenderListener 15 | from mathutils import Vector 16 | 17 | global IND 18 | IND = 0 19 | 20 | def center(): 21 | for i in range(56): 22 | obj = bpy.data.objects[2+i] 23 | bpy.ops.object.select_all(action='DESELECT') 24 | obj.select = True 25 | bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY',center='BOUNDS') 26 | bpy.data.objects[2+i].location.x = 0 27 | bpy.data.objects[2+i].location.y = 0 28 | bpy.data.objects[2+i].location.z = 0 29 | 30 | def augment(): 31 | global IND 32 | IND = IND + 1 33 | r1 = halton(IND,2) 34 | r2 = halton(IND,3) 35 | r3 = halton(IND,5) 36 | for i in range(56): 37 | obj = bpy.data.objects[2+i] 38 | bpy.ops.object.select_all(action='DESELECT') 39 | obj.select = True 40 | bpy.ops.transform.rotate(value = (3.14/2)*(2*r1-1),axis = (0.0,0.0,1.0)) 41 | bpy.ops.transform.rotate(value = (3.14/2)*(2*r2-1),axis = (1.0,0.0,0.0)) 42 | bpy.ops.transform.rotate(value = (3.14/2)*(2*r3-1),axis = (0.0,1.0,0.0)) 43 | s1 = 1 + 0.3*(2*random.random()-1) 44 | s3 = 1 + 0.3*(2*random.random()-1) 45 | s2 = 1 + 0.3*(2*random.random()-1) 46 | obj.scale = [s1,s2,s3] 47 | 48 | def halton(index,base): 49 | result = 0 50 | i = index 51 | f = 1 52 | while (i > 0): 53 | f = f/float(base) 54 | result = result+f*(i%base) 55 | i = math.floor(i/float(base)) 56 | return result 57 | 58 | 59 | def get_view(area,a,b,z,z_look,index,n): 60 | r = 0.4+0.75*(1+a) 61 | theta = math.pi*b 62 | x = r*math.cos(theta) 63 | y = 
r*math.sin(theta) 64 | moveCamera(area,x,y,z,0.5*z_look) 65 | do_render(area,index,n) 66 | 67 | def do_render(area,index,n): 68 | bpy.context.scene.render.filepath='/ramcache/renders/m'+str(n)+'.png' 69 | obj = bpy.data.objects['m'+str(index)] 70 | bpy.ops.object.select_all(action='DESELECT') 71 | obj.select = True 72 | bpy.data.objects['Lamp'].select = True 73 | ctx=bpy.context.copy() 74 | ctx['area']=area 75 | bpy.ops.object.hide_render_clear(ctx) 76 | bpy.ops.render.render(use_viewport=True,write_still=True) 77 | bpy.ops.object.hide_render_set(ctx) 78 | 79 | def look_at(obj_camera, point): 80 | loc_camera = obj_camera.matrix_world.to_translation() 81 | 82 | direction = point - loc_camera 83 | # point the cameras '-Z' and use its 'Y' as up 84 | rot_quat = direction.to_track_quat('-Z', 'Y') 85 | 86 | # assume we're using euler rotation 87 | obj_camera.rotation_euler = rot_quat.to_euler() 88 | 89 | def moveCamera(area,x,y,z,z_look): 90 | bpy.ops.object.select_all(action='DESELECT') 91 | bpy.data.objects['Camera'].select = True 92 | ctx=bpy.context.copy() 93 | ctx['area']=area 94 | bpy.ops.transform.translate(ctx,value=Vector((x,y,z)) - bpy.data.objects['Camera'].location) 95 | look_at(bpy.data.objects['Camera'],Vector((0.0,0.0,z_look))) 96 | 97 | def deleteObjects(): 98 | #bpy.ops.object.mode_set(mode='OBJECT') 99 | bpy.ops.object.select_by_type(type='MESH') 100 | bpy.ops.object.delete(use_global=False) 101 | 102 | for item in bpy.data.meshes: 103 | bpy.data.meshes.remove(item) 104 | 105 | gc.collect() 106 | 107 | def loadObjects(batch): 108 | deleteObjects() 109 | 110 | filepath = "/home/dan/git/shape-classifier/princeton_blend/princeton" 111 | scn = bpy.context.scene 112 | 113 | with bpy.data.libraries.load(filepath + str(batch) +".blend") as (data_from, data_to): 114 | data_to.objects = data_from.objects 115 | 116 | for obj in data_to.objects: 117 | if obj is not None and obj.type != 'CAMERA' and obj.type != 'LAMP': 118 | scn.objects.link(obj) 119 | 120 | for 
obj in bpy.data.objects: 121 | if obj.type == 'CAMERA' or obj.type == 'LAMP': 122 | if obj.name != 'Camera' and obj.name != 'Lamp': 123 | bpy.data.objects.remove(obj) 124 | 125 | center() 126 | augment() 127 | 128 | for area in bpy.context.screen.areas: 129 | if area.type == 'VIEW_3D': 130 | area.spaces[0].region_3d.view_perspective = 'CAMERA' 131 | override=bpy.context.copy() 132 | override['area']=area 133 | bpy.ops.object.select_all(action='DESELECT') 134 | 135 | bpy.data.objects['Lamp'].select = True 136 | bpy.ops.object.hide_render_set(override,unselected=True) 137 | bpy.data.objects['Lamp'].select = False 138 | 139 | 140 | def callMethod(method,params,area): 141 | if method == "moveCamera": 142 | moveCamera(area,params[0],params[1],params[2]) 143 | elif method == "get_view": 144 | get_view(area,params[0],params[1],params[2],params[3],params[4],params[5]) 145 | elif method == "loadObjects": 146 | loadObjects(params[0]) 147 | 148 | @coroutine 149 | def http_server(): 150 | import aiohttp 151 | from aiohttp import web 152 | 153 | ctx_3d = {} 154 | 155 | for area in bpy.context.screen.areas: 156 | if area.type == 'VIEW_3D': 157 | ctx_3d = area 158 | 159 | bpy.context.scene.render.resolution_x = 128 160 | bpy.context.scene.render.resolution_y = 128 161 | bpy.context.scene.render.image_settings.file_format='PNG' 162 | bpy.context.scene.render.image_settings.color_mode='BW' 163 | 164 | world = bpy.context.scene.world 165 | wset = world.light_settings 166 | wset.use_environment_light = True 167 | wset.environment_energy = 0.2 168 | wset.gather_method = 'APPROXIMATE' 169 | 170 | @coroutine 171 | def handle(request): 172 | data = yield from request.text() 173 | obj = json.loads(data) 174 | callMethod(obj['method'],obj['params'],ctx_3d) 175 | return web.Response(text=json.dumps({'jsonrpc':'2.0','id':obj['id'],'result':'GOOD WORK!'})) 176 | 177 | 178 | @coroutine 179 | def init(loop): 180 | app = web.Application(loop=loop) 181 | app.router.add_route('POST', '/', handle) 
182 | 183 | srv = yield from loop.create_server(app.make_handler(), 184 | '127.0.0.1', 9090) 185 | return srv 186 | yield from init(asyncio.get_event_loop()) 187 | 188 | if __name__ == "__main__": 189 | asyncio_bridge.register() 190 | bpy.ops.bpy.start_asyncio_bridge() 191 | Task(http_server()) 192 | -------------------------------------------------------------------------------- /blender/aiohttp/websocket_client.py: -------------------------------------------------------------------------------- 1 | """WebSocket client for asyncio.""" 2 | 3 | import asyncio 4 | 5 | import aiohttp 6 | from .websocket import Message 7 | from .websocket import WebSocketError 8 | from .websocket import MSG_BINARY, MSG_TEXT, MSG_CLOSE, MSG_PING, MSG_PONG 9 | 10 | __all__ = ('ws_connect', 'MsgType') 11 | 12 | 13 | try: 14 | from enum import IntEnum 15 | except ImportError: # pragma: no cover 16 | IntEnum = object 17 | 18 | 19 | class MsgType(IntEnum): 20 | 21 | text = MSG_TEXT 22 | binary = MSG_BINARY 23 | ping = MSG_PING 24 | pong = MSG_PONG 25 | close = MSG_CLOSE 26 | closed = 20 27 | error = 21 28 | 29 | closedMessage = Message(MsgType.closed, None, None) 30 | 31 | 32 | class ClientWebSocketResponse: 33 | 34 | def __init__(self, reader, writer, protocol, 35 | response, timeout, autoclose, autoping, loop): 36 | self._response = response 37 | self._conn = response.connection 38 | 39 | self._writer = writer 40 | self._reader = reader 41 | self._protocol = protocol 42 | self._closed = False 43 | self._closing = False 44 | self._close_code = None 45 | self._timeout = timeout 46 | self._autoclose = autoclose 47 | self._autoping = autoping 48 | self._loop = loop 49 | self._waiting = False 50 | self._exception = None 51 | 52 | @property 53 | def closed(self): 54 | return self._closed 55 | 56 | @property 57 | def close_code(self): 58 | return self._close_code 59 | 60 | @property 61 | def protocol(self): 62 | return self._protocol 63 | 64 | def exception(self): 65 | return self._exception 66 | 
67 | def ping(self, message='b'): 68 | if self._closed: 69 | raise RuntimeError('websocket connection is closed') 70 | self._writer.ping(message) 71 | 72 | def pong(self, message='b'): 73 | if self._closed: 74 | raise RuntimeError('websocket connection is closed') 75 | self._writer.pong(message) 76 | 77 | def send_str(self, data): 78 | if self._closed: 79 | raise RuntimeError('websocket connection is closed') 80 | if not isinstance(data, str): 81 | raise TypeError('data argument must be str (%r)' % type(data)) 82 | self._writer.send(data, binary=False) 83 | 84 | def send_bytes(self, data): 85 | if self._closed: 86 | raise RuntimeError('websocket connection is closed') 87 | if not isinstance(data, (bytes, bytearray, memoryview)): 88 | raise TypeError('data argument must be byte-ish (%r)' % 89 | type(data)) 90 | self._writer.send(data, binary=True) 91 | 92 | @asyncio.coroutine 93 | def close(self, *, code=1000, message=b''): 94 | if not self._closed: 95 | self._closed = True 96 | try: 97 | self._writer.close(code, message) 98 | except asyncio.CancelledError: 99 | self._close_code = 1006 100 | self._response.close(force=True) 101 | raise 102 | except Exception as exc: 103 | self._close_code = 1006 104 | self._exception = exc 105 | self._response.close(force=True) 106 | return True 107 | 108 | if self._closing: 109 | self._response.close(force=True) 110 | return True 111 | 112 | while True: 113 | try: 114 | msg = yield from asyncio.wait_for( 115 | self._reader.read(), self._timeout, loop=self._loop) 116 | except asyncio.CancelledError: 117 | self._close_code = 1006 118 | self._response.close(force=True) 119 | raise 120 | except Exception as exc: 121 | self._close_code = 1006 122 | self._exception = exc 123 | self._response.close(force=True) 124 | return True 125 | 126 | if msg.tp == MsgType.close: 127 | self._close_code = msg.data 128 | self._response.close(force=True) 129 | return True 130 | else: 131 | return False 132 | 133 | @asyncio.coroutine 134 | def 
receive(self): 135 | if self._waiting: 136 | raise RuntimeError('Concurrent call to receive() is not allowed') 137 | 138 | self._waiting = True 139 | try: 140 | while True: 141 | if self._closed: 142 | return closedMessage 143 | 144 | try: 145 | msg = yield from self._reader.read() 146 | except (asyncio.CancelledError, asyncio.TimeoutError): 147 | raise 148 | except WebSocketError as exc: 149 | self._close_code = exc.code 150 | yield from self.close(code=exc.code) 151 | return Message(MsgType.error, exc, None) 152 | except Exception as exc: 153 | self._exception = exc 154 | self._closing = True 155 | self._close_code = 1006 156 | yield from self.close() 157 | return Message(MsgType.error, exc, None) 158 | 159 | if msg.tp == MsgType.close: 160 | self._closing = True 161 | self._close_code = msg.data 162 | if not self._closed and self._autoclose: 163 | yield from self.close() 164 | return msg 165 | elif not self._closed: 166 | if msg.tp == MsgType.ping and self._autoping: 167 | self._writer.pong(msg.data) 168 | elif msg.tp == MsgType.pong and self._autoping: 169 | continue 170 | else: 171 | return msg 172 | finally: 173 | self._waiting = False 174 | 175 | 176 | @asyncio.coroutine 177 | def ws_connect(url, *, protocols=(), timeout=10.0, connector=None, 178 | ws_response_class=ClientWebSocketResponse, autoclose=True, 179 | autoping=True, loop=None): 180 | 181 | if loop is None: 182 | asyncio.get_event_loop() 183 | 184 | if connector is None: 185 | connector = aiohttp.TCPConnector(loop=loop, force_close=True) 186 | 187 | session = aiohttp.ClientSession(loop=loop, connector=connector, 188 | ws_response_class=ws_response_class) 189 | 190 | try: 191 | resp = yield from session.ws_connect( 192 | url, 193 | protocols=protocols, 194 | timeout=timeout, 195 | autoclose=autoclose, 196 | autoping=autoping) 197 | return resp 198 | 199 | finally: 200 | session.detach() 201 | -------------------------------------------------------------------------------- /torch/model_flat.lua: 
--------------------------------------------------------------------------------
require 'nn'
require 'cunn'
require 'cutorch'
require 'cudnn'
cudnn.benchmark = true
require 'nngraph'
require 'Adam'
require 'Mask'
require 'base'
require 'Alert'

-- Global hyper-parameter table shared by every builder in this file.
params = {
   nfeats={32,64,128},
   nfeats_decoder={32,64,128},
   out_size = 128*8*8,
   out_size_decoder = 128*8*8,
   conv_layers = 3,
   filtsize = 5,
   poolsize = 2,
   img_size=64,
   rnn_layers=1,
   fc_layers=0,
   rnn_size=1024,
   g_size=1024,
   l_size=4,
   n_glimpses=4,
   init_weight=0.05,
   batch_size = 4,
   max_grad_norm = 2
}

-- Plain tanh RNN cell (unused by default; lstm() is used instead).
local function rnn(x, prev_h, input_size, output_size)
   local i2h = nn.Linear(input_size, output_size)
   local h2h = nn.Linear(params.rnn_size, output_size)
   local next_h = nn.Tanh()(nn.BatchNormalization(output_size)(nn.CAddTable()({i2h(x), h2h(prev_h)})))
   return i2h, h2h, next_h
end

-- LSTM cell; returns the two Linear modules so the caller can re-init weights.
local function lstm(x, prev_c, prev_h, input_size, output_size)
   -- Calculate all four gates in one go
   local i2h = nn.Linear(input_size, 4*output_size)
   local h2h = nn.Linear(params.rnn_size, 4*output_size)
   local gates = nn.CAddTable()({i2h(x), h2h(prev_h)})

   -- Reshape to (batch_size, n_gates, hid_size), then slice dimension 2
   local reshaped_gates = nn.Reshape(4,output_size)(gates)
   local sliced_gates = nn.SplitTable(2)(reshaped_gates)

   local in_gate = nn.Sigmoid()(nn.SelectTable(1)(sliced_gates))
   local in_transform = nn.Tanh()(nn.BatchNormalization(params.rnn_size)(nn.SelectTable(2)(sliced_gates)))
   local forget_gate = nn.Sigmoid()(nn.SelectTable(3)(sliced_gates))
   local out_gate = nn.Sigmoid()(nn.SelectTable(4)(sliced_gates))

   local next_c = nn.CAddTable()({
      nn.CMulTable()({forget_gate, prev_c}),
      nn.CMulTable()({in_gate, in_transform})
   })
   local next_h = nn.CMulTable()({out_gate, nn.Tanh()(next_c)})

   return i2h, h2h, next_c, next_h
end

-- Glimpse + recurrent core: combines glimpse features x with location
-- prev_l, runs params.rnn_layers LSTM layers over state prev_s.
-- (Removed several blocks of commented-out legacy code: dead code.)
function core_network()
   local x = nn.Identity()()
   local prev_s = nn.Identity()()
   local prev_l = nn.Identity()()

   local l_transform = nn.Linear(4,128)
   l_transform.weight:normal(0, params.init_weight)

   local transforms = {}
   transforms[0] = nn.Linear(params.out_size,params.g_size-128)
   transforms[0].weight:normal(0, params.init_weight)

   local g_x = nn.BatchNormalization(params.g_size-128)(transforms[0](nn.View(params.out_size):setNumInputDims(3)(x)))
   local g_l = l_transform(prev_l)

   local g = nn.JoinTable(1,1)({g_x,g_l})

   local h = {[0] = g}

   local next_s = {}
   -- prev_s packs (c, h) pairs for every layer: 2 * rnn_layers tensors.
   local split = {prev_s:split(2 * params.rnn_layers)}
   local i2h = {}
   local h2h = {}
   for i = 1, params.rnn_layers do
      local prev_c = split[2 * i - 1]
      local prev_h = split[2 * i]
      local next_c, next_h
      if i == 1 then
         i2h[i], h2h[i], next_c, next_h = lstm(h[i - 1], prev_c, prev_h, params.g_size, params.rnn_size)
      else
         i2h[i], h2h[i], next_c, next_h = lstm(h[i - 1], prev_c, prev_h, params.rnn_size, params.rnn_size)
      end
      table.insert(next_s, next_c)
      table.insert(next_s, next_h)
      h[i] = next_h
   end

   local module = nn.gModule({x, prev_l, prev_s},
                             {h[params.rnn_layers], nn.Identity()(next_s)})
   module:getParameters():normal(0, params.init_weight)

   for i = 0,params.fc_layers do
      transforms[i].bias:normal(0, params.init_weight/20)
   end
   for i = 1,params.rnn_layers do
      i2h[i].weight:normal(0,params.init_weight/10)
      h2h[i].weight:normal(0,params.init_weight/10)
      i2h[i].bias:normal(0,params.init_weight/200)
      h2h[i].bias:normal(0,params.init_weight/200)
   end
   return module
end

-- Combines the hidden state h with a location code l into a 128x8x8 map.
function combine_network()

   local slice_width = 64

   local h = nn.Identity()()
   local l = nn.Identity()()

   local transforms = {}
   transforms[1] = nn.Linear(params.rnn_size,params.out_size_decoder/4)
   transforms[2] = nn.Linear(params.out_size_decoder/4,params.out_size_decoder/4)
   transforms[3] = nn.Linear(params.out_size_decoder/4,params.out_size_decoder)
   local l_transforms = {}
   l_transforms[0] = nn.Linear(4,16)
   l_transforms[1] = nn.Linear(16,2*slice_width)
   l_transforms[2] = nn.Linear(2*slice_width,params.out_size_decoder/4)
   l_transforms[3] = nn.Linear(2*slice_width,params.out_size_decoder/4)

   local g_l0 = nn.ReLU(true)(l_transforms[0](l))
   g_l0 = nn.ReLU(true)(l_transforms[1](g_l0))
   local g_l1 = l_transforms[2](g_l0)
   local g_l2 = l_transforms[3](g_l0)

   local h2 = h
   h2 = transforms[1](h2)
   h2 = nn.CMulTable()({h2,g_l1})
   h2 = nn.Tanh(true)(h2)
   h2 = nn.CMulTable()({h2,g_l2})
   h2 = transforms[3](h2)
   local y = h2

   for i = 1,3 do
      transforms[i].weight:normal(0,params.init_weight)
      transforms[i].bias:zero()
   end
   for i = 0,3 do
      l_transforms[i].weight:normal(0,2*params.init_weight)
      l_transforms[i].bias:zero()
   end

   return nn.gModule({h,l},{nn.View(128,8,8):setNumInputDims(1)(y)})
end

-- Elementwise-masks its input with a learned/fixed nn.Mask of the same shape.
function mask_module(batch,feats,rows,cols)
   local x = nn.Identity()()
   local mask = nn.Mask(batch,feats,rows,cols)(x)
   return nn.gModule({x},{nn.CMulTable(true)({x,mask})})
end
-------------------------------------------------------------------------------- /torch/CAE.lua:
--------------------------------------------------------------------------------
require 'nn'
require 'cunn'
require 'cutorch'
require 'cudnn'
cudnn.benchmark = true
require 'nngraph'
require 'Adam'
require 'Mask'
require 'base'
require 'Alert'

-- NOTE: a ~90-line --[[ ]]-- block of commented-out legacy encoder()/decoder()
-- implementations was removed here (dead code).

-- Elementwise-masks its input with an nn.Mask of the same shape.
-- (Intentionally identical to the definition in model_flat.lua; both are
-- global, so the last file loaded wins.)
function mask_module(batch,feats,rows,cols)
   local x = nn.Identity()()
   local mask = nn.Mask(batch,feats,rows,cols)(x)
   return nn.gModule({x},{nn.CMulTable(true)({x,mask})})
end

-- Upsample x2 -> pad -> conv -> batchnorm -> ReLU.
function decoder_layer(feats_in,feats_out,width)
   local filtsize = params.filtsize
   local net = nn.Sequential()
   net:add(nn.SpatialUpSamplingNearest(2))
   net:add(nn.SpatialZeroPadding(2,2,2,2))
   -- FIX: 'transform' leaked into the global environment (missing 'local'),
   -- so concurrent calls clobbered each other's reference.
   local transform = cudnn.SpatialConvolution(feats_in,feats_out, filtsize, filtsize)
   net:add(transform)
   net:add(nn.SpatialBatchNormalization(feats_out))
   net:add(nn.ReLU(true))
   transform.weight:normal(0,params.init_weight/10)
   transform.bias:zero()
   return net
end

-- Final decoder stage: pad -> conv -> batchnorm (no nonlinearity).
function decoder_last_layer(feats_in,feats_out)
   local filtsize = params.filtsize
   local net = nn.Sequential()
   net:add(nn.SpatialZeroPadding(2,2,2,2))
   -- FIX: declared local (was an accidental global).
   local transform = cudnn.SpatialConvolution(feats_in, feats_out, filtsize, filtsize)
   net:add(transform)
   net:add(nn.SpatialBatchNormalization(feats_out))
   transform.weight:normal(0,params.init_weight/10)
   transform.bias:zero()
   return net
end

-- Pad -> conv -> batchnorm -> ReLU -> maxpool.
function encoder_layer(feats_in,feats_out)
   local filtsize = params.filtsize
   local poolsize = params.poolsize
   local net = nn.Sequential()
   -- FIX: declared local (was an accidental global).
   local transform = cudnn.SpatialConvolution(feats_in, feats_out, filtsize, filtsize)
   net:add(nn.SpatialZeroPadding(2,2,2,2))
   net:add(transform)
   net:add(nn.SpatialBatchNormalization(feats_out))
   net:add(nn.ReLU(true))
   net:add(cudnn.SpatialMaxPooling(poolsize,poolsize,poolsize,poolsize))
   local init1 = 60/(feats_in*params.filtsize*params.filtsize)
   transform.weight:normal(0,init1)
   transform.bias:normal(0, init1/120)
   return net
end

function encoder(n_layers)
   local net = nn.Sequential()
   -- NOTE(review): 'feats' aliases params.nfeats, so feats[0]=1 mutates the
   -- shared table (adds key 0) — confirm this is intended.
   local feats = params.nfeats
   feats[0]=1
   for i = 1,n_layers do
      net:add(encoder_layer(feats[i-1],feats[i]))
      if i == 2 then
         net:add(nn.SpatialDropout(0.5))
      end
   end
   return net
end

function decoder(n_layers)
   local net = nn.Sequential()
   local feats = params.nfeats_decoder
   feats[0]=32
   local widths = {32,16,8}
   for i = n_layers,1,-1 do
      net:add(decoder_layer(feats[i],feats[i-1],widths[i]))
   end
   net:add(decoder_last_layer(feats[0],1))
   return net
end

-- Gates an encoder's feature map with two location-conditioned masks.
function loc_transform(encoder,width,n_feats)

   local filtsize = params.filtsize

   local x = nn.Identity()()
   local l = nn.Identity()()

   local transforms = {}
   transforms[1] = nn.Linear(4,256)
   transforms[2] = nn.Linear(256,256)
   transforms[3] = nn.Linear(256,n_feats*width*width)
   transforms[4] = nn.Linear(256,n_feats*width*width)
   transforms[5] = cudnn.SpatialConvolution(n_feats, n_feats, filtsize, filtsize)
   transforms[6] = cudnn.SpatialConvolution(n_feats, n_feats, filtsize, filtsize)

   local l_2 = nn.ReLU(true)(transforms[1](l))
   local l_3 = nn.ReLU(true)(transforms[2](l_2))
   local l_map1 = nn.View(n_feats,width,width)(nn.Tanh(true)(transforms[3](l_3)))
   local l_map2 = nn.View(n_feats,width,width)(nn.Sigmoid(true)(transforms[4](l_3)))

   local x_2 = nn.SpatialZeroPadding(2,2,2,2)(nn.CMulTable()({x,l_map1}))
   local x_3 = nn.ReLU(true)(transforms[5](x_2))
   local x_4 = nn.SpatialZeroPadding(2,2,2,2)(nn.CMulTable()({x_3,l_map2}))
   local out = nn.ReLU(true)(transforms[6](x_4))

   for i = 1,4 do
transforms[i].weight:normal(0,params.init_weight/10) 200 | transforms[i].bias:zero() 201 | end 202 | transforms[5].weight:normal(0,params.init_weight/10) 203 | transforms[5].bias:zero() 204 | transforms[6].weight:normal(0,params.init_weight/10) 205 | transforms[6].bias:zero() 206 | 207 | local module = nn.gModule({x,l},{out}) 208 | 209 | local net = nn.ParallelTable() 210 | net:add(encoder) 211 | net:add(nn.Identity()) 212 | 213 | local out_net = nn.Sequential() 214 | out_net:add(net) 215 | out_net:add(module) 216 | 217 | return out_net 218 | 219 | end -------------------------------------------------------------------------------- /blender/aiohttp/wsgi.py: -------------------------------------------------------------------------------- 1 | """wsgi server. 2 | 3 | TODO: 4 | * proxy protocol 5 | * x-forward security 6 | * wsgi file support (os.sendfile) 7 | """ 8 | 9 | import asyncio 10 | import inspect 11 | import io 12 | import os 13 | import sys 14 | from urllib.parse import urlsplit 15 | 16 | import aiohttp 17 | from aiohttp import server, helpers, hdrs 18 | 19 | __all__ = ('WSGIServerHttpProtocol',) 20 | 21 | 22 | class WSGIServerHttpProtocol(server.ServerHttpProtocol): 23 | """HTTP Server that implements the Python WSGI protocol. 24 | 25 | It uses 'wsgi.async' of 'True'. 'wsgi.input' can behave differently 26 | depends on 'readpayload' constructor parameter. If readpayload is set to 27 | True, wsgi server reads all incoming data into BytesIO object and 28 | sends it as 'wsgi.input' environ var. If readpayload is set to false 29 | 'wsgi.input' is a StreamReader and application should read incoming 30 | data with "yield from environ['wsgi.input'].read()". It defaults to False. 
31 | """ 32 | 33 | SCRIPT_NAME = os.environ.get('SCRIPT_NAME', '') 34 | 35 | def __init__(self, app, readpayload=False, is_ssl=False, *args, **kw): 36 | super().__init__(*args, **kw) 37 | 38 | self.wsgi = app 39 | self.is_ssl = is_ssl 40 | self.readpayload = readpayload 41 | 42 | def create_wsgi_response(self, message): 43 | return WsgiResponse(self.writer, message) 44 | 45 | def create_wsgi_environ(self, message, payload): 46 | uri_parts = urlsplit(message.path) 47 | url_scheme = 'https' if self.is_ssl else 'http' 48 | 49 | environ = { 50 | 'wsgi.input': payload, 51 | 'wsgi.errors': sys.stderr, 52 | 'wsgi.version': (1, 0), 53 | 'wsgi.async': True, 54 | 'wsgi.multithread': False, 55 | 'wsgi.multiprocess': False, 56 | 'wsgi.run_once': False, 57 | 'wsgi.file_wrapper': FileWrapper, 58 | 'wsgi.url_scheme': url_scheme, 59 | 'SERVER_SOFTWARE': aiohttp.HttpMessage.SERVER_SOFTWARE, 60 | 'REQUEST_METHOD': message.method, 61 | 'QUERY_STRING': uri_parts.query or '', 62 | 'RAW_URI': message.path, 63 | 'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version 64 | } 65 | 66 | # authors should be aware that REMOTE_HOST and REMOTE_ADDR 67 | # may not qualify the remote addr: 68 | # http://www.ietf.org/rfc/rfc3875 69 | forward = self.transport.get_extra_info('addr', '127.0.0.1') 70 | script_name = self.SCRIPT_NAME 71 | server = forward 72 | 73 | for hdr_name, hdr_value in message.headers.items(): 74 | if hdr_name == 'HOST': 75 | server = hdr_value 76 | elif hdr_name == 'SCRIPT_NAME': 77 | script_name = hdr_value 78 | elif hdr_name == 'CONTENT-TYPE': 79 | environ['CONTENT_TYPE'] = hdr_value 80 | continue 81 | elif hdr_name == 'CONTENT-LENGTH': 82 | environ['CONTENT_LENGTH'] = hdr_value 83 | continue 84 | 85 | key = 'HTTP_%s' % hdr_name.replace('-', '_') 86 | if key in environ: 87 | hdr_value = '%s,%s' % (environ[key], hdr_value) 88 | 89 | environ[key] = hdr_value 90 | 91 | remote = helpers.parse_remote_addr(forward) 92 | environ['REMOTE_ADDR'] = remote[0] 93 | environ['REMOTE_PORT'] = 
remote[1] 94 | 95 | if isinstance(server, str): 96 | server = server.split(':') 97 | if len(server) == 1: 98 | server.append('80' if url_scheme == 'http' else '443') 99 | 100 | environ['SERVER_NAME'] = server[0] 101 | environ['SERVER_PORT'] = str(server[1]) 102 | 103 | path_info = uri_parts.path 104 | if script_name: 105 | path_info = path_info.split(script_name, 1)[-1] 106 | 107 | environ['PATH_INFO'] = path_info 108 | environ['SCRIPT_NAME'] = script_name 109 | 110 | environ['async.reader'] = self.reader 111 | environ['async.writer'] = self.writer 112 | 113 | return environ 114 | 115 | @asyncio.coroutine 116 | def handle_request(self, message, payload): 117 | """Handle a single HTTP request""" 118 | now = self._loop.time() 119 | 120 | if self.readpayload: 121 | wsgiinput = io.BytesIO() 122 | wsgiinput.write((yield from payload.read())) 123 | wsgiinput.seek(0) 124 | payload = wsgiinput 125 | 126 | environ = self.create_wsgi_environ(message, payload) 127 | response = self.create_wsgi_response(message) 128 | 129 | riter = self.wsgi(environ, response.start_response) 130 | if isinstance(riter, asyncio.Future) or inspect.isgenerator(riter): 131 | riter = yield from riter 132 | 133 | resp = response.response 134 | try: 135 | for item in riter: 136 | if isinstance(item, asyncio.Future): 137 | item = yield from item 138 | yield from resp.write(item) 139 | 140 | yield from resp.write_eof() 141 | finally: 142 | if hasattr(riter, 'close'): 143 | riter.close() 144 | 145 | if resp.keep_alive(): 146 | self.keep_alive(True) 147 | 148 | self.log_access( 149 | message, environ, response.response, self._loop.time() - now) 150 | 151 | 152 | class FileWrapper: 153 | """Custom file wrapper.""" 154 | 155 | def __init__(self, fobj, chunk_size=8192): 156 | self.fobj = fobj 157 | self.chunk_size = chunk_size 158 | if hasattr(fobj, 'close'): 159 | self.close = fobj.close 160 | 161 | def __iter__(self): 162 | return self 163 | 164 | def __next__(self): 165 | data = 
self.fobj.read(self.chunk_size) 166 | if data: 167 | return data 168 | raise StopIteration 169 | 170 | 171 | class WsgiResponse: 172 | """Implementation of start_response() callable as specified by PEP 3333""" 173 | 174 | status = None 175 | 176 | HOP_HEADERS = { 177 | hdrs.CONNECTION, 178 | hdrs.KEEP_ALIVE, 179 | hdrs.PROXY_AUTHENTICATE, 180 | hdrs.PROXY_AUTHORIZATION, 181 | hdrs.TE, 182 | hdrs.TRAILER, 183 | hdrs.TRANSFER_ENCODING, 184 | hdrs.UPGRADE, 185 | } 186 | 187 | def __init__(self, writer, message): 188 | self.writer = writer 189 | self.message = message 190 | 191 | def start_response(self, status, headers, exc_info=None): 192 | if exc_info: 193 | try: 194 | if self.status: 195 | raise exc_info[1] 196 | finally: 197 | exc_info = None 198 | 199 | status_code = int(status.split(' ', 1)[0]) 200 | 201 | self.status = status 202 | resp = self.response = aiohttp.Response( 203 | self.writer, status_code, 204 | self.message.version, self.message.should_close) 205 | resp.HOP_HEADERS = self.HOP_HEADERS 206 | resp.add_headers(*headers) 207 | 208 | if resp.has_chunked_hdr: 209 | resp.enable_chunked_encoding() 210 | 211 | # send headers immediately for websocket connection 212 | if status_code == 101 and resp.upgrade and resp.websocket: 213 | resp.send_headers() 214 | else: 215 | resp._send_headers = True 216 | return self.response.write 217 | -------------------------------------------------------------------------------- /blender/chardet/universaldetector.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 
8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from . import constants 30 | import sys 31 | import codecs 32 | from .latin1prober import Latin1Prober # windows-1252 33 | from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets 34 | from .sbcsgroupprober import SBCSGroupProber # single-byte character sets 35 | from .escprober import EscCharSetProber # ISO-2122, etc. 
36 | import re 37 | 38 | MINIMUM_THRESHOLD = 0.20 39 | ePureAscii = 0 40 | eEscAscii = 1 41 | eHighbyte = 2 42 | 43 | 44 | class UniversalDetector: 45 | def __init__(self): 46 | self._highBitDetector = re.compile(b'[\x80-\xFF]') 47 | self._escDetector = re.compile(b'(\033|~{)') 48 | self._mEscCharSetProber = None 49 | self._mCharSetProbers = [] 50 | self.reset() 51 | 52 | def reset(self): 53 | self.result = {'encoding': None, 'confidence': 0.0} 54 | self.done = False 55 | self._mStart = True 56 | self._mGotData = False 57 | self._mInputState = ePureAscii 58 | self._mLastChar = b'' 59 | if self._mEscCharSetProber: 60 | self._mEscCharSetProber.reset() 61 | for prober in self._mCharSetProbers: 62 | prober.reset() 63 | 64 | def feed(self, aBuf): 65 | if self.done: 66 | return 67 | 68 | aLen = len(aBuf) 69 | if not aLen: 70 | return 71 | 72 | if not self._mGotData: 73 | # If the data starts with BOM, we know it is UTF 74 | if aBuf[:3] == codecs.BOM: 75 | # EF BB BF UTF-8 with BOM 76 | self.result = {'encoding': "UTF-8", 'confidence': 1.0} 77 | elif aBuf[:4] == codecs.BOM_UTF32_LE: 78 | # FF FE 00 00 UTF-32, little-endian BOM 79 | self.result = {'encoding': "UTF-32LE", 'confidence': 1.0} 80 | elif aBuf[:4] == codecs.BOM_UTF32_BE: 81 | # 00 00 FE FF UTF-32, big-endian BOM 82 | self.result = {'encoding': "UTF-32BE", 'confidence': 1.0} 83 | elif aBuf[:4] == b'\xFE\xFF\x00\x00': 84 | # FE FF 00 00 UCS-4, unusual octet order BOM (3412) 85 | self.result = { 86 | 'encoding': "X-ISO-10646-UCS-4-3412", 87 | 'confidence': 1.0 88 | } 89 | elif aBuf[:4] == b'\x00\x00\xFF\xFE': 90 | # 00 00 FF FE UCS-4, unusual octet order BOM (2143) 91 | self.result = { 92 | 'encoding': "X-ISO-10646-UCS-4-2143", 93 | 'confidence': 1.0 94 | } 95 | elif aBuf[:2] == codecs.BOM_LE: 96 | # FF FE UTF-16, little endian BOM 97 | self.result = {'encoding': "UTF-16LE", 'confidence': 1.0} 98 | elif aBuf[:2] == codecs.BOM_BE: 99 | # FE FF UTF-16, big endian BOM 100 | self.result = {'encoding': "UTF-16BE", 
'confidence': 1.0} 101 | 102 | self._mGotData = True 103 | if self.result['encoding'] and (self.result['confidence'] > 0.0): 104 | self.done = True 105 | return 106 | 107 | if self._mInputState == ePureAscii: 108 | if self._highBitDetector.search(aBuf): 109 | self._mInputState = eHighbyte 110 | elif ((self._mInputState == ePureAscii) and 111 | self._escDetector.search(self._mLastChar + aBuf)): 112 | self._mInputState = eEscAscii 113 | 114 | self._mLastChar = aBuf[-1:] 115 | 116 | if self._mInputState == eEscAscii: 117 | if not self._mEscCharSetProber: 118 | self._mEscCharSetProber = EscCharSetProber() 119 | if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt: 120 | self.result = {'encoding': self._mEscCharSetProber.get_charset_name(), 121 | 'confidence': self._mEscCharSetProber.get_confidence()} 122 | self.done = True 123 | elif self._mInputState == eHighbyte: 124 | if not self._mCharSetProbers: 125 | self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(), 126 | Latin1Prober()] 127 | for prober in self._mCharSetProbers: 128 | if prober.feed(aBuf) == constants.eFoundIt: 129 | self.result = {'encoding': prober.get_charset_name(), 130 | 'confidence': prober.get_confidence()} 131 | self.done = True 132 | break 133 | 134 | def close(self): 135 | if self.done: 136 | return 137 | if not self._mGotData: 138 | if constants._debug: 139 | sys.stderr.write('no data received!\n') 140 | return 141 | self.done = True 142 | 143 | if self._mInputState == ePureAscii: 144 | self.result = {'encoding': 'ascii', 'confidence': 1.0} 145 | return self.result 146 | 147 | if self._mInputState == eHighbyte: 148 | proberConfidence = None 149 | maxProberConfidence = 0.0 150 | maxProber = None 151 | for prober in self._mCharSetProbers: 152 | if not prober: 153 | continue 154 | proberConfidence = prober.get_confidence() 155 | if proberConfidence > maxProberConfidence: 156 | maxProberConfidence = proberConfidence 157 | maxProber = prober 158 | if maxProber and (maxProberConfidence > 
MINIMUM_THRESHOLD): 159 | self.result = {'encoding': maxProber.get_charset_name(), 160 | 'confidence': maxProber.get_confidence()} 161 | return self.result 162 | 163 | if constants._debug: 164 | sys.stderr.write('no probers hit minimum threshhold\n') 165 | for prober in self._mCharSetProbers[0].mProbers: 166 | if not prober: 167 | continue 168 | sys.stderr.write('%s confidence = %s\n' % 169 | (prober.get_charset_name(), 170 | prober.get_confidence())) 171 | -------------------------------------------------------------------------------- /blender/aiohttp/web_exceptions.py: -------------------------------------------------------------------------------- 1 | __all__ = ( 2 | 'HTTPException', 3 | 'HTTPError', 4 | 'HTTPRedirection', 5 | 'HTTPSuccessful', 6 | 'HTTPOk', 7 | 'HTTPCreated', 8 | 'HTTPAccepted', 9 | 'HTTPNonAuthoritativeInformation', 10 | 'HTTPNoContent', 11 | 'HTTPResetContent', 12 | 'HTTPPartialContent', 13 | 'HTTPMultipleChoices', 14 | 'HTTPMovedPermanently', 15 | 'HTTPFound', 16 | 'HTTPSeeOther', 17 | 'HTTPNotModified', 18 | 'HTTPUseProxy', 19 | 'HTTPTemporaryRedirect', 20 | 'HTTPClientError', 21 | 'HTTPBadRequest', 22 | 'HTTPUnauthorized', 23 | 'HTTPPaymentRequired', 24 | 'HTTPForbidden', 25 | 'HTTPNotFound', 26 | 'HTTPMethodNotAllowed', 27 | 'HTTPNotAcceptable', 28 | 'HTTPProxyAuthenticationRequired', 29 | 'HTTPRequestTimeout', 30 | 'HTTPConflict', 31 | 'HTTPGone', 32 | 'HTTPLengthRequired', 33 | 'HTTPPreconditionFailed', 34 | 'HTTPRequestEntityTooLarge', 35 | 'HTTPRequestURITooLong', 36 | 'HTTPUnsupportedMediaType', 37 | 'HTTPRequestRangeNotSatisfiable', 38 | 'HTTPExpectationFailed', 39 | 'HTTPServerError', 40 | 'HTTPInternalServerError', 41 | 'HTTPNotImplemented', 42 | 'HTTPBadGateway', 43 | 'HTTPServiceUnavailable', 44 | 'HTTPGatewayTimeout', 45 | 'HTTPVersionNotSupported', 46 | ) 47 | 48 | from .web_reqrep import Response 49 | 50 | 51 | ############################################################ 52 | # HTTP Exceptions 53 | 
############################################################

class HTTPException(Response, Exception):
    """Base for all HTTP exceptions: an Exception that is also a Response.

    Subclasses set ``status_code``; ``empty_body`` suppresses the default
    "<status>: <reason>" text body.
    """

    # You should set in subclasses:
    # status = 200

    status_code = None
    empty_body = False

    def __init__(self, *, headers=None, reason=None,
                 body=None, text=None, content_type=None):
        Response.__init__(self, status=self.status_code,
                          headers=headers, reason=reason,
                          body=body, text=text, content_type=content_type)
        Exception.__init__(self, self.reason)
        if self.body is None and not self.empty_body:
            self.text = "{}: {}".format(self.status, self.reason)


class HTTPError(HTTPException):
    """Base class for exceptions with status codes in the 400s and 500s."""


class HTTPRedirection(HTTPException):
    """Base class for exceptions with status codes in the 300s."""


class HTTPSuccessful(HTTPException):
    """Base class for exceptions with status codes in the 200s."""


class HTTPOk(HTTPSuccessful):
    status_code = 200


class HTTPCreated(HTTPSuccessful):
    status_code = 201


class HTTPAccepted(HTTPSuccessful):
    status_code = 202


class HTTPNonAuthoritativeInformation(HTTPSuccessful):
    status_code = 203


class HTTPNoContent(HTTPSuccessful):
    status_code = 204
    empty_body = True


class HTTPResetContent(HTTPSuccessful):
    status_code = 205
    empty_body = True


class HTTPPartialContent(HTTPSuccessful):
    status_code = 206


############################################################
# 3xx redirection
############################################################


class _HTTPMove(HTTPRedirection):
    """Redirection that carries a mandatory Location header."""

    def __init__(self, location, *, headers=None, reason=None,
                 body=None, text=None, content_type=None):
        if not location:
            raise ValueError("HTTP redirects need a location to redirect to.")
        super().__init__(headers=headers, reason=reason,
                         body=body, text=text, content_type=content_type)
        self.headers['Location'] = location
        self.location = location


class HTTPMultipleChoices(_HTTPMove):
    status_code = 300


class HTTPMovedPermanently(_HTTPMove):
    status_code = 301


class HTTPFound(_HTTPMove):
    status_code = 302


# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(_HTTPMove):
    status_code = 303


class HTTPNotModified(HTTPRedirection):
    # FIXME: this should include a date or etag header
    status_code = 304
    empty_body = True


class HTTPUseProxy(_HTTPMove):
    # Not a move, but looks a little like one
    status_code = 305


class HTTPTemporaryRedirect(_HTTPMove):
    status_code = 307


############################################################
# 4xx client error
############################################################


class HTTPClientError(HTTPError):
    pass


class HTTPBadRequest(HTTPClientError):
    status_code = 400


class HTTPUnauthorized(HTTPClientError):
    status_code = 401


class HTTPPaymentRequired(HTTPClientError):
    status_code = 402


class HTTPForbidden(HTTPClientError):
    status_code = 403


class HTTPNotFound(HTTPClientError):
    status_code = 404


class HTTPMethodNotAllowed(HTTPClientError):
    status_code = 405

    def __init__(self, method, allowed_methods, *, headers=None, reason=None,
                 body=None, text=None, content_type=None):
        # RFC 7231 requires an Allow header listing the permitted methods
        allow = ','.join(sorted(allowed_methods))
        super().__init__(headers=headers, reason=reason,
                         body=body, text=text, content_type=content_type)
        self.headers['Allow'] = allow
        self.allowed_methods = allowed_methods
        self.method = method.upper()


class HTTPNotAcceptable(HTTPClientError):
    status_code = 406


class HTTPProxyAuthenticationRequired(HTTPClientError):
    status_code = 407


class HTTPRequestTimeout(HTTPClientError):
    status_code = 408


class HTTPConflict(HTTPClientError):
    status_code = 409


class HTTPGone(HTTPClientError):
    status_code = 410


class HTTPLengthRequired(HTTPClientError):
    status_code = 411


class HTTPPreconditionFailed(HTTPClientError):
    status_code = 412


class HTTPRequestEntityTooLarge(HTTPClientError):
    status_code = 413


class HTTPRequestURITooLong(HTTPClientError):
    status_code = 414


class HTTPUnsupportedMediaType(HTTPClientError):
    status_code = 415


class HTTPRequestRangeNotSatisfiable(HTTPClientError):
    status_code = 416


class HTTPExpectationFailed(HTTPClientError):
    status_code = 417


############################################################
# 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.
265 | 266 | 267 | class HTTPServerError(HTTPError): 268 | pass 269 | 270 | 271 | class HTTPInternalServerError(HTTPServerError): 272 | status_code = 500 273 | 274 | 275 | class HTTPNotImplemented(HTTPServerError): 276 | status_code = 501 277 | 278 | 279 | class HTTPBadGateway(HTTPServerError): 280 | status_code = 502 281 | 282 | 283 | class HTTPServiceUnavailable(HTTPServerError): 284 | status_code = 503 285 | 286 | 287 | class HTTPGatewayTimeout(HTTPServerError): 288 | status_code = 504 289 | 290 | 291 | class HTTPVersionNotSupported(HTTPServerError): 292 | status_code = 505 293 | -------------------------------------------------------------------------------- /blender/aiohttp/web.py: -------------------------------------------------------------------------------- 1 | from . import web_reqrep 2 | from . import web_exceptions 3 | from . import web_urldispatcher 4 | from . import web_ws 5 | from .web_reqrep import * # noqa 6 | from .web_exceptions import * # noqa 7 | from .web_urldispatcher import * # noqa 8 | from .web_ws import * # noqa 9 | from .protocol import HttpVersion # noqa 10 | 11 | __all__ = (web_reqrep.__all__ + 12 | web_exceptions.__all__ + 13 | web_urldispatcher.__all__ + 14 | web_ws.__all__ + 15 | ('Application', 'RequestHandler', 16 | 'RequestHandlerFactory', 'HttpVersion')) 17 | 18 | 19 | import asyncio 20 | 21 | from . 
class RequestHandler(ServerHttpProtocol):
    """HTTP protocol handler for a single client connection.

    For each parsed request it resolves a route through the
    application's router, wraps the matched handler in the middleware
    chain, runs it, and writes the response.  It registers itself with
    *manager* (a RequestHandlerFactory) so live connections can be
    tracked and shut down gracefully.
    """

    # Method/path of the request currently being processed.  Used only
    # by __repr__ and reset to 'none' once the request is finished.
    _meth = 'none'
    _path = 'none'

    def __init__(self, manager, app, router, *,
                 secure_proxy_ssl_header=None, **kwargs):
        super().__init__(**kwargs)

        self._manager = manager
        self._app = app
        self._router = router
        self._middlewares = app.middlewares
        self._secure_proxy_ssl_header = secure_proxy_ssl_header

    def __repr__(self):
        return "<{} {}:{} {}>".format(
            self.__class__.__name__, self._meth, self._path,
            'connected' if self.transport is not None else 'disconnected')

    def connection_made(self, transport):
        super().connection_made(transport)

        # let the factory track this connection for finish_connections()
        self._manager.connection_made(self, transport)

    def connection_lost(self, exc):
        # deregister from the factory before the base class tears down
        self._manager.connection_lost(self, exc)

        super().connection_lost(exc)

    @asyncio.coroutine
    def handle_request(self, message, payload):
        """Handle one parsed HTTP request (*message*) and its *payload*."""
        if self.access_log:
            # start time, used for the access-log duration below
            now = self._loop.time()

        app = self._app
        request = Request(
            app, message, payload,
            self.transport, self.reader, self.writer,
            secure_proxy_ssl_header=self._secure_proxy_ssl_header)
        self._meth = request.method
        self._path = request.path
        try:
            match_info = yield from self._router.resolve(request)

            assert isinstance(match_info, AbstractMatchInfo), match_info

            resp = None
            request._match_info = match_info
            # "Expect: 100-continue" gives the route a chance to answer
            # (possibly with an early error response) before the handler
            expect = request.headers.get(hdrs.EXPECT)
            if expect and expect.lower() == "100-continue":
                resp = (
                    yield from match_info.route.handle_expect_header(request))

            if resp is None:
                handler = match_info.handler
                # wrap the handler in the middleware chain; reversed()
                # so the first middleware in app.middlewares ends up
                # outermost (called first)
                for factory in reversed(self._middlewares):
                    handler = yield from factory(app, handler)
                resp = yield from handler(request)

            assert isinstance(resp, StreamResponse), \
                ("Handler {!r} should return response instance, "
                 "got {!r} [middlewares {!r}]").format(
                     match_info.handler, type(resp), self._middlewares)
        except HTTPException as exc:
            # HTTP exceptions double as responses; send them as-is
            resp = exc

        resp_msg = resp.start(request)
        yield from resp.write_eof()

        # notify server about keep-alive
        self.keep_alive(resp_msg.keep_alive())

        # log access
        if self.access_log:
            self.log_access(message, None, resp_msg, self._loop.time() - now)

        # for repr
        self._meth = 'none'
        self._path = 'none'
class RequestHandlerFactory:
    """Protocol factory producing RequestHandler instances.

    Keeps a mapping of every live handler to its transport so that all
    open connections can be shut down gracefully through
    finish_connections().
    """

    def __init__(self, app, router, *,
                 handler=RequestHandler, loop=None,
                 secure_proxy_ssl_header=None, **kwargs):
        self._app = app
        self._router = router
        self._handler = handler
        self._loop = loop
        self._connections = {}
        self._secure_proxy_ssl_header = secure_proxy_ssl_header
        self._kwargs = kwargs
        # fall back to the application's logger unless one was given
        self._kwargs.setdefault('logger', app.logger)

    @property
    def secure_proxy_ssl_header(self):
        return self._secure_proxy_ssl_header

    @property
    def connections(self):
        # snapshot of currently connected handlers
        return list(self._connections)

    def connection_made(self, handler, transport):
        self._connections[handler] = transport

    def connection_lost(self, handler, exc=None):
        self._connections.pop(handler, None)

    @asyncio.coroutine
    def finish_connections(self, timeout=None):
        """Gracefully close every tracked connection."""
        # ask handlers to close within 90% of the graceful timeout so
        # the polling loop below keeps some slack before the deadline
        soft_timeout = timeout / 100 * 90 if timeout else None

        for handler in self._connections.keys():
            handler.closing(timeout=soft_timeout)

        @asyncio.coroutine
        def cleanup():
            # poll with exponential back-off (capped growth at 5s)
            # until every handler has deregistered via connection_lost()
            delay = 0.05
            while self._connections:
                yield from asyncio.sleep(delay, loop=self._loop)
                if delay < 5:
                    delay = delay * 2

        if timeout:
            try:
                yield from asyncio.wait_for(
                    cleanup(), timeout, loop=self._loop)
            except asyncio.TimeoutError:
                self._app.logger.warning(
                    "Not all connections are closed (pending: %d)",
                    len(self._connections))

        # hard-close whatever survived the grace period
        for transport in self._connections.values():
            transport.close()

        self._connections.clear()

    def __call__(self):
        # asyncio protocol-factory entry point
        return self._handler(
            self, self._app, self._router, loop=self._loop,
            secure_proxy_ssl_header=self._secure_proxy_ssl_header,
            **self._kwargs)
class Application(dict):
    """The web application object.

    A dict subclass: arbitrary application-scoped state may be stored
    on it via item access.  Holds the router, the middleware chain and
    a list of finish callbacks executed at shutdown via finish().
    """

    def __init__(self, *, logger=web_logger, loop=None,
                 router=None, handler_factory=RequestHandlerFactory,
                 middlewares=()):
        if loop is None:
            loop = asyncio.get_event_loop()
        if router is None:
            router = UrlDispatcher()
        assert isinstance(router, AbstractRouter), router

        self._router = router
        self._handler_factory = handler_factory
        self._finish_callbacks = []
        self._loop = loop
        self.logger = logger

        # each middleware must be a coroutine function (app, handler)
        for factory in middlewares:
            assert asyncio.iscoroutinefunction(factory), factory
        self._middlewares = list(middlewares)

    @property
    def router(self):
        return self._router

    @property
    def loop(self):
        return self._loop

    @property
    def middlewares(self):
        return self._middlewares

    def make_handler(self, **kwargs):
        """Create a protocol factory for loop.create_server()."""
        return self._handler_factory(
            self, self.router, loop=self.loop, **kwargs)

    @asyncio.coroutine
    def finish(self):
        """Run the registered finish callbacks.

        Callbacks run in reverse registration order (register_on_finish
        inserts at position 0).  A failing callback is reported through
        the loop's exception handler and does not prevent the remaining
        callbacks from running.
        """
        callbacks = self._finish_callbacks
        self._finish_callbacks = []

        for (cb, args, kwargs) in callbacks:
            try:
                res = cb(self, *args, **kwargs)
                if (asyncio.iscoroutine(res) or
                        isinstance(res, asyncio.Future)):
                    yield from res
            except Exception as exc:
                self._loop.call_exception_handler({
                    'message': "Error in finish callback",
                    'exception': exc,
                    'application': self,
                })

    def register_on_finish(self, func, *args, **kwargs):
        """Register *func* to be called (with *args/**kwargs) by finish()."""
        self._finish_callbacks.insert(0, (func, args, kwargs))

    def __call__(self):
        """gunicorn compatibility"""
        return self

    def __repr__(self):
        # BUG FIX: this previously returned the empty string, which made
        # Application objects invisible in logs and debugger output.
        return "<Application>"
from .constants import eStart, eError, eItsMe

# Escape-sequence coding state machines.
#
# Each *_cls table maps every byte value 0x00-0xff to a character
# class; each *_st table is the transition table of the detection state
# machine, indexed by (current_state * classFactor + byte_class).  The
# *SMModel dicts bundle a class table, the number of classes
# ('classFactor'), the transition table and the char-length table for
# final states, under the codec's canonical name.

HZ_cls = (
1,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,0,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,4,0,5,2,0,  # 78 - 7f
1,1,1,1,1,1,1,1,  # 80 - 87
1,1,1,1,1,1,1,1,  # 88 - 8f
1,1,1,1,1,1,1,1,  # 90 - 97
1,1,1,1,1,1,1,1,  # 98 - 9f
1,1,1,1,1,1,1,1,  # a0 - a7
1,1,1,1,1,1,1,1,  # a8 - af
1,1,1,1,1,1,1,1,  # b0 - b7
1,1,1,1,1,1,1,1,  # b8 - bf
1,1,1,1,1,1,1,1,  # c0 - c7
1,1,1,1,1,1,1,1,  # c8 - cf
1,1,1,1,1,1,1,1,  # d0 - d7
1,1,1,1,1,1,1,1,  # d8 - df
1,1,1,1,1,1,1,1,  # e0 - e7
1,1,1,1,1,1,1,1,  # e8 - ef
1,1,1,1,1,1,1,1,  # f0 - f7
1,1,1,1,1,1,1,1,  # f8 - ff
)

HZ_st = (
eStart,eError,     3,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,     4,eError,# 10-17
     5,eError,     6,eError,     5,     5,     4,eError,# 18-1f
     4,eError,     4,     4,     4,eError,     4,eError,# 20-27
     4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
)

HZCharLenTable = (0, 0, 0, 0, 0, 0)

HZSMModel = {'classTable': HZ_cls,
             'classFactor': 6,
             'stateTable': HZ_st,
             'charLenTable': HZCharLenTable,
             'name': "HZ-GB-2312"}

ISO2022CN_cls = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,3,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,4,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

ISO2022CN_st = (
eStart,     3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eError,eError,eError,     4,eError,# 18-1f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
     5,     6,eError,eError,eError,eError,eError,eError,# 28-2f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
)

ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
                    'classFactor': 9,
                    'stateTable': ISO2022CN_st,
                    'charLenTable': ISO2022CNCharLenTable,
                    'name': "ISO-2022-CN"}

ISO2022JP_cls = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,2,2,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,7,0,0,0,  # 20 - 27
3,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
6,0,4,0,8,0,0,0,  # 40 - 47
0,9,5,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

ISO2022JP_st = (
eStart,     3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
eError,     5,eError,eError,eError,     4,eError,eError,# 20-27
eError,eError,eError,     6,eItsMe,eError,eItsMe,eError,# 28-2f
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
)

ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
                    'classFactor': 10,
                    'stateTable': ISO2022JP_st,
                    'charLenTable': ISO2022JPCharLenTable,
                    'name': "ISO-2022-JP"}

ISO2022KR_cls = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,3,0,0,0,  # 20 - 27
0,4,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,5,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

ISO2022KR_st = (
eStart,     3,eError,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eError,     4,eError,eError,# 10-17
eError,eError,eError,eError,     5,eError,eError,eError,# 18-1f
eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
)

ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)

ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
                    'classFactor': 6,
                    'stateTable': ISO2022KR_st,
                    'charLenTable': ISO2022KRCharLenTable,
                    'name': "ISO-2022-KR"}

# flake8: noqa
# number of times receive() may be called on a closed connection before
# it raises instead of returning closedMessage
THRESHOLD_CONNLOST_ACCESS = 5


class WebSocketResponse(StreamResponse):
    """Server-side websocket response.

    Performs the websocket handshake in start(), then exposes
    send/receive primitives and a closing-handshake state machine
    (close()/receive()).
    """

    def __init__(self, *,
                 timeout=10.0, autoclose=True, autoping=True, protocols=()):
        super().__init__(status=101)
        self._protocols = protocols       # sub-protocols offered to the peer
        self._protocol = None             # negotiated sub-protocol
        self._writer = None
        self._reader = None
        self._closed = False              # close frame sent
        self._closing = False             # close initiated / observed
        self._conn_lost = 0               # receive() calls after close
        self._close_code = None
        self._loop = None
        self._waiting = False             # a receive() is in progress
        self._exception = None
        self._timeout = timeout           # closing-handshake read timeout
        self._autoclose = autoclose       # auto-reply to close frames
        self._autoping = autoping         # auto-reply to ping frames

    def start(self, request):
        """Perform the websocket handshake and start the response."""
        # make pre-check to don't hide it by do_handshake() exceptions
        resp_impl = self._start_pre_check(request)
        if resp_impl is not None:
            return resp_impl

        try:
            status, headers, parser, writer, protocol = do_handshake(
                request.method, request.headers, request.transport,
                self._protocols)
        except HttpProcessingError as err:
            # translate low-level handshake failures into HTTP errors
            if err.code == 405:
                raise HTTPMethodNotAllowed(
                    request.method, [hdrs.METH_GET], body=b'')
            elif err.code == 400:
                raise HTTPBadRequest(text=err.message, headers=err.headers)
            else:  # pragma: no cover
                raise HTTPInternalServerError() from err

        if self.status != status:
            self.set_status(status)
        for k, v in headers:
            self.headers[k] = v
        self.force_close()

        resp_impl = super().start(request)

        self._reader = request._reader.set_parser(parser)
        self._writer = writer
        self._protocol = protocol
        self._loop = request.app.loop

        return resp_impl

    def can_start(self, request):
        """Return (ok, protocol) without actually starting the handshake."""
        if self._writer is not None:
            raise RuntimeError('Already started')
        try:
            _, _, _, _, protocol = do_handshake(
                request.method, request.headers, request.transport,
                self._protocols)
        except HttpProcessingError:
            return False, None
        else:
            return True, protocol

    @property
    def closed(self):
        return self._closed

    @property
    def close_code(self):
        return self._close_code

    @property
    def protocol(self):
        return self._protocol

    def exception(self):
        # last exception raised while reading/closing, if any
        return self._exception

    def ping(self, message='b'):
        # NOTE(review): the default payload 'b' (a str) looks odd —
        # confirm against the writer's ping() API.
        if self._writer is None:
            raise RuntimeError('Call .start() first')
        if self._closed:
            raise RuntimeError('websocket connection is closing')
        self._writer.ping(message)

    def pong(self, message='b'):
        # unsolicited pong
        if self._writer is None:
            raise RuntimeError('Call .start() first')
        if self._closed:
            raise RuntimeError('websocket connection is closing')
        self._writer.pong(message)

    def send_str(self, data):
        """Send a text frame; *data* must be str."""
        if self._writer is None:
            raise RuntimeError('Call .start() first')
        if self._closed:
            raise RuntimeError('websocket connection is closing')
        if not isinstance(data, str):
            raise TypeError('data argument must be str (%r)' % type(data))
        self._writer.send(data, binary=False)

    def send_bytes(self, data):
        """Send a binary frame; *data* must be bytes-like."""
        if self._writer is None:
            raise RuntimeError('Call .start() first')
        if self._closed:
            raise RuntimeError('websocket connection is closing')
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError('data argument must be byte-ish (%r)' %
                            type(data))
        self._writer.send(data, binary=True)

    @asyncio.coroutine
    def wait_closed(self):  # pragma: no cover
        warnings.warn(
            'wait_closed() coroutine is deprecated. use close() instead',
            DeprecationWarning)

        return (yield from self.close())

    @asyncio.coroutine
    def write_eof(self):
        if self._eof_sent:
            return
        if self._resp_impl is None:
            raise RuntimeError("Response has not been started")

        # closing the websocket performs the protocol-level EOF
        yield from self.close()
        self._eof_sent = True

    @asyncio.coroutine
    def close(self, *, code=1000, message=b''):
        """Initiate the closing handshake.

        Sends a close frame, then drains incoming frames until the
        peer's close frame arrives (or the timeout/error path sets
        close code 1006).  Returns True if this call performed the
        close, False if already closed.
        """
        if self._writer is None:
            raise RuntimeError('Call .start() first')

        if not self._closed:
            self._closed = True
            try:
                self._writer.close(code, message)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = 1006
                raise
            except Exception as exc:
                self._close_code = 1006
                self._exception = exc
                return True

            if self._closing:
                # peer already sent its close frame; nothing to wait for
                return True

            while True:
                try:
                    msg = yield from asyncio.wait_for(
                        self._reader.read(),
                        timeout=self._timeout, loop=self._loop)
                except asyncio.CancelledError:
                    self._close_code = 1006
                    raise
                except Exception as exc:
                    self._close_code = 1006
                    self._exception = exc
                    return True

                if msg.tp == MsgType.close:
                    self._close_code = msg.data
                    return True
        else:
            return False

    @asyncio.coroutine
    def receive(self):
        """Receive the next message, handling ping/pong/close frames.

        Only one receive() may be pending at a time.
        """
        if self._reader is None:
            raise RuntimeError('Call .start() first')
        if self._waiting:
            raise RuntimeError('Concurrent call to receive() is not allowed')

        self._waiting = True
        try:
            while True:
                if self._closed:
                    # tolerate a few reads after close, then complain
                    self._conn_lost += 1
                    if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
                        raise RuntimeError('WebSocket connection is closed.')
                    return closedMessage

                try:
                    msg = yield from self._reader.read()
                except (asyncio.CancelledError, asyncio.TimeoutError):
                    raise
                except WebSocketError as exc:
                    self._close_code = exc.code
                    yield from self.close(code=exc.code)
                    return Message(MsgType.error, exc, None)
                except ClientDisconnectedError:
                    self._closed = True
                    self._close_code = 1006
                    return Message(MsgType.close, None, None)
                except Exception as exc:
                    self._exception = exc
                    self._closing = True
                    self._close_code = 1006
                    yield from self.close()
                    return Message(MsgType.error, exc, None)

                if msg.tp == MsgType.close:
                    self._closing = True
                    self._close_code = msg.data
                    if not self._closed and self._autoclose:
                        yield from self.close()
                    return msg
                elif not self._closed:
                    if msg.tp == MsgType.ping and self._autoping:
                        # answer pings transparently
                        self._writer.pong(msg.data)
                    elif msg.tp == MsgType.pong and self._autoping:
                        continue
                    else:
                        return msg
        finally:
            self._waiting = False

    @asyncio.coroutine
    def receive_msg(self):  # pragma: no cover
        warnings.warn(
            'receive_msg() coroutine is deprecated. use receive() instead',
            DeprecationWarning)
        return (yield from self.receive())

    @asyncio.coroutine
    def receive_str(self):
        """Receive a message and require it to be a text frame."""
        msg = yield from self.receive()
        if msg.tp != MsgType.text:
            raise TypeError(
                "Received message {}:{!r} is not str".format(msg.tp, msg.data))
        return msg.data

    @asyncio.coroutine
    def receive_bytes(self):
        """Receive a message and require it to be a binary frame."""
        msg = yield from self.receive()
        if msg.tp != MsgType.binary:
            raise TypeError(
                "Received message {}:{!r} is not bytes".format(msg.tp,
                                                               msg.data))
        return msg.data

    def write(self, data):
        # raw writes bypass websocket framing; forbid them
        raise RuntimeError("Cannot call .write() for websocket")
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3


class CharDistributionAnalysis:
    """Character-frequency distribution analyser.

    Counts how many of the observed two-byte characters fall into the
    most frequent 512 character orders of a language-specific frequency
    table, and derives a confidence value from that ratio.
    """

    def __init__(self):
        # Mapping table from char order (see get_order()) to frequency
        # order; supplied by subclasses.
        self._mCharToFreqOrder = None
        self._mTableSize = None  # size of the table above
        # Language-specific constant used in the confidence computation;
        # see Mozilla's Universal Charset Detection write-up.
        self._mTypicalDistributionRatio = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        self._mDone = False       # True once a conclusion has been made
        self._mTotalChars = 0     # total characters encountered
        self._mFreqChars = 0      # characters with frequency order < 512

    def feed(self, aBuf, aCharLen):
        """feed a character with known length"""
        # only 2-byte characters take part in the distribution analysis
        order = self.get_order(aBuf) if aCharLen == 2 else -1
        if order < 0:
            return
        self._mTotalChars += 1
        if order < self._mTableSize and self._mCharToFreqOrder[order] < 512:
            self._mFreqChars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # not enough characters in our consideration range yet
        if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
            return SURE_NO

        if self._mTotalChars != self._mFreqChars:
            r = self._mFreqChars / (
                (self._mTotalChars - self._mFreqChars)
                * self._mTypicalDistributionRatio)
            if r < SURE_YES:
                return r

        # normalise: never claim 100% certainty
        return SURE_YES

    def got_enough_data(self):
        # a certain amount of data suffices to draw a conclusion early
        return self._mTotalChars > ENOUGH_DATA_THRESHOLD

    def get_order(self, aBuf):
        # Subclasses map an encoded character to a numeric "order" so
        # several encodings of one language can share a frequency
        # table; the base class recognises nothing.
        return -1


class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCTWCharToFreqOrder
        self._mTableSize = EUCTW_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # euc-TW: first byte 0xc4--0xfe, second byte 0xa1--0xfe
        # (validity already guaranteed by the state machine)
        lead = wrap_ord(aBuf[0])
        if lead < 0xC4:
            return -1
        return 94 * (lead - 0xC4) + wrap_ord(aBuf[1]) - 0xA1


class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCKRCharToFreqOrder
        self._mTableSize = EUCKR_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # euc-KR: first byte 0xb0--0xfe, second byte 0xa1--0xfe
        # (validity already guaranteed by the state machine)
        lead = wrap_ord(aBuf[0])
        if lead < 0xB0:
            return -1
        return 94 * (lead - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
class Big5DistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = Big5CharToFreqOrder
        self._mTableSize = BIG5_TABLE_SIZE
        self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # big5: lead byte 0xa4--0xfe; trail byte 0x40--0x7e or
        # 0xa1--0xfe (validity already guaranteed by the state machine)
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if lead < 0xA4:
            return -1
        row = 157 * (lead - 0xA4)
        if trail >= 0xA1:
            # upper trail range follows the 63 cells of the lower range
            return row + trail - 0xA1 + 63
        return row + trail - 0x40
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # euc-JP: first byte 0xa0--0xfe, second byte 0xa1--0xfe
        # (validity already guaranteed by the state machine)
        lead = wrap_ord(aBuf[0])
        if lead < 0xA0:
            return -1
        return 94 * (lead - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
@contextlib.contextmanager
def run_server(loop, *, listen_addr=('127.0.0.1', 0),
               use_ssl=False, router=None):
    """Run a throw-away HTTP(S) test server in a background thread.

    Yields an ``HttpRequestHandler`` handle that exposes the bound
    address, a ``url()`` builder, and dict-style access to the shared
    ``properties`` mapping through which tests and the server exchange
    flags (``close``, ``noresponse``, ...).

    :param loop: event loop of the *calling* thread; the server runs on
        its own loop inside a worker thread
    :param listen_addr: ``(host, port)`` tuple for TCP, or a filesystem
        path for a Unix socket
    :param use_ssl: wrap the listening socket with the sample test cert
    :param router: optional router class; when None every request gets a
        canned 200 ``Test message`` reply
    """
    # Shared state between the test thread and the server thread.
    properties = {}
    transports = []

    class HttpRequestHandler:
        # Client-side handle yielded to the test body.

        def __init__(self, addr):
            if isinstance(addr, tuple):
                host, port = addr
                self.host = host
                self.port = port
            else:
                # Unix-socket path: no meaningful host/port.
                self.host = host = 'localhost'
                self.port = port = 0
            self.address = addr
            self._url = '{}://{}:{}'.format(
                'https' if use_ssl else 'http', host, port)

        def __getitem__(self, key):
            return properties[key]

        def __setitem__(self, key, value):
            properties[key] = value

        def url(self, *suffix):
            # Join arbitrary path segments onto the server base URL.
            return urllib.parse.urljoin(
                self._url, '/'.join(str(s) for s in suffix))

    class TestHttpServer(server.ServerHttpProtocol):

        def connection_made(self, transport):
            # Remember every transport so teardown can close them all.
            transports.append(transport)

            super().connection_made(transport)

        def handle_request(self, message, payload):
            # 'close': drop the connection without answering.
            if properties.get('close', False):
                return

            # 'noresponse': hold the connection open effectively forever.
            if properties.get('noresponse', False):
                yield from asyncio.sleep(99999)

            # Honour Expect: 100-continue before reading the body.
            for hdr, val in message.headers.items():
                if (hdr == 'EXPECT') and (val == '100-continue'):
                    self.transport.write(b'HTTP/1.0 100 Continue\r\n\r\n')
                    break

            if router is not None:
                body = yield from payload.read()

                rob = router(
                    self, properties, self.transport, message, body)
                rob.dispatch()

            else:
                # Default canned response when no router is supplied.
                response = aiohttp.Response(self.writer, 200, message.version)

                text = b'Test message'
                response.add_header('Content-type', 'text/plain')
                response.add_header('Content-length', str(len(text)))
                response.send_headers()
                response.write(text)
                response.write_eof()

    if use_ssl:
        # Self-signed sample certificate shipped with the test suite.
        here = os.path.join(os.path.dirname(__file__), '..', 'tests')
        keyfile = os.path.join(here, 'sample.key')
        certfile = os.path.join(here, 'sample.crt')
        sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext.load_cert_chain(certfile, keyfile)
    else:
        sslcontext = None

    def run(loop, fut):
        # Server thread body: build a private loop, start the server,
        # then hand (thread_loop, waiter, address) back to the caller
        # through *fut* and block until the waiter is resolved.
        thread_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(thread_loop)

        if isinstance(listen_addr, tuple):
            host, port = listen_addr
            server_coroutine = thread_loop.create_server(
                lambda: TestHttpServer(keep_alive=0.5),
                host, port, ssl=sslcontext)
        else:
            # Unix socket: remove a stale socket file first.
            try:
                os.unlink(listen_addr)
            except FileNotFoundError:
                pass
            server_coroutine = thread_loop.create_unix_server(
                lambda: TestHttpServer(keep_alive=0.5, timeout=15),
                listen_addr, ssl=sslcontext)
        server = thread_loop.run_until_complete(server_coroutine)

        # 'waiter' is resolved from the main thread to request shutdown.
        waiter = asyncio.Future(loop=thread_loop)
        loop.call_soon_threadsafe(
            fut.set_result, (thread_loop, waiter,
                             server.sockets[0].getsockname()))

        try:
            thread_loop.run_until_complete(waiter)
        finally:
            # call pending connection_made if present
            run_briefly(thread_loop)

            # close opened transports
            for tr in transports:
                tr.close()

            run_briefly(thread_loop)  # call close callbacks

            server.close()
            thread_loop.stop()
            thread_loop.close()
            gc.collect()

    fut = asyncio.Future(loop=loop)
    server_thread = threading.Thread(target=run, args=(loop, fut))
    server_thread.start()

    thread_loop, waiter, addr = loop.run_until_complete(fut)
    try:
        yield HttpRequestHandler(addr)
    finally:
        # Ask the server thread to shut down and wait for it.
        thread_loop.call_soon_threadsafe(waiter.set_result, None)
        server_thread.join()
class Router:
    """Base class for test request routers.

    Subclasses register handler methods with the :meth:`define`
    decorator; :meth:`dispatch` matches the request path against the
    registered patterns and invokes the first matching handler, turning
    any handler exception into a 500 response.
    """

    _response_version = "1.1"
    # (code, (short reason, long reason)) table borrowed from http.server.
    _responses = http.server.BaseHTTPRequestHandler.responses

    def __init__(self, srv, props, transport, message, payload):
        """Capture the parsed request.

        :param srv: owning protocol (provides ``writer`` / ``keep_alive``)
        :param props: shared properties dict of the test server
        :param transport: asyncio transport of the connection
        :param message: parsed request (method/path/version/headers/...)
        :param payload: raw request body as bytes
        """
        # headers
        # NOTE(review): relies on http.client being importable; it is
        # pulled in transitively by the http.server import -- confirm.
        self._headers = http.client.HTTPMessage()
        for hdr, val in message.headers.items():
            self._headers.add_header(hdr, val)

        self._srv = srv
        self._props = props
        self._transport = transport
        self._method = message.method
        self._uri = message.path
        self._version = message.version
        self._compression = message.compression
        self._body = payload

        # Split the request target into path and query string.
        url = urllib.parse.urlsplit(self._uri)
        self._path = url.path
        self._query = url.query

    @staticmethod
    def define(rmatch):
        """Class-body decorator: register *rmatch* (a regex) -> handler.

        Mutates the class body's local namespace via the caller's frame,
        accumulating (compiled pattern, method name) pairs in ``_mapping``.
        """
        def wrapper(fn):
            f_locals = sys._getframe(1).f_locals
            mapping = f_locals.setdefault('_mapping', [])
            mapping.append((re.compile(rmatch), fn.__name__))
            return fn

        return wrapper

    def dispatch(self):  # pragma: no cover
        """Route the request to the first handler whose pattern matches.

        Unmatched paths get a 404; a handler exception is reported as a
        500 whose body is the formatted traceback.
        """
        for route, fn in self._mapping:
            match = route.match(self._path)
            if match is not None:
                try:
                    return getattr(self, fn)(match)
                except Exception:
                    out = io.StringIO()
                    traceback.print_exc(file=out)
                    # BUGFIX: _response() requires a response object (it
                    # calls add_headers/send_headers on it); previously the
                    # bare status int 500 was passed here, which would
                    # crash instead of producing the 500 reply.  Mirror
                    # the 404 path below.
                    self._response(self._start_response(500), out.getvalue())

                return

        return self._response(self._start_response(404))

    def _start_response(self, code):
        """Create a bare aiohttp response object for status *code*."""
        return aiohttp.Response(self._srv.writer, code)

    def _response(self, response, body=None,
                  headers=None, chunked=False, write_body=None):
        """Serialize the captured request as a JSON document and send it.

        :param response: response object from :meth:`_start_response`
        :param body: optional pre-built content overriding the echo body
        :param headers: extra (name, value) headers to append
        :param chunked: use chunked transfer-encoding instead of
            Content-Length
        :param write_body: optional callable ``(response, body)`` that
            performs the payload write itself; its failures are swallowed
        """
        # Echo request headers back with canonical Word-Word casing.
        r_headers = {}
        for key, val in self._headers.items():
            key = '-'.join(p.capitalize() for p in key.split('-'))
            r_headers[key] = val

        encoding = self._headers.get('content-encoding', '').lower()
        if 'gzip' in encoding:  # pragma: no cover
            cmod = 'gzip'
        elif 'deflate' in encoding:
            cmod = 'deflate'
        else:
            cmod = ''

        resp = {
            'method': self._method,
            'version': '%s.%s' % self._version,
            'path': self._uri,
            'headers': r_headers,
            'origin': self._transport.get_extra_info('addr', ' ')[0],
            'query': self._query,
            'form': {},
            'compression': cmod,
            'multipart-data': []
        }
        if body:  # pragma: no cover
            resp['content'] = body
        else:
            resp['content'] = self._body.decode('utf-8', 'ignore')

        ct = self._headers.get('content-type', '').lower()

        # application/x-www-form-urlencoded
        if ct == 'application/x-www-form-urlencoded':
            resp['form'] = urllib.parse.parse_qs(self._body.decode('latin1'))

        # multipart/form-data
        elif ct.startswith('multipart/form-data'):  # pragma: no cover
            # Rebuild a full MIME document (headers + body) so the stdlib
            # email parser can split the parts for us.
            out = io.BytesIO()
            for key, val in self._headers.items():
                out.write(bytes('{}: {}\r\n'.format(key, val), 'latin1'))

            out.write(b'\r\n')
            out.write(self._body)
            out.write(b'\r\n')
            out.seek(0)

            message = email.parser.BytesParser().parse(out)
            if message.is_multipart():
                for msg in message.get_payload():
                    if msg.is_multipart():
                        logging.warning('multipart msg is not expected')
                    else:
                        key, params = cgi.parse_header(
                            msg.get('content-disposition', ''))
                        params['data'] = msg.get_payload()
                        params['content-type'] = msg.get_content_type()
                        cte = msg.get('content-transfer-encoding')
                        if cte is not None:
                            resp['content-transfer-encoding'] = cte
                        resp['multipart-data'].append(params)
        body = json.dumps(resp, indent=4, sort_keys=True)

        # default headers
        hdrs = [('Connection', 'close'),
                ('Content-Type', 'application/json')]
        if chunked:
            hdrs.append(('Transfer-Encoding', 'chunked'))
        else:
            hdrs.append(('Content-Length', str(len(body))))

        # extra headers
        if headers:
            hdrs.extend(headers.items())

        if chunked:
            response.enable_chunked_encoding()

        # headers
        response.add_headers(*hdrs)
        response.send_headers()

        # write payload
        if write_body:
            try:
                write_body(response, body)
            except:
                return
        else:
            response.write(helpers.str_to_bytes(body))

        response.write_eof()

        # keep-alive
        if response.keep_alive():
            self._srv.keep_alive(True)
class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])):
    """Http basic authentication helper.

    :param str login: Login
    :param str password: Password
    :param str encoding: (optional) encoding ('latin1' by default)
    """

    def __new__(cls, login, password='', encoding='latin1'):
        # Reject None up front: a None component would only fail later,
        # inside encode(), with a far less helpful error.
        if login is None:
            raise ValueError('None is not allowed as login value')
        if password is None:
            raise ValueError('None is not allowed as password value')
        return super().__new__(cls, login, password, encoding)

    def encode(self):
        """Encode credentials as an ``Authorization`` header value."""
        raw = '{}:{}'.format(self.login, self.password).encode(self.encoding)
        token = base64.b64encode(raw).decode(self.encoding)
        return 'Basic %s' % token
class FormData:
    """Helper class for multipart/form-data and
    application/x-www-form-urlencoded body generation."""

    def __init__(self, fields=()):
        from . import multipart
        self._writer = multipart.MultipartWriter('form-data')
        self._fields = []
        self._is_multipart = False

        # Normalize the accepted input shapes to a flat sequence.
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self):
        # True once any field has forced multipart/form-data encoding.
        return self._is_multipart

    @property
    def content_type(self):
        if not self._is_multipart:
            return 'application/x-www-form-urlencoded'
        return self._writer.headers[hdrs.CONTENT_TYPE]

    def add_field(self, name, value, *, content_type=None, filename=None,
                  content_transfer_encoding=None):
        """Register one field; any keyword option forces multipart mode."""

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            # Raw bytes default to a file-like part named after the field.
            if filename is None and content_transfer_encoding is None:
                filename = name

        disposition_params = multidict.MultiDict({'name': name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError('filename must be an instance of str. '
                            'Got: %s' % filename)
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            disposition_params['filename'] = filename
            self._is_multipart = True

        part_headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError('content_type must be an instance of str. '
                                'Got: %s' % content_type)
            part_headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError('content_transfer_encoding must be an instance'
                                ' of str. Got: %s' % content_transfer_encoding)
            part_headers[hdrs.CONTENT_TRANSFER_ENCODING] = \
                content_transfer_encoding
            self._is_multipart = True

        self._fields.append((disposition_params, part_headers, value))

    def add_fields(self, *fields):
        """Add many fields at once: files, multidicts or (name, value) pairs."""
        pending = list(fields)

        while pending:
            item = pending.pop(0)

            if isinstance(item, io.IOBase):
                self.add_field(guess_filename(item, 'unknown'), item)

            elif isinstance(item,
                            (multidict.MultiDictProxy,
                             multidict.MultiDict)):
                pending.extend(item.items())

            elif isinstance(item, (list, tuple)) and len(item) == 2:
                name, value = item
                self.add_field(name, value)

            else:
                raise TypeError('Only io.IOBase, multidict and (name, file) '
                                'pairs allowed, use .add_field() for passing '
                                'more complex parameters')

    def _gen_form_urlencoded(self, encoding):
        # form data (x-www-form-urlencoded)
        pairs = [(options['name'], value)
                 for options, _, value in self._fields]
        return urllib.parse.urlencode(pairs, doseq=True).encode(encoding)

    def _gen_form_data(self, *args, **kwargs):
        """Encode a list of fields using the multipart/form-data MIME format"""
        for disposition_params, part_headers, value in self._fields:
            part = self._writer.append(value, part_headers)
            if disposition_params:
                part.set_content_disposition('form-data',
                                             **disposition_params)
            # FIXME cgi.FieldStorage doesn't likes body parts with
            # Content-Length which were sent via chunked transfer encoding
            part.headers.pop(hdrs.CONTENT_LENGTH, None)
        yield from self._writer.serialize()

    def __call__(self, encoding):
        if self._is_multipart:
            return self._gen_form_data(encoding)
        return self._gen_form_urlencoded(encoding)
def parse_mimetype(mimetype):
    """Parses a MIME type into its components.

    :param str mimetype: MIME type

    :returns: 4 element tuple for MIME type, subtype, suffix and parameters
    :rtype: tuple

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    ('text', 'html', '', {'charset': 'utf-8'})

    """
    if not mimetype:
        return '', '', '', {}

    parts = mimetype.split(';')

    # Trailing ';key=value' segments become the params dict; a later
    # duplicate key overwrites an earlier one.
    params = {}
    for item in parts[1:]:
        if not item:
            continue
        key, _, value = item.partition('=')
        params[key.lower().strip()] = value.strip(' "')

    fulltype = parts[0].strip().lower()
    if fulltype == '*':
        fulltype = '*/*'

    if '/' in fulltype:
        mtype, stype = fulltype.split('/', 1)
    else:
        mtype, stype = fulltype, ''

    # '+suffix' notation, e.g. application/vnd.api+json.
    if '+' in stype:
        stype, suffix = stype.split('+', 1)
    else:
        suffix = ''

    return mtype, stype, suffix, params


def str_to_bytes(s, encoding='utf-8'):
    """Return *s* encoded with *encoding* if it is a str, unchanged otherwise."""
    return s.encode(encoding) if isinstance(s, str) else s


def guess_filename(obj, default=None):
    """Best-effort file name taken from *obj*'s ``name`` attribute.

    Returns *default* when the attribute is missing, empty, or a pseudo
    name such as ``<stdin>``.
    """
    name = getattr(obj, 'name', None)
    if not name or name.startswith('<') or name.endswith('>'):
        return default
    return os.path.split(name)[-1]
def parse_remote_addr(forward):
    """Normalise a remote-address value to ``(host, str(port))``.

    *forward* may already be a ``(host, port)`` tuple, or an
    ``X-Forwarded-For``-style string: possibly comma separated, possibly
    a bracketed IPv6 literal, possibly carrying a ``:port`` suffix.
    """
    if not isinstance(forward, str):
        remote = forward
    else:
        # we only took the last one
        # http://en.wikipedia.org/wiki/X-Forwarded-For
        if ',' in forward:
            forward = forward.rsplit(',', 1)[-1].strip()

        # find host and port on ipv6 address
        if '[' in forward and ']' in forward:
            host = forward.split(']')[0][1:].lower()
        elif forward.count(':') == 1:
            host = forward.split(':')[0].lower()
        else:
            host = forward

        tail = forward.split(']')[-1]
        port = tail.split(':', 1)[1] if tail.count(':') == 1 else 80
        remote = (host, port)

    return remote[0], str(remote[1])


def atoms(message, environ, response, transport, request_time):
    """Gets atoms for log formatting."""
    if message:
        r = '{} {} HTTP/{}.{}'.format(
            message.method, message.path,
            message.version[0], message.version[1])
        headers = message.headers
    else:
        r, headers = '', {}

    if transport is None:
        remote_addr = ('',)
    else:
        remote_addr = parse_remote_addr(
            transport.get_extra_info('addr', '127.0.0.1'))

    return {
        'h': remote_addr[0],
        'l': '-',
        'u': '-',
        't': format_date_time(None),
        'r': r,
        's': str(getattr(response, 'status', '')),
        'b': str(getattr(response, 'output_length', '')),
        'f': headers.get(hdrs.REFERER, '-'),
        'a': headers.get(hdrs.USER_AGENT, '-'),
        'T': str(int(request_time)),
        'D': str(request_time).split('.', 1)[-1][:5],
        'p': "<%s>" % os.getpid()
    }


class SafeAtoms(dict):
    """Copy from gunicorn"""

    def __init__(self, atoms, i_headers, o_headers):
        super().__init__()

        self._i_headers = i_headers
        self._o_headers = o_headers

        # Pre-escape double quotes so values are safe inside a quoted
        # log format.
        for key, value in atoms.items():
            self[key] = value.replace('"', '\\"')

    def __getitem__(self, k):
        # '{header-name}i' / '{header-name}o' look up request/response
        # headers; anything unknown degrades to '-'.
        if k.startswith('{'):
            if k.endswith('}i'):
                return self._i_headers.get(k[1:-2], '-')
            if k.endswith('}o'):
                return self._o_headers.get(k[1:-2], '-')
        if k in self:
            return dict.__getitem__(self, k)
        return '-'


# Sentinel distinguishing "not cached yet" from a cached None.
_marker = object()


class reify:
    """Cache the result of a method as an instance attribute.

    Acts like ``@property`` on first access: calls the wrapped method,
    then stores the result in the instance ``__dict__`` under the same
    name so the computation runs at most once per instance.  The
    attribute is read-only: assignment raises :exc:`AttributeError`.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        try:
            self.__doc__ = wrapped.__doc__
        except:  # pragma: no cover
            pass
        self.name = wrapped.__name__

    def __get__(self, inst, owner, _marker=_marker):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        cached = inst.__dict__.get(self.name, _marker)
        if cached is _marker:
            cached = self.wrapped(inst)
            inst.__dict__[self.name] = cached
        return cached

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")