├── .gitignore
├── README.md
├── app_webpy.py
├── apps.py
├── client.py
├── httputil.py
├── midware.py
├── serve.conf
├── serve.py
├── template.py
└── test.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | sessions
3 | TAGS
4 | tags
5 | .coverage
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Overview #
2 |
3 | This project implements a simple web server, including a dispatch system, a cache system, a session system and a template system. Form and ORM systems are not implemented yet.
4 |
5 | The project uses no third-party libraries and is intended purely for teaching how HTTP works. It serves the same purpose as py-web-server, but since it was written later, its code quality is comparatively higher. Unlike py-web-server, it does not include greenlet or a non-blocking programming framework; it is based on a pure thread model.
6 |
7 | # License #
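
As a rough sketch of how the pieces fit together (illustrative only, not part of the project files; it follows the patterns used in `apps.py` and `serve.py`):

```python
# Route a URL with midware.Dispatch, add in-memory sessions, and serve it
# with the thread-based server from httputil.
import httputil
import midware


def hello(req):
    # a handler receives an httputil.Request and returns an httputil.Response
    return httputil.Response.create(200, body='hello')


dis = midware.Dispatch((
    ('/hello/', hello),
))
dis = midware.MemorySession(600)(dis)  # cookie-based in-memory sessions

ws = httputil.WebServer(dis, accesslog='')  # '' writes access lines to stdout
server = httputil.ThreadServer(('', 8080), ws.handler, poolsize=4)
server.serve_forever()
```

`serve.py` wires the same thing up from `serve.conf` (choosing the engine and the server type), so `python serve.py` starts the demo application in `apps.py` with the configured server.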
8 |
9 | Copyright (C) 2012-2016 Shell Xu
10 |
11 | This program is free software: you can redistribute it and/or modify
12 | it under the terms of the GNU General Public License as published by
13 | the Free Software Foundation, either version 3 of the License, or
14 | (at your option) any later version.
15 |
16 | This program is distributed in the hope that it will be useful,
17 | but WITHOUT ANY WARRANTY; without even the implied warranty of
18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 | GNU General Public License for more details.
20 |
21 | You should have received a copy of the GNU General Public License
22 | along with this program. If not, see <http://www.gnu.org/licenses/>.
23 |
--------------------------------------------------------------------------------
/app_webpy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2013-12-30
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division,\
9 | print_function, unicode_literals
10 | import os
11 | import copy
12 | import stat
13 | import httputil
14 | import urllib
15 | import logging
16 | import unittest
17 | from os import path
18 | import web
19 | from template import Template
20 |
21 |
22 | class Main(object):
23 |
24 | def GET(self, name):
25 | logging.info('main url count: {}'.format(session.count))
26 | logging.info('main url match: {}'.format(name))
27 | body = 'main page, count: {}, match: {}'.format(
28 | session.count, name)
29 | session.count += 1
30 | return body
31 |
32 |
33 | class Post(object):
34 |
35 | def POST(self, name):
36 | l = str(len(web.data()))
37 | logging.info('test post: {}'.format(l))
38 | return l
39 |
40 |
41 | class Path(object):
42 | tplstr = '''{%import os%}{%from os import path%}
43 |
44 | file name | file mode | file size |
45 | {%for name in namelist:%}\
46 | {%stat_info = os.lstat(path.join(real_path, name))%}\
47 | {%=name%} | {%=get_stat_str(stat_info.st_mode)%} | {%=stat_info.st_size%} |
48 | {%end%}
49 |
50 | '''
51 | tpl = Template(template=tplstr)
52 | index_set = ['index.html', ]
53 |
54 | def file_app(self, filepath):
55 | with open(filepath, 'rb') as fi:
56 | for b in httputil.file_source(fi):
57 | yield b
58 |
59 | def get_stat_str(self, mode):
60 | stat_map = [
61 | (stat.S_ISDIR, 'd'),
62 | (stat.S_ISREG, 'f'),
63 | (stat.S_ISLNK, 'l'),
64 | (stat.S_ISSOCK, 's')]
65 | return ''.join([s for f, s in stat_map if f(mode)])
66 |
67 | def GET(self, filepath):
68 | url_path = urllib.unquote(filepath)
69 | real_path = path.join(self.basedir, url_path.lstrip('/'))
70 | real_path = path.abspath(path.realpath(real_path))
71 | if not real_path.startswith(self.basedir):
72 | raise web.forbidden()
73 | if not path.isdir(real_path):
74 | return self.file_app(real_path)
75 | for i in self.index_set:
76 | test_path = path.join(real_path, i)
77 | if os.access(test_path, os.R_OK):
78 | return self.file_app(test_path)
79 | namelist = os.listdir(real_path)
80 | namelist.sort()
81 | return self.tpl.render({
82 | 'namelist': namelist, 'get_stat_str': self.get_stat_str,
83 | 'real_path': real_path, 'url_path': url_path})
84 |
85 |
86 | def StaticPath(basedir):
87 | p = copy.copy(Path)
88 | p.basedir = path.abspath(path.realpath(path.expanduser(basedir)))
89 | return p
90 |
91 | app = web.application((
92 | '/post/(.*)', Post,
93 | '/self/(.*)', StaticPath('.'),
94 | '/(.*)', Main))
95 |
96 | session = web.session.Session(
97 | app, web.session.DiskStore('sessions'), initializer={'count': 0})
98 |
99 |
100 | class TestAppWebpy(unittest.TestCase):
101 |
102 | def test_main(self):
103 | resp = app.request('/urlmatch')
104 | self.assertEqual(resp.status, '200 OK')
105 | self.assertEqual(
106 | resp.data,
107 | b'main page, count: 0, match: urlmatch')
108 | self.assertIn('Set-Cookie', resp.headers)
109 |
110 | def test_post(self):
111 | resp = app.request('/post/postmatch', method='POST', data='postinfo')
112 | self.assertEqual(resp.status, '200 OK')
113 | self.assertEqual(resp.data, '8')
114 |
115 | def test_path(self):
116 | resp = app.request('/self/')
117 | self.assertEqual(resp.status, '200 OK')
118 | self.assertIn('httputil.py', resp.data)
119 |
--------------------------------------------------------------------------------
/apps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2012-09-04
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division,\
9 | print_function, unicode_literals
10 | import os
11 | import stat
12 | import time
13 | import json
14 | import logging
15 | import unittest
16 | from os import path
17 | try:
18 | from urllib import unquote
19 | except ImportError:
20 | from urllib.parse import unquote
21 | import httputil
22 | import midware
23 | from template import Template
24 |
25 |
26 | def url_main(req):
27 | count = req.session.get('count', 0)
28 | logging.info('main path: {}'.format(req.path))
29 | logging.info('main url count: {}'.format(count))
30 | logging.info('main url match: {}'.format(req.url_match))
31 | logging.info('main url param: {}'.format(req.url_param))
32 | body = json.dumps({
33 | 'page': 'main',
34 | 'path': req.path,
35 | 'count': count,
36 | 'match': req.url_match,
37 | 'param': req.url_param
38 | })
39 | req.session['count'] = count + 1
40 | res = httputil.Response.create(200, body=body)
41 | return res
42 |
43 |
44 | def url_cached(req):
45 | res = httputil.Response.create(200, body='cached')
46 | res.cache = 0.1
47 | return res
48 |
49 |
50 | def url_post(req):
51 | l = str(len(req.readbody()))
52 | logging.info('test post: {}'.format(l))
53 | return httputil.Response.create(200, body=l)
54 |
55 |
56 | class url_path(object):
57 | tplstr = '''{%import os%}{%from os import path%}
58 |
59 | file name | file mode | file size |
60 | {%for name in namelist:%}\
61 | {%stat_info = os.lstat(path.join(real_path, name))%}\
62 | {%=name%} | {%=get_stat_str(stat_info.st_mode)%} | {%=stat_info.st_size%} |
63 | {%end%}
64 |
65 | '''
66 | tpl = Template(template=tplstr)
67 | index_set = ['index.html', ]
68 |
69 | def __init__(self, basedir):
70 | self.basedir = path.abspath(path.realpath(path.expanduser(basedir)))
71 |
72 | def calc_path(self, filepath):
73 | url_path = unquote(filepath)
74 | real_path = path.join(self.basedir, url_path.lstrip('/'))
75 | real_path = path.abspath(path.realpath(real_path))
76 | if not real_path.startswith(self.basedir):
77 | raise httputil.HttpException(403)
78 | return url_path, real_path
79 |
80 | def get_stat_str(self, mode):
81 | stat_map = [
82 | (stat.S_ISDIR, 'd'),
83 | (stat.S_ISREG, 'f'),
84 | (stat.S_ISLNK, 'l'),
85 | (stat.S_ISSOCK, 's')]
86 | return ''.join([s for f, s in stat_map if f(mode)])
87 |
88 | def file_app(self, req, filename):
89 | def on_body():
90 | with open(filename, 'rb') as fi:
91 | for d in httputil.file_source(fi):
92 | yield d
93 | return httputil.Response.create(200, body=on_body)
94 |
95 | def __call__(self, req):
96 | url_path, real_path = self.calc_path(req.path)
97 | if not path.isdir(real_path):
98 | return self.file_app(req, real_path)
99 | for i in self.index_set:
100 | test_path = path.join(real_path, i)
101 | if os.access(test_path, os.R_OK):
102 | return self.file_app(req, test_path)
103 | namelist = os.listdir(real_path)
104 | namelist.sort()
105 | body = self.tpl.render({
106 | 'namelist': namelist, 'get_stat_str': self.get_stat_str,
107 | 'real_path': real_path, 'url_path': url_path})
108 | return httputil.Response.create(200, body=body)
109 |
110 | dis_chain = midware.Dispatch((
111 | ('/chain2/', url_main, {'param2': 2}),
112 | ))
113 | dis = midware.Dispatch((
114 | ('/chain', dis_chain, {'param1': 1}),
115 | ('/test/', url_main, {'test param': 2}),
116 | ('/cached/', url_cached),
117 | ('/post/', url_post),
118 | ('/self/', url_path('.')),
119 | ('/', url_main, {'main param': 1})
120 | ))
121 | dis = midware.MemoryCache(2)(dis)
122 | dis = midware.MemorySession(600)(dis)
123 |
124 |
125 | class TestApp(unittest.TestCase):
126 | template = u'''{%=r%}
127 | col1 | col2 |
128 | {%for i in objs:%}{%=i[0]%} | {%=i[1]%} |
129 | {%end%}
'''
130 | result = u'''test
131 | col1 | col2 |
132 | 1 | 2 |
133 | 3 | 4 |
134 |
'''
135 |
136 | def setUp(self):
137 | self.ws = httputil.WebServer(dis)
138 |
139 | def test_main(self):
140 | req = httputil.Request.create('/urlmatch')
141 | resp = self.ws.http_handler(req)
142 | self.assertEqual(resp.code, 200)
143 | self.assertEqual(
144 | json.loads(resp.body.decode('utf-8')),
145 | {
146 | 'page': 'main',
147 | 'path': 'urlmatch',
148 | 'count': 0,
149 | 'match': {},
150 | 'param': {'main param': 1}
151 | })
152 | self.assertIn('Set-Cookie', resp.headers)
153 |
154 | req = httputil.Request.create('/urlmatch')
155 | req.headers['Cookie'] = resp.headers['Set-Cookie']
156 | resp = self.ws.http_handler(req)
157 | self.assertEqual(resp.code, 200)
158 | self.assertEqual(
159 | json.loads(resp.body.decode('utf-8')),
160 | {
161 | 'page': 'main',
162 | 'path': 'urlmatch',
163 | 'count': 1,
164 | 'match': {},
165 | 'param': {'main param': 1}
166 | })
167 |
168 | def test_cached(self):
169 | for i in range(12):
170 | req = httputil.Request.create('/cached/{}'.format(int(i/3)))
171 | resp = self.ws.http_handler(req)
172 | self.assertEqual(resp.code, 200)
173 | self.assertEqual(resp.body, b'cached')
174 |
175 | time.sleep(0.2)
176 | req = httputil.Request.create('/cached/abc')
177 | resp = self.ws.http_handler(req)
178 | self.assertEqual(resp.code, 200)
179 | self.assertEqual(resp.body, b'cached')
180 |
181 | def test_test(self):
182 | req = httputil.Request.create('/test/testmatch')
183 | resp = self.ws.http_handler(req)
184 | self.assertEqual(resp.code, 200)
185 | self.assertEqual(
186 | json.loads(resp.body.decode('utf-8')),
187 | {
188 | 'page': 'main',
189 | 'path': 'testmatch',
190 | 'count': 0,
191 | 'match': {},
192 | 'param': {'test param': 2}
193 | })
194 |
195 | def test_chain(self):
196 | req = httputil.Request.create('/chain/chain2/chainmatch')
197 | resp = self.ws.http_handler(req)
198 | self.assertEqual(resp.code, 200)
199 | self.assertEqual(
200 | json.loads(resp.body.decode('utf-8')),
201 | {
202 | 'page': 'main',
203 | 'path': 'chainmatch',
204 | 'count': 0,
205 | 'match': {},
206 | 'param': {'param1': 1, 'param2': 2}
207 | })
208 | self.assertIn('Set-Cookie', resp.headers)
209 |
210 | def test_post(self):
211 | req = httputil.Request.create('/post/postmatch', body='postinfo')
212 | resp = self.ws.http_handler(req)
213 | self.assertEqual(resp.code, 200)
214 | self.assertEqual(resp.body, b'8')
215 |
216 | def test_path(self):
217 | req = httputil.Request.create('/self/')
218 | resp = self.ws.http_handler(req)
219 | self.assertEqual(resp.code, 200)
220 | self.assertIn(b'httputil.py', resp.body)
221 |
--------------------------------------------------------------------------------
/client.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2013-10-17
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division,\
9 | print_function, unicode_literals
10 | import sys
11 | import json
12 | import time
13 | import httputil
14 | import unittest
15 | try:
16 | from StringIO import StringIO
17 | except ImportError:
18 | from io import StringIO
19 | from contextlib import closing
20 |
21 | if sys.version_info.major == 3:
22 | unicode = str
23 | else:
24 | bytes = str
25 |
26 |
27 | def download(url):
28 | with closing(httputil.download(url)) as resp:
29 | return resp.readbody()
30 |
31 |
32 | def prepare_apps():
33 | import apps
34 | ws = httputil.WebServer(apps.dis, StringIO())
35 | ts = httputil.ThreadServer(('', 18080), ws.handler, poolsize=1)
36 | ts.start()
37 | time.sleep(0.1)
38 |
39 |
40 | prepare_apps()
41 |
42 |
43 | class TestClientApp(unittest.TestCase):
44 | target = 'http://localhost:18080'
45 |
46 | def test_main(self):
47 | body = download(self.target + '/urlmatch')
48 | self.assertEqual(
49 | json.loads(body.decode('utf-8')),
50 | {
51 | 'page': 'main',
52 | 'path': 'urlmatch',
53 | 'count': 0,
54 | 'match': {},
55 | 'param': {'main param': 1}
56 | })
57 |
58 | def test_getfile(self):
59 | with httputil.download(self.target + '/urlmatch').makefile() as f:
60 | body = f.read()
61 | self.assertEqual(
62 | json.loads(body.decode('utf-8')),
63 | {
64 | 'page': 'main',
65 | 'path': 'urlmatch',
66 | 'count': 0,
67 | 'match': {},
68 | 'param': {'main param': 1}
69 | })
70 |
71 | def test_cached(self):
72 | for i in range(12):
73 | body = download(
74 | self.target + '/cached/{}'.format(int(i/3)))
75 | self.assertEqual(body, b'cached')
76 |
77 | time.sleep(0.2)
78 | body = download(self.target + '/cached/abc')
79 | self.assertEqual(body, b'cached')
80 |
81 | def test_test(self):
82 | body = download(self.target + '/test/testmatch')
83 | self.assertEqual(
84 | json.loads(body.decode('utf-8')),
85 | {
86 | 'page': 'main',
87 | 'path': 'testmatch',
88 | 'count': 0,
89 | 'match': {},
90 | 'param': {'test param': 2}
91 | })
92 |
93 | def test_post(self):
94 | with open('httputil.py', 'rb') as fi:
95 | data = fi.read()
96 | with httputil.download(
97 | self.target + '/post/postmatch',
98 | data=data
99 | ).makefile() as f:
100 | body = f.read()
101 | self.assertEqual(body, str(len(data)).encode(httputil.ENCODING))
102 |
103 | def test_post_file(self):
104 | with open('httputil.py', 'rb') as fi:
105 | with httputil.download(self.target + '/post/postmatch',
106 | data=fi).makefile() as f:
107 | body = f.read()
108 | with open('httputil.py', 'rb') as fi:
109 | data = fi.read()
110 | self.assertEqual(body, str(len(data)).encode(httputil.ENCODING))
111 |
112 | def test_upload(self):
113 | with open('httputil.py', 'rb') as fi:
114 | data = fi.read()
115 | with httputil.upload(self.target + '/post/postmatch') as f:
116 | f.write(data)
117 | with closing(f.get_response()) as resp:
118 | self.assertEqual(
119 | resp.readbody(),
120 | str(len(data)).encode(httputil.ENCODING))
121 |
122 | def test_path(self):
123 | body = download(self.target + '/self/')
124 | self.assertIn(b'httputil.py', body)
125 |
126 |
127 | def prepare_webpy():
128 | try:
129 | import app_webpy
130 | except ImportError:
131 | global TestClientWebpy
132 | TestClientWebpy = None
133 | return
134 | ws = httputil.WSGIServer(app_webpy.app.wsgifunc())
135 | ts = httputil.ThreadServer(('', 18081), ws.handler, poolsize=1)
136 | ts.start()
137 | time.sleep(0.1)
138 |
139 |
140 | class TestClientWebpy(unittest.TestCase):
141 | target = 'http://localhost:18081'
142 |
143 | def test_main(self):
144 | body = download(self.target + '/urlmatch')
145 | self.assertEqual(
146 | body,
147 | b'main page, count: 0, match: urlmatch')
148 |
149 | def test_post(self):
150 | with open('httputil.py', 'rb') as fi:
151 | data = fi.read()
152 | with httputil.download(
153 | self.target + '/post/postmatch',
154 | data=data
155 | ).makefile() as f:
156 | body = f.read()
157 | self.assertEqual(body, str(len(data)).encode(httputil.ENCODING))
158 |
159 | def test_path(self):
160 | body = download(self.target + '/self/')
161 | self.assertIn(b'httputil.py', body)
162 |
163 |
164 | prepare_webpy()
165 |
--------------------------------------------------------------------------------
/httputil.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2012-04-26
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division,\
9 | print_function, unicode_literals
10 | import sys
11 | import socket
12 | import logging
13 | import datetime
14 | import threading
15 | try:
16 | from urlparse import urlparse
17 | except ImportError:
18 | from urllib.parse import urlparse
19 |
20 |
21 | __all__ = [
22 | 'ENCODING', 'CHUNK_MIN', 'BUFSIZE', 'CODE_NOBODY', 'DEFAULT_PAGES',
23 | 'file_source', 'chunked', 'BufferedFile', 'HttpMessage', 'Request',
24 | 'RequestWriteFile', 'ResponseFile', 'Response', 'connector', 'download',
25 | 'upload', 'ThreadServer', 'WebServer', 'WSGIServer'
26 | ]
27 |
28 |
29 | if sys.version_info.major == 3:
30 | unicode = str
31 | else:
32 | bytes = str
33 |
34 | ENCODING = 'utf-8'
35 | CHUNK_MIN = 1024
36 | BUFSIZE = 8192
37 | CODE_NOBODY = [100, 101, 204, 304]
38 |
39 | DEFAULT_PAGES = {
40 | 100: ('Continue', 'Request received, please continue'),
41 | 101: ('Switching Protocols',
42 | 'Switching to new protocol; obey Upgrade header'),
43 |
44 | 200: ('OK', ''),
45 | 201: ('Created', 'Document created, URL follows'),
46 | 202: ('Accepted', 'Request accepted, processing continues off-line'),
47 | 203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
48 | 204: ('No Content', 'Request fulfilled, nothing follows'),
49 | 205: ('Reset Content', 'Clear input form for further input.'),
50 | 206: ('Partial Content', 'Partial content follows.'),
51 |
52 | 300: ('Multiple Choices', 'Object has several resources -- see URI list'),
53 | 301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
54 | 302: ('Found', 'Object moved temporarily -- see URI list'),
55 | 303: ('See Other', 'Object moved -- see Method and URL list'),
56 | 304: ('Not Modified', 'Document has not changed since given time'),
57 | 305: ('Use Proxy',
58 | 'You must use proxy specified in Location to access this resource.'),
59 | 307: ('Temporary Redirect', 'Object moved temporarily -- see URI list'),
60 |
61 | 400: ('Bad Request', 'Bad request syntax or unsupported method'),
62 | 401: ('Unauthorized', 'No permission -- see authorization schemes'),
63 | 402: ('Payment Required', 'No payment -- see charging schemes'),
64 | 403: ('Forbidden', 'Request forbidden -- authorization will not help'),
65 | 404: ('Not Found', 'Nothing matches the given URI'),
66 | 405: ('Method Not Allowed',
67 | 'Specified method is invalid for this server.'),
68 | 406: ('Not Acceptable', 'URI not available in preferred format.'),
69 | 407: ('Proxy Authentication Required',
70 | 'You must authenticate with this proxy before proceeding.'),
71 | 408: ('Request Timeout', 'Request timed out; try again later.'),
72 | 409: ('Conflict', 'Request conflict.'),
73 | 410: ('Gone', 'URI no longer exists and has been permanently removed.'),
74 | 411: ('Length Required', 'Client must specify Content-Length.'),
75 | 412: ('Precondition Failed', 'Precondition in headers is false.'),
76 | 413: ('Request Entity Too Large', 'Entity is too large.'),
77 | 414: ('Request-URI Too Long', 'URI is too long.'),
78 | 415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
79 | 416: ('Requested Range Not Satisfiable', 'Cannot satisfy request range.'),
80 | 417: ('Expectation Failed', 'Expect condition could not be satisfied.'),
81 |
82 | 500: ('Internal Server Error', 'Server got itself in trouble'),
83 | 501: ('Not Implemented', 'Server does not support this operation'),
84 | 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
85 | 503: ('Service Unavailable',
86 | 'The server cannot process the request due to a high load'),
87 | 504: ('Gateway Timeout',
88 | 'The gateway server did not receive a timely response'),
89 | 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
90 | }
91 |
92 | # HTTPTIMEFMT = '%a, %d %b %Y %H:%M:%S %Z'
93 |
94 |
95 | def file_source(stream, size=BUFSIZE):
96 | data = stream.read(size)
97 | while data:
98 | yield data
99 | data = stream.read(size)
100 |
101 |
102 | def chunked(f):
103 | for data in f:
104 | yield b'%X\r\n%s\r\n' % (len(data), data)
105 | yield b'0\r\n\r\n'
106 |
107 |
108 | class BufferedFile(object):
109 |
110 | def __init__(self, iterator):
111 | self.iterator = iterator
112 | self.buf = b''
113 |
114 | def read(self, size=-1):
115 | try:
116 | while size == -1 or len(self.buf) < size:
117 | self.buf += next(self.iterator)
118 | except StopIteration:
119 | size = len(self.buf)
120 | data, self.buf = self.buf[:size], self.buf[size:]
121 | return data
122 |
123 |
124 | class HttpMessage(object):
125 |
126 | def __init__(self):
127 | self.headers = {}
128 | self.sent = False
129 | self.length = None
130 | self.body = None
131 | self.keepalive = True
132 | self.cache = 0
133 |
134 | def add(self, k, v):
135 | self.headers.setdefault(k, [])
136 | self.headers[k].append(v)
137 |
138 | def __setitem__(self, k, v):
139 | self.headers[k] = [v, ]
140 |
141 | def __getitem__(self, k):
142 | if k not in self:
143 | raise KeyError
144 | return self.headers[k][0]
145 |
146 | def header_from_dict(self, d):
147 | if not d:
148 | return
149 | for k, v in d.items():
150 | self.headers[k] = [v, ]
151 |
152 | def get(self, k, v=None):
153 | if k not in self:
154 | return v
155 | return self.headers[k][0]
156 |
157 | def get_headers(self, k):
158 | return self.headers.get(k, [])
159 |
160 | def __contains__(self, k):
161 | return self.headers.get(k)
162 |
163 | def __delitem__(self, k):
164 | del self.headers[k]
165 |
166 | def __iter__(self):
167 | for k, l in self.headers.items():
168 | for v in l:
169 | yield k, v
170 |
171 | def send_header(self, stream):
172 | stream.write((self.get_startline() + '\r\n').encode(ENCODING))
173 | for k, v in self:
174 | stream.write(("%s: %s\r\n" % (k, v)).encode(ENCODING))
175 | stream.write(b'\r\n')
176 | stream.flush()
177 | self.sent = True
178 |
179 | def recv_header(self, stream):
180 | while True:
181 | line = stream.readline()
182 | if not line:
183 | raise EOFError()
184 | if not line.strip():
185 | break
186 | # continuation lines start with whitespace; compare bytes, then decode
187 | if line[:1] not in (b' ', b'\t'):
188 | h, v = line.decode(ENCODING).split(':', 1)
189 | self.add(h.strip(), v.strip())
190 | else:
191 | self.add(h.strip(), line.decode(ENCODING).strip())
192 |
193 | def debug(self):
194 | logging.debug(self.direction + self.get_startline())
195 | for k, v in self:
196 | logging.debug('%s%s: %s', self.direction, k, v)
197 | logging.debug('')
198 |
199 | def recvdone(self):
200 | if self.version == 'HTTP/1.1':
201 | self.keepalive = self.get('Connection') != 'close'
202 | else:
203 | self.keepalive = self.get('Connection') == 'keep-alive'
204 |
205 | def beforesend(self):
206 | self['Connection'] = 'keep-alive' if self.keepalive else 'close'
207 |
208 | def recv_length_body(self):
209 | for i in range(0, self.length, BUFSIZE):
210 | data = self.stream.read(min(self.length - i, BUFSIZE))
211 | if not data:
212 | raise EOFError
213 | yield data
214 |
215 | def recv_chunked_body(self):
216 | while True:
217 | chunk = self.stream.readline().decode(ENCODING).rstrip().split(';')
218 | chunk_size = int(chunk[0], 16)
219 | if not chunk_size:
220 | return
221 | data = self.stream.read(chunk_size+2)
222 | if not data:
223 | raise EOFError
224 | data = data[:-2]
225 | if not data:
226 | break
227 | yield data
228 |
229 | @classmethod
230 | def recvfrom(cls, stream, sock=None):
231 | line = stream.readline().strip()
232 | if not line:
233 | raise EOFError()
234 | r = line.decode(ENCODING).split(' ', 2)
235 | if len(r) < 2:
236 | raise ValueError('unknown format', r)
237 | if len(r) < 3:
238 | r.append(DEFAULT_PAGES[int(r[1])][0])
239 | msg = cls(*r)
240 | msg.recv_header(stream)
241 | msg.stream, msg.sock = stream, sock
242 | if msg.get('Transfer-Encoding', 'identity') != 'identity':
243 | msg.body = msg.recv_chunked_body()
244 | logging.debug('recv body on chunk mode')
245 | elif 'Content-Length' in msg:
246 | msg.length = int(msg['Content-Length'])
247 | msg.body = msg.recv_length_body()
248 | logging.debug('recv body on length mode, size: %s', msg.length)
249 | elif msg.hasbody():
250 | msg.body = file_source(stream)
251 | logging.debug('recv body on close mode')
252 | else:
253 | logging.debug('recv body on nobody mode')
254 | msg.recvdone()
255 | return msg
256 |
257 | def readbody(self):
258 | if hasattr(self.body, '__iter__') and not isinstance(self.body, bytes):
259 | self.body = b''.join(self.body)
260 | if hasattr(self.body, 'read'):
261 | self.body = self.body.read()
262 | return self.body
263 |
264 | def readform(self):
265 | return dict(i.split('=', 1) for i in self.readbody().split('&'))
266 |
267 | def set_body(self):
268 | if isinstance(self.body, unicode):
269 | raise TypeError('body is unicode, bytes expected.')
270 | if hasattr(self.body, 'read'): # transfer file to chunk
271 | self.body = file_source(self.body)
272 | elif isinstance(self.body, bytes):
273 | self.length = len(self.body)
274 | if self.length is not None: # length fit for data and stream
275 | self['Content-Length'] = str(self.length)
276 | elif self.body is not None: # set chunked if use chunk mode
277 | self['Transfer-Encoding'] = 'chunked'
278 | self.body = chunked(self.body)
279 |
280 | # CAUTION: encoding has been locked to utf-8
281 | def sendto(self, stream):
282 | self.beforesend()
283 | self.set_body()
284 | self.send_header(stream)
285 | if self.body is None:
286 | return
287 | if isinstance(self.body, bytes):
288 | stream.write(self.body)
289 | elif hasattr(self.body, '__iter__'):
290 | for block in self.body:
291 | stream.write(block)
292 | else:
293 | raise Exception('unknown body')
294 | stream.flush()
295 |
296 |
297 | class FileBase(object):
298 |
299 | def __enter__(self):
300 | return self
301 |
302 | def __exit__(self, exc_type, exc_value, traceback):
303 | return self.close()
304 |
305 |
306 | class Request(HttpMessage):
307 | direction = '> '
308 |
309 | def __init__(self, method, uri, version):
310 | HttpMessage.__init__(self)
311 | self.method, self.uri, self.version = method, uri, version
312 |
313 | def get_startline(self):
314 | return ' '.join((self.method, self.uri, self.version))
315 |
316 | def hasbody(self):
317 | return False
318 |
319 | @classmethod
320 | def create(cls, uri, method=None, version=None, headers=None, body=None):
321 | if not method:
322 | method = 'GET' if body is None else 'POST'
323 | if not version:
324 | version = 'HTTP/1.1'
325 | req = cls(method, uri, version)
326 | req.header_from_dict(headers)
327 | if isinstance(body, unicode):
328 | body = body.encode(ENCODING)
329 | if body:
330 | req.body = body
331 | return req
332 |
333 |
334 | class RequestWriteFile(FileBase):
335 |
336 | def __init__(self, stream):
337 | self.stream = stream
338 |
339 | def write(self, s):
340 | if isinstance(s, unicode):
341 | s = s.encode(ENCODING)
342 | self.stream.write(b'%x\r\n%s\r\n' % (len(s), s,))
343 |
344 | def close(self):
345 | self.stream.write(b'0\r\n\r\n')
346 | self.stream.flush()
347 |
348 | def get_response(self):
349 | return Response.recvfrom(self.stream)
350 |
351 |
352 | class ResponseFile(FileBase):
353 |
354 | def __init__(self, resp):
355 | self.resp = resp
356 | self.f = BufferedFile(resp.body)
357 | self.read = self.f.read
358 | self.close = resp.close
359 |
360 | def getcode(self):
361 | return int(self.resp.code)
362 |
363 |
364 | class Response(HttpMessage):
365 | direction = '< '
366 |
367 | def __init__(self, version, code, phrase):
368 | HttpMessage.__init__(self)
369 | self.version, self.code, self.phrase = version, int(code), phrase
370 |
371 | def __nonzero__(self):
372 | return self.keepalive
373 |
374 | def close(self):
375 | if hasattr(self, 'stream'):
376 | return self.stream.close()
377 |
378 | def get_startline(self):
379 | return ' '.join((self.version, str(self.code), self.phrase))
380 |
381 | def hasbody(self):
382 | return self.code not in CODE_NOBODY
383 |
384 | def makefile(self):
385 | return ResponseFile(self)
386 |
387 | @classmethod
388 | def create(cls, code, phrase=None, version=None, headers=None, body=None):
389 | if not phrase:
390 | phrase = DEFAULT_PAGES[code][0]
391 | if not version:
392 | version = 'HTTP/1.1'
393 | res = cls(version, code, phrase)
394 | res.header_from_dict(headers)
395 | if isinstance(body, unicode):
396 | body = body.encode(ENCODING)
397 | if body:
398 | res.body = body
399 | return res
400 |
401 |
402 | # ================== client part ==================
403 |
404 |
405 | def connector(addr):
406 | s = socket.socket()
407 | s.connect(addr)
408 | stream = s.makefile('rwb')
409 | # You need to close all files and socket to really close the socket.
410 | s.close()
411 | return stream
412 |
413 | # class SocketPool(object):
414 |
415 | # def __init__(self, max_addr=-1):
416 | # self._lock = threading.RLock()
417 | # self.buf, self.max_addr = {}, max_addr
418 |
419 | # def setmax(self, max_addr=-1):
420 | # self.max_addr = max_addr
421 |
422 | # def __call__(self, addr):
423 | # host = addr[0]
424 | # addr = (socket.gethostbyname(host), addr[1])
425 | # stream = None
426 | # with self._lock:
427 | # if self.buf.get(addr):
428 | # stream = self.buf[addr].pop(0)
429 | # logging.debug(
430 | # 'acquire conn %s:%d size %d',
431 | # host, addr[1], len(self.buf[addr]))
432 | # if stream is None:
433 | # logging.debug('create new conn: %s:%d', host, addr[1])
434 | # stream = connect_addr(addr)
435 | # stream._close = stream.close
436 | # stream.close = lambda: self.release(stream)
437 | # return stream
438 |
439 | # def release(self, stream):
440 | # try:
441 | # addr = stream._sock.getpeername()
442 | # except socket.error:
443 | # logging.debug('free conn.')
444 | # return
445 | # with self._lock:
446 | # self.buf.setdefault(addr, [])
447 | # if self.max_addr < 0 or len(self.buf[addr]) < self.max_addr:
448 | # self.buf[addr].append(stream)
449 | # logging.debug(
450 | # 'release conn %s:%d back size %d',
451 | # addr[0], addr[1], len(self.buf[addr]))
452 | # return
453 | # logging.debug('free conn %s:%d.', addr[0], addr[1])
454 | # stream._close()
455 |
456 | # connector = SocketPool()
457 |
458 |
459 | # In Python 2, closing req.stream does not affect resp.stream.read.
460 | # In Python 3 that no longer works, so leave req.stream open and use
461 | # resp.close to close the stream.
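# A short illustration of the note above (hypothetical URL, assuming a server
# is listening locally); the caller closes the response, not the request:
#
#     resp = download('http://localhost:8080/')
#     try:
#         body = resp.readbody()
#     finally:
#         resp.close()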
462 | def round_trip(req):
463 | req.stream = connector(req.remote)
464 | req.sendto(req.stream)
465 | req.stream.flush()
466 | return Response.recvfrom(req.stream)
467 |
468 |
469 | def parseurl(url):
470 | u = urlparse(url)
471 | uri = u.path
472 | if u.query:
473 | uri += '?' + u.query
474 | if ':' not in u.netloc:
475 | host, port = u.netloc, 443 if u.scheme == 'https' else 80
476 | else:
477 | host, port = u.netloc.split(':', 1)
478 | return host, int(port), uri
479 |
480 |
481 | def download(url, method=None, headers=None, data=None):
482 | host, port, uri = parseurl(url)
483 | if not uri:
484 | uri = '/'
485 | req = Request.create(uri, method, headers=headers, body=data)
486 | req.remote = (host, port)
487 | req['Host'] = host
488 | return round_trip(req)
489 |
490 |
491 | def upload(url, method='POST', headers=None):
492 | host, port, uri = parseurl(url)
493 | if not uri:
494 | uri = '/'
495 | req = Request.create(uri, method, headers=headers)
496 | req.remote = (host, port)
497 | req['Host'] = host
498 | req['Transfer-Encoding'] = 'chunked'
499 | stream = connector(req.remote)
500 | try:
501 | req.send_header(stream)
502 | return RequestWriteFile(stream)
503 | except:
504 | stream.close()
505 | raise
506 |
507 |
508 | # ================== server part ==================
509 |
510 |
511 | class ThreadServer(object):
512 | MAX_CONN = 10000
513 | import signal
514 |
515 | def __init__(self, addr, handler, poolsize=2):
516 | self.go = True
517 | self.addr = addr
518 | self.poolsize = poolsize
519 | self.handler = handler
520 |
521 | def run(self):
522 | try:
523 | while self.go:
524 | sock, addr = self.listen_socket.accept()
525 | self.handler(sock, addr)
526 | except KeyboardInterrupt:
527 | return
528 | except Exception:
529 | logging.exception('unknown')
530 |
531 | siglist = [signal.SIGTERM, signal.SIGINT]
532 |
533 | def signal_handler(self, signum, frame):
534 | if signum in self.siglist:
535 | self.go = False
536 | raise KeyboardInterrupt()
537 |
538 | def start(self):
539 | logging.info('WebServer started at %s:%d', self.addr[0], self.addr[1])
540 | self.listen_socket = socket.socket()
541 | try:
542 | self.listen_socket.setsockopt(
543 | socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
544 | self.listen_socket.bind(self.addr)
545 | self.listen_socket.listen(self.MAX_CONN)
546 | for si in self.siglist:
547 | self.signal.signal(si, self.signal_handler)
548 | self.pool = []
549 | for _ in range(self.poolsize):
550 | th = threading.Thread(target=self.run)
551 | th.setDaemon(1)
552 | th.start()
553 | self.pool.append(th)
554 | except Exception:
555 | self.listen_socket.close()
556 | raise
557 |
558 | def join(self):
559 | for th in self.pool:
560 | th.join()
561 |
562 | def serve_forever(self):
563 | self.start()
564 | try:
565 | self.join()
566 | finally:
567 | logging.info('system exit')
568 | self.listen_socket.close()
569 |
570 |
571 | class WebServer(object):
572 |
573 | def __init__(self, application, accesslog=None):
574 | self.application = application
575 | if accesslog is None:
576 | return
577 | if accesslog == '':
578 | self.accessfile = sys.stdout
579 | elif isinstance(accesslog, unicode):
580 | self.accessfile = open(accesslog, 'a')
581 | else:
582 | self.accessfile = accesslog
583 |
584 | def record_access(self, req, res, addr):
585 | if not hasattr(self, 'accessfile'):
586 | return
587 | if res is None:
588 | code, length = 500, None
589 | else:
590 | code, length = res.code, res.length
591 | length = '-' if length is None else str(length)
592 | self.accessfile.write(
593 | '%s:%d - - [%s] "%s" %d %s "-" %s\n' % (
594 | addr[0], addr[1], datetime.datetime.now().isoformat(),
595 | req.get_startline(), code, length, req.get('User-Agent')))
596 | self.accessfile.flush()
597 |
598 | def http_handler(self, req):
599 | req.url = urlparse(req.uri)
600 | req.path = req.url.path
601 | return self.application(req)
602 |
603 | def handler(self, sock, addr):
604 | # You need to close all files and socket to really close the socket.
605 | stream, res = sock.makefile('rwb'), True
606 | sock.close()
607 | try:
608 | while res:
609 | req, res = None, None
610 | try:
611 | req = Request.recvfrom(stream)
612 | except (EOFError, socket.error):
613 | break
614 | req.remote = addr
615 | try:
616 | res = self.http_handler(req)
617 | res.sendto(req.stream)
618 | finally:
619 | self.record_access(req, res, addr)
620 | except Exception:
621 | logging.exception('unknown')
622 | finally:
623 | stream.close()
624 |
625 |
626 | class WSGIServer(WebServer):
627 |
628 | @staticmethod
629 | def req2env(req):
630 | env = dict(('HTTP_' + k.upper().replace('-', '_'), v)
631 | for k, v in req)
632 | env['REQUEST_METHOD'] = req.method
633 | env['SCRIPT_NAME'] = ''
634 | env['PATH_INFO'] = req.url.path
635 | env['QUERY_STRING'] = req.url.query
636 | env['CONTENT_TYPE'] = req.get('Content-Type')
637 | env['CONTENT_LENGTH'] = req.get('Content-Length', 0)
638 | env['SERVER_PROTOCOL'] = req.version
639 | if req.method in set(['POST', 'PUT']):
640 | env['wsgi.input'] = BufferedFile(req.body)
641 | return env
642 |
643 | def http_handler(self, req):
644 | req.url = urlparse(req.uri)
645 | env = self.req2env(req)
646 |
647 | res = Response.create(500)
648 |
649 | def start_response(status, headers):
650 | r = status.split(' ', 1)
651 | res.code = int(r[0])
652 | if len(r) > 1:
653 | res.phrase = r[1]
654 | else:
655 | res.phrase = DEFAULT_PAGES[res.code][0]
656 | for k, v in headers:
657 | res.add(k, v)
658 | res.add('Transfer-Encoding', 'chunked')
659 | res.send_header(req.stream)
660 |
661 | try:
662 | for b in chunked(self.application(env, start_response)):
663 | req.stream.write(b)
664 | req.stream.flush()
665 | finally:
666 | if not res.sent:
667 | res.send_header(req.stream)
668 | # drain any unread request body so the connection can be reused
669 | if req.body:
670 | for b in req.body:
671 | pass
672 | return res
673 |
--------------------------------------------------------------------------------
/midware.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2012-09-03
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division,\
9 | print_function, unicode_literals
10 | import re
11 | import time
12 | import random
13 | import pickle
14 | import string
15 | import logging
16 | import unittest
17 | try:
18 | from urllib import quote, unquote
19 | except ImportError:
20 | from urllib.parse import quote, unquote
21 | import httputil
22 |
23 |
24 | class Dispatch(object):
25 |
26 | def __init__(self, urlmap=None):
27 | self.urlmap = [[re.compile(i[0]), ] + list(i[1:]) for i in urlmap]
28 |
29 | def __call__(self, req):
30 | if not hasattr(req, 'url_match'):
31 | req.url_match = {}
32 | if not hasattr(req, 'url_param'):
33 | req.url_param = {}
34 | for rule in self.urlmap:
35 | m = rule[0].match(req.path)
36 | if not m:
37 | continue
38 | # this make dispatch chain possible.
39 | req.path = req.path[len(m.group()):]
40 | req.url_match.update(m.groupdict())
41 | if len(rule) > 2:
42 | req.url_param.update(rule[2])
43 | return rule[1](req)
44 | return self.default_handler(req)
45 |
46 | @staticmethod
47 | def default_handler(req):
48 | return httputil.Response.create(404, body='File Not Found')
49 |
50 |
51 | class Cache(object):
52 |
53 | def __call__(self, func):
54 | def inner(req):
55 | pickled_data = self.get_data(req.url.path)
56 | if pickled_data:
57 | logging.info('cache hit in %s', req.url.path)
58 | return pickle.loads(pickled_data)
59 | res = func(req)
60 | if res is not None and res.cache and res.body:
61 | res['Cache-Control'] = 'max-age=%d' % res.cache
62 | pickled_data = pickle.dumps(res, 2)
63 | self.set_data(req.url.path, pickled_data, res.cache)
64 | return res
65 | return inner
66 |
67 |
68 | class ObjHeap(object):
69 | '''\
70 | An object cache container that evicts entries with an LRU policy.
71 | Thanks to Evan Prodromou.
72 |
73 | CAUTION: not thread-safe.
74 | '''
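# A small illustration (hypothetical keys and values): with size 2 the least
# recently used key is evicted first.
#
#     oh = ObjHeap(2)
#     oh['a'] = 1
#     oh['b'] = 2
#     oh['a']        # touching 'a' makes 'b' the least recently used
#     oh['c'] = 3    # evicts 'b'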
75 | import heapq
76 |
77 | class __node(object):
78 |
79 | def __init__(self, k, v, freq):
80 | self.k = k
81 | self.v = v
82 | self.freq = freq
83 |
84 | def __lt__(self, o):
85 | return self.freq < o.freq
86 |
87 | def __init__(self, size):
88 | self.size = size
89 | self.freq = 0
90 | self.__dict = {}
91 | self.__heap = []
92 |
93 | def __len__(self):
94 | return len(self.__dict)
95 |
96 | def __contains__(self, k):
97 | return k in self.__dict
98 |
99 | def __setitem__(self, k, v):
100 | if k in self.__dict:
101 | n = self.__dict[k]
102 | n.v = v
103 | self.freq += 1
104 | n.freq = self.freq
105 | self.heapq.heapify(self.__heap)
106 | else:
107 | while len(self.__heap) >= self.size:
108 | del self.__dict[self.heapq.heappop(self.__heap).k]
109 | self.freq = 0
110 | for n in self.__heap:
111 | n.freq = 0
112 | n = self.__node(k, v, self.freq)
113 | self.__dict[k] = n
114 | self.heapq.heappush(self.__heap, n)
115 |
116 | def __getitem__(self, k):
117 | n = self.__dict[k]
118 | self.freq += 1
119 | n.freq = self.freq
120 | self.heapq.heapify(self.__heap)
121 | return n.v
122 |
123 | def __delitem__(self, k):
124 | n = self.__dict[k]
125 | del self.__dict[k]
126 | self.__heap.remove(n)
127 | self.heapq.heapify(self.__heap)
128 | return n.v
129 |
130 | def __iter__(self):
131 | # the generator ends on its own; raising StopIteration breaks on Python 3.7+
132 | c = self.__heap[:]
133 | while len(c):
134 | yield self.heapq.heappop(c).k
135 |
136 |
137 | # CAUTION: Although MC has an expire time, expiry is only enforced when the
138 | # data is read back after the timeout. So in some cases LRU may squeeze out
139 | # data that has not expired, while keeping data that has expired but was
140 | # used more frequently.
141 | #
142 | # To fix this problem we would need another heap to track when the data
143 | # times out. That is more complex and, as far as I can see, not necessary.
144 | class MemoryCache(Cache):
145 |
146 | def __init__(self, size):
147 | super(MemoryCache, self).__init__()
148 | self.oh = ObjHeap(size)
149 |
150 | def get_data(self, k):
151 | try:
152 | o = self.oh[k]
153 | except KeyError:
154 | return None
155 | if o[1] >= time.time():
156 | return o[0]
157 | del self.oh[k]
158 | return None
159 |
160 | def set_data(self, k, v, exp):
161 | self.oh[k] = (v, time.time() + exp)
162 |
163 |
164 | class TestHeap(unittest.TestCase):
165 |
166 | def test_CRUD(self):
167 | oh = ObjHeap(2)
168 | oh[1] = 10
169 | self.assertEqual(oh[1], 10)
170 | oh[1] = 20
171 | self.assertEqual(oh[1], 20)
172 | del oh[1]
173 | self.assertNotIn(1, oh)
174 |
175 | def test_LRU(self):
176 | oh = ObjHeap(2)
177 | oh[1] = 10
178 | oh[2] = 20
179 | oh[3] = 30
180 | self.assertNotIn(1, oh)
181 |
182 | def test_MC(self):
183 | mc = MemoryCache(2)
184 | mc.set_data(1, 10, 1)
185 | mc.set_data(2, 20, 1)
186 | mc.set_data(3, 30, 1)
187 | self.assertEqual(mc.get_data(1), None)
188 |
189 | def test_timeout(self):
190 | mc = MemoryCache(2)
191 | mc.set_data(1, 10, 0.01)
192 | time.sleep(0.1)
193 | self.assertEqual(mc.get_data(1), None)
194 |
195 |
196 | random.seed()
197 | ALPHABET = string.ascii_letters + string.digits
198 |
199 |
200 | def get_rnd_sess():
201 | return ''.join(random.sample(ALPHABET, 32))
202 |
203 |
204 | def get_params_dict(data, delimiter='&'):
205 | if not data:
206 | return {}
207 | rslt = {}
208 | for p in data.split(delimiter):
209 | i = p.strip().split('=', 1)
210 | rslt[i[0]] = unquote(i[1])
211 | return rslt
212 |
213 |
214 | class Cookie(object):
215 |
216 | def __init__(self, cookie):
217 | if not cookie:
218 | self.__cookies = {}
219 | else:
220 | self.__cookies = get_params_dict(cookie, ';')
221 | self.__modified = set()
222 |
223 | def get(self, k, d):
224 | return self.__cookies.get(k, d)
225 |
226 | def __contains__(self, k):
227 | return k in self.__cookies
228 |
229 | def __getitem__(self, k):
230 | return self.__cookies[k]
231 |
232 | def __delitem__(self, k):
233 | self.__modified.add(k)
234 | del self.__cookies[k]
235 |
236 | def __setitem__(self, k, v):
237 | self.__modified.add(k)
238 | self.__cookies[k] = v
239 |
240 | def set_cookie(self, res):
241 | for k in self.__modified:
242 | res.add('Set-Cookie', '%s=%s' % (k, quote(self.__cookies[k])))
243 |
244 |
245 | class Session(object):
246 |
247 | def __init__(self, timeout):
248 | self.exp = timeout
249 |
250 | def __call__(self, func):
251 | def inner(req):
252 | req.cookie = Cookie(req.get('Cookie'))
253 | sessionid = req.cookie.get('sessionid', '')
254 | if not sessionid:
255 | sessionid = get_rnd_sess()
256 | req.cookie['sessionid'] = sessionid
257 | data = None
258 | else:
259 | data = self.get_data(sessionid)
260 | req.session = {}
261 | if data:
262 | req.session = pickle.loads(data)
263 | logging.info('sessionid: %s', sessionid)
264 | logging.info('session: %s', str(req.session))
265 | res = func(req)
266 | self.set_data(sessionid, pickle.dumps(req.session, 2))
267 | req.cookie.set_cookie(res)
268 | return res
269 | return inner
270 |
271 |
272 | class MemorySession(Session):
273 |
274 | def __init__(self, timeout):
275 | super(MemorySession, self).__init__(timeout)
276 | self.sessions = {}
277 |
278 | def get_data(self, sessionid):
279 | return self.sessions.get(sessionid, None)
280 |
281 | def set_data(self, sessionid, data):
282 | self.sessions[sessionid] = data
283 |
--------------------------------------------------------------------------------
/serve.conf:
--------------------------------------------------------------------------------
1 | [main]
2 | addr=
3 | port=8080
4 |
5 | [log]
6 | loglevel=DEBUG
7 | logfile=
8 | access=
9 |
10 | [server]
11 | engine=apps
12 | server=gevent
13 |
--------------------------------------------------------------------------------
/serve.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2012-09-03
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division, \
9 | print_function, unicode_literals
10 | import sys
11 | import logging
12 | import httputil
13 |
14 | if sys.version_info.major == 3:
15 | basestring = str
16 |
17 |
18 | LOGFMT = '%(asctime)s.%(msecs)03d[%(levelname)s]\
19 | (%(module)s:%(lineno)d): %(message)s'
20 |
21 |
22 | def initlog(lv, logfile=None, stream=None, longdate=False):
23 | if logfile and logfile.startswith('syslog:'):
24 | from logging import handlers
25 | handler = handlers.SysLogHandler(logfile[7:])
26 | elif logfile:
27 | handler = logging.FileHandler(logfile)
28 | elif stream:
29 | handler = logging.StreamHandler(stream)
30 | else:
31 | handler = logging.StreamHandler(sys.stderr)
32 |
33 | datefmt = '%H:%M:%S'
34 | if longdate:
35 | datefmt = '%Y-%m-%d %H:%M:%S'
36 | handler.setFormatter(logging.Formatter(LOGFMT, datefmt))
37 |
38 | logger = logging.getLogger()
39 | if isinstance(lv, basestring):
40 | lv = getattr(logging, lv)
41 |
42 | logger.setLevel(lv)
43 | logger.addHandler(handler)
44 |
45 |
46 | def getcfg(cfgpathes):
47 | try:
48 | from ConfigParser import SafeConfigParser
49 | except ImportError:
50 | from configparser import SafeConfigParser
51 | cp = SafeConfigParser()
52 | cp.read(cfgpathes)
53 | return cp
54 |
55 |
56 | def main():
57 | cfg = getcfg([
58 | 'serve.conf', '~/.webserver/serve.conf', '/etc/webserver/serve.conf'])
59 | initlog(cfg.get('log', 'loglevel'), cfg.get('log', 'logfile'))
60 | addr = (cfg.get('main', 'addr'), cfg.getint('main', 'port'))
61 |
62 | engine = cfg.get('server', 'engine')
63 | if engine == 'apps':
64 | import apps
65 | ws = httputil.WebServer(apps.dis, cfg.get('log', 'access'))
66 | elif engine == 'wsgi':
67 | import app_webpy
68 | ws = httputil.WSGIServer(app_webpy.app.wsgifunc(),
69 | cfg.get('log', 'access'))
70 | else:
71 | raise Exception('invalid engine %s' % engine)
72 |
73 | server = cfg.get('server', 'server')
74 | if server == 'gevent':
75 | from gevent.server import StreamServer
76 | ws = StreamServer(addr, ws.handler)
77 | elif server == 'thread':
78 | ws = httputil.ThreadServer(addr, ws.handler)
79 | else:
80 | raise Exception('invalid server %s' % server)
81 |
82 | try:
83 | ws.serve_forever()
84 | except KeyboardInterrupt:
85 | pass
86 |
87 | if __name__ == '__main__':
88 | main()
89 |
--------------------------------------------------------------------------------
/template.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2010-09-27
5 | @author: shell.xu
6 | @license: BSD-3-clause
7 | '''
8 | from __future__ import absolute_import, division,\
9 | print_function, unicode_literals
10 | import os
11 | import sys
12 | import codecs
13 | import logging
14 | import unittest
15 |
16 |
17 | if sys.version_info.major == 3:
18 | unicode = str
19 |
20 |
21 | class TemplateCode(object):
22 | TAB = u' '
23 |
24 | def __init__(self):
25 | self.deep, self.rslt, self.defs = 0, [], []
26 |
27 | def str(self, s):
28 | if not s:
29 | return
30 | self.rslt.append(
31 | u'{}write(u\'\'\'{}\'\'\')'.format(self.TAB * self.deep, s))
32 |
33 | def code(self, s):
34 | r = self.map_code(s)
35 | if not r:
36 | return
37 | self.rslt.append(r)
38 |
39 | def map_code(self, s):
40 | s, tab = s.strip(), self.deep
41 | if s.startswith(u'='):
42 | s = u'write(%s)' % s[1:]
43 | elif s.startswith(u'end'):
44 | self.deep -= 1
45 | return
46 | elif s.startswith(u'for') or s.startswith(u'if'):
47 | self.deep += 1
48 | elif s.startswith(u'el'):
49 | tab -= 1
50 | elif s.startswith(u'def'):
51 | self.defs.append(s + u'\n')
52 | return
53 | elif s.startswith(u'include'):
54 | self.include(s[8:])
55 | return
56 | elif s.startswith(u'import'):
57 | self.defs.append(s + u'\n')
58 | return
59 | return self.TAB * tab + s
60 |
61 | def include(self, filepath):
62 | with codecs.open(filepath, 'r', 'utf-8') as tfile:
63 | self.process(tfile.read())
64 |
65 | def process(self, s):
66 | while True:
67 | i = s.partition(u'{%')
68 | if not i[1]:
69 | break
70 | if i[0]:
71 | self.str(i[0])
72 | t = i[2].partition(u'%}')
73 | if not t[1]:
74 | raise Exception('not match')
75 | self.code(t[0])
76 | s = t[2]
77 | self.str(s)
78 |
79 | def get_code(self):
80 | return u'\n'.join(self.rslt)
81 |
82 |
83 | class Template(object):
84 | '''
85 | Template object used to render templates.
86 | Code:
87 | info = {'r': r, 'objs': [(1, 2), (3, 4)]}
88 | response.append_body(tpl.render(info))
89 | Template:
90 | {%=r.get('a', 'this is title')%}
91 | col1 | col2 |
92 | {%for i in objs:%}{%=i[0]%} | {%=i[1]%} |
93 | {%end%}
94 | '''
95 |
96 | def __init__(self, filepath=None, template=None, env=None):
97 | '''
98 | @param filepath: file path; load and compile the template from this file
99 | @param template: a string; compile the template from the string directly
100 | '''
101 | if not env:
102 | env = globals()
103 | self.env = env
104 | if filepath:
105 | self.loadfile(filepath)
106 | elif template:
107 | self.loadstr(template)
108 |
109 | def loadfile(self, filepath):
110 | ''' Read the template string from a file and compile it. '''
111 | self.modify_time = os.stat(filepath).st_mtime
112 | with codecs.open(filepath, 'r', 'utf-8') as tfile:
113 | self.loadstr(tfile.read())
114 |
115 | def loadstr(self, template):
116 | ''' Compile the template string into executable code. '''
117 | tc = TemplateCode()
118 | tc.process(template)
119 | code = tc.get_code()
120 | logging.debug(code)
121 | self.htmlcode, self.defcodes = compile(code, '', 'exec'), {}
122 | for i in tc.defs:
123 | eval(compile(i, '', 'exec'), self.env, self.defcodes)
124 |
125 | def reload(self, filepath):
126 | ''' If loaded from a file, check whether the file has been updated and reload it. '''
127 | if not hasattr(self, 'modify_time') or \
128 | os.stat(filepath).st_mtime > self.modify_time:
129 | self.loadfile(filepath)
130 |
131 | def render(self, kargs):
132 | ''' Render the template with the given arguments. '''
133 | b = []
134 | kargs['write'] = lambda x: b.append(unicode(x))
135 | eval(self.htmlcode, self.defcodes, kargs)
136 | return u''.join(b)
137 |
138 |
139 | class TestTemplate(unittest.TestCase):
140 | template = u'''{%=r%}
141 | col1 | col2 |
142 | {%for i in objs:%}{%=i[0]%} | {%=i[1]%} |
143 | {%end%}
'''
144 | result = u'''test
145 | col1 | col2 |
146 | 1 | 2 |
147 | 3 | 4 |
148 |
'''
149 |
150 | def test_render(self):
151 | t = Template(template=self.template)
152 | info = {u'r': u'test', u'objs': [(1, 2), (3, 4)]}
153 | self.assertEqual(t.render(info), self.result)
154 |
--------------------------------------------------------------------------------
/test.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | '''
4 | @date: 2016-08-24
5 | @author: Shell.Xu
6 | @copyright: 2016, Shell.Xu
7 | @license: BSD-3-clause
8 | '''
9 | from __future__ import absolute_import, division,\
10 | print_function, unicode_literals
11 | import unittest
12 | from apps import TestApp
13 | try:
14 | from app_webpy import TestAppWebpy
15 | except ImportError:
16 | pass
17 | from client import TestClientApp, TestClientWebpy
18 | from midware import TestHeap
19 | from template import TestTemplate
20 |
21 |
22 | if __name__ == '__main__':
23 | unittest.main()
24 |
--------------------------------------------------------------------------------