47 | # Also test default server.instance = builtin server
48 | 'server.yetanother.socket_port': 9878,
49 | })
50 | setup_server = staticmethod(setup_server)
51 |
52 | PORT = 9876
53 |
54 | def testBasicConfig(self):
55 | self.getPage("/")
56 | self.assertBody(str(self.PORT))
57 |
58 | def testAdditionalServers(self):
59 | if self.scheme == 'https':
60 | return self.skip("not available under ssl")
61 | self.PORT = 9877
62 | self.getPage("/")
63 | self.assertBody(str(self.PORT))
64 | self.PORT = 9878
65 | self.getPage("/")
66 | self.assertBody(str(self.PORT))
67 |
68 | def testMaxRequestSizePerHandler(self):
69 | if getattr(cherrypy.server, "using_apache", False):
70 | return self.skip("skipped due to known Apache differences... ")
71 |
72 | self.getPage('/tinyupload', method="POST",
73 | headers=[('Content-Type', 'text/plain'),
74 | ('Content-Length', '100')],
75 | body="x" * 100)
76 | self.assertStatus(200)
77 | self.assertBody("x" * 100)
78 |
79 | self.getPage('/tinyupload', method="POST",
80 | headers=[('Content-Type', 'text/plain'),
81 | ('Content-Length', '101')],
82 | body="x" * 101)
83 | self.assertStatus(413)
84 |
85 | def testMaxRequestSize(self):
86 | if getattr(cherrypy.server, "using_apache", False):
87 | return self.skip("skipped due to known Apache differences... ")
88 |
89 | for size in (500, 5000, 50000):
90 | self.getPage("/", headers=[('From', "x" * size)])
91 | self.assertStatus(413)
92 |
93 | # Test for http://www.cherrypy.org/ticket/421
94 | # (Incorrect border condition in readline of SizeCheckWrapper).
95 | # This hangs in rev 891 and earlier.
96 | lines256 = "x" * 248
97 | self.getPage("/",
98 | headers=[('Host', '%s:%s' % (self.HOST, self.PORT)),
99 | ('From', lines256)])
100 |
101 | # Test upload
102 | body = '\r\n'.join([
103 | '--x',
104 | 'Content-Disposition: form-data; name="file"; filename="hello.txt"',
105 | 'Content-Type: text/plain',
106 | '',
107 | '%s',
108 | '--x--'])
109 | partlen = 200 - len(body)
110 | b = body % ("x" * partlen)
111 | h = [("Content-type", "multipart/form-data; boundary=x"),
112 | ("Content-Length", "%s" % len(b))]
113 | self.getPage('/upload', h, "POST", b)
114 | self.assertBody('Size: %d' % partlen)
115 |
116 | b = body % ("x" * 200)
117 | h = [("Content-type", "multipart/form-data; boundary=x"),
118 | ("Content-Length", "%s" % len(b))]
119 | self.getPage('/upload', h, "POST", b)
120 | self.assertStatus(413)
121 |
122 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_etags.py:
--------------------------------------------------------------------------------
1 | import cherrypy
2 | from cherrypy._cpcompat import ntou
3 | from cherrypy.test import helper
4 |
5 |
6 | class ETagTest(helper.CPWebCase):
7 |
8 | def setup_server():
9 | class Root:
10 | def resource(self):
11 | return "Oh wah ta goo Siam."
12 | resource.exposed = True
13 |
14 | def fail(self, code):
15 | code = int(code)
16 | if 300 <= code <= 399:
17 | raise cherrypy.HTTPRedirect([], code)
18 | else:
19 | raise cherrypy.HTTPError(code)
20 | fail.exposed = True
21 |
22 | def unicoded(self):
23 | return ntou('I am a \u1ee4nicode string.', 'escape')
24 | unicoded.exposed = True
25 | # In Python 3, tools.encode is on by default
26 | unicoded._cp_config = {'tools.encode.on': True}
27 |
28 | conf = {'/': {'tools.etags.on': True,
29 | 'tools.etags.autotags': True,
30 | }}
31 | cherrypy.tree.mount(Root(), config=conf)
32 | setup_server = staticmethod(setup_server)
33 |
34 | def test_etags(self):
35 | self.getPage("/resource")
36 | self.assertStatus('200 OK')
37 | self.assertHeader('Content-Type', 'text/html;charset=utf-8')
38 | self.assertBody('Oh wah ta goo Siam.')
39 | etag = self.assertHeader('ETag')
40 |
41 | # Test If-Match (both valid and invalid)
42 | self.getPage("/resource", headers=[('If-Match', etag)])
43 | self.assertStatus("200 OK")
44 | self.getPage("/resource", headers=[('If-Match', "*")])
45 | self.assertStatus("200 OK")
46 | self.getPage("/resource", headers=[('If-Match', "*")], method="POST")
47 | self.assertStatus("200 OK")
48 | self.getPage("/resource", headers=[('If-Match', "a bogus tag")])
49 | self.assertStatus("412 Precondition Failed")
50 |
51 | # Test If-None-Match (both valid and invalid)
52 | self.getPage("/resource", headers=[('If-None-Match', etag)])
53 | self.assertStatus(304)
54 | self.getPage("/resource", method='POST', headers=[('If-None-Match', etag)])
55 | self.assertStatus("412 Precondition Failed")
56 | self.getPage("/resource", headers=[('If-None-Match', "*")])
57 | self.assertStatus(304)
58 | self.getPage("/resource", headers=[('If-None-Match', "a bogus tag")])
59 | self.assertStatus("200 OK")
60 |
61 | def test_errors(self):
62 | self.getPage("/resource")
63 | self.assertStatus(200)
64 | etag = self.assertHeader('ETag')
65 |
66 | # Test raising errors in page handler
67 | self.getPage("/fail/412", headers=[('If-Match', etag)])
68 | self.assertStatus(412)
69 | self.getPage("/fail/304", headers=[('If-Match', etag)])
70 | self.assertStatus(304)
71 | self.getPage("/fail/412", headers=[('If-None-Match', "*")])
72 | self.assertStatus(412)
73 | self.getPage("/fail/304", headers=[('If-None-Match', "*")])
74 | self.assertStatus(304)
75 |
76 | def test_unicode_body(self):
77 | self.getPage("/unicoded")
78 | self.assertStatus(200)
79 | etag1 = self.assertHeader('ETag')
80 | self.getPage("/unicoded", headers=[('If-Match', etag1)])
81 | self.assertStatus(200)
82 | self.assertHeader('ETag', etag1)
83 |
84 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_httplib.py:
--------------------------------------------------------------------------------
1 | """Tests for cherrypy/lib/httputil.py."""
2 |
3 | import unittest
4 | from cherrypy.lib import httputil
5 |
6 |
7 | class UtilityTests(unittest.TestCase):
8 |
9 | def test_urljoin(self):
10 | # Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO
11 | self.assertEqual(httputil.urljoin("/sn/", "/pi/"), "/sn/pi/")
12 | self.assertEqual(httputil.urljoin("/sn/", "/pi"), "/sn/pi")
13 | self.assertEqual(httputil.urljoin("/sn/", "/"), "/sn/")
14 | self.assertEqual(httputil.urljoin("/sn/", ""), "/sn/")
15 | self.assertEqual(httputil.urljoin("/sn", "/pi/"), "/sn/pi/")
16 | self.assertEqual(httputil.urljoin("/sn", "/pi"), "/sn/pi")
17 | self.assertEqual(httputil.urljoin("/sn", "/"), "/sn/")
18 | self.assertEqual(httputil.urljoin("/sn", ""), "/sn")
19 | self.assertEqual(httputil.urljoin("/", "/pi/"), "/pi/")
20 | self.assertEqual(httputil.urljoin("/", "/pi"), "/pi")
21 | self.assertEqual(httputil.urljoin("/", "/"), "/")
22 | self.assertEqual(httputil.urljoin("/", ""), "/")
23 | self.assertEqual(httputil.urljoin("", "/pi/"), "/pi/")
24 | self.assertEqual(httputil.urljoin("", "/pi"), "/pi")
25 | self.assertEqual(httputil.urljoin("", "/"), "/")
26 | self.assertEqual(httputil.urljoin("", ""), "/")
27 |
28 | if __name__ == '__main__':
29 | unittest.main()
30 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_json.py:
--------------------------------------------------------------------------------
1 | import cherrypy
2 | from cherrypy.test import helper
3 |
4 | from cherrypy._cpcompat import json
5 |
6 | class JsonTest(helper.CPWebCase):
7 | def setup_server():
8 | class Root(object):
9 | def plain(self):
10 | return 'hello'
11 | plain.exposed = True
12 |
13 | def json_string(self):
14 | return 'hello'
15 | json_string.exposed = True
16 | json_string._cp_config = {'tools.json_out.on': True}
17 |
18 | def json_list(self):
19 | return ['a', 'b', 42]
20 | json_list.exposed = True
21 | json_list._cp_config = {'tools.json_out.on': True}
22 |
23 | def json_dict(self):
24 | return {'answer': 42}
25 | json_dict.exposed = True
26 | json_dict._cp_config = {'tools.json_out.on': True}
27 |
28 | def json_post(self):
29 | if cherrypy.request.json == [13, 'c']:
30 | return 'ok'
31 | else:
32 | return 'nok'
33 | json_post.exposed = True
34 | json_post._cp_config = {'tools.json_in.on': True}
35 |
36 | root = Root()
37 | cherrypy.tree.mount(root)
38 | setup_server = staticmethod(setup_server)
39 |
40 | def test_json_output(self):
41 | if json is None:
42 | self.skip("json not found ")
43 | return
44 |
45 | self.getPage("/plain")
46 | self.assertBody("hello")
47 |
48 | self.getPage("/json_string")
49 | self.assertBody('"hello"')
50 |
51 | self.getPage("/json_list")
52 | self.assertBody('["a", "b", 42]')
53 |
54 | self.getPage("/json_dict")
55 | self.assertBody('{"answer": 42}')
56 |
57 | def test_json_input(self):
58 | if json is None:
59 | self.skip("json not found ")
60 | return
61 |
62 | body = '[13, "c"]'
63 | headers = [('Content-Type', 'application/json'),
64 | ('Content-Length', str(len(body)))]
65 | self.getPage("/json_post", method="POST", headers=headers, body=body)
66 | self.assertBody('ok')
67 |
68 | body = '[13, "c"]'
69 | headers = [('Content-Type', 'text/plain'),
70 | ('Content-Length', str(len(body)))]
71 | self.getPage("/json_post", method="POST", headers=headers, body=body)
72 | self.assertStatus(415, 'Expected an application/json content type')
73 |
74 | body = '[13, -]'
75 | headers = [('Content-Type', 'application/json'),
76 | ('Content-Length', str(len(body)))]
77 | self.getPage("/json_post", method="POST", headers=headers, body=body)
78 | self.assertStatus(400, 'Invalid JSON document')
79 |
80 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_mime.py:
--------------------------------------------------------------------------------
1 | """Tests for various MIME issues, including the safe_multipart Tool."""
2 |
3 | import cherrypy
4 | from cherrypy._cpcompat import ntob, ntou, sorted
5 |
6 | def setup_server():
7 |
8 | class Root:
9 |
10 | def multipart(self, parts):
11 | return repr(parts)
12 | multipart.exposed = True
13 |
14 | def multipart_form_data(self, **kwargs):
15 | return repr(list(sorted(kwargs.items())))
16 | multipart_form_data.exposed = True
17 |
18 | def flashupload(self, Filedata, Upload, Filename):
19 | return ("Upload: %s, Filename: %s, Filedata: %r" %
20 | (Upload, Filename, Filedata.file.read()))
21 | flashupload.exposed = True
22 |
23 | cherrypy.config.update({'server.max_request_body_size': 0})
24 | cherrypy.tree.mount(Root())
25 |
26 |
27 | # Client-side code #
28 |
29 | from cherrypy.test import helper
30 |
31 | class MultipartTest(helper.CPWebCase):
32 | setup_server = staticmethod(setup_server)
33 |
34 | def test_multipart(self):
35 | text_part = ntou("This is the text version")
36 | html_part = ntou("""
37 |
38 |
39 |
40 |
41 |
42 |
43 | This is the HTML version
44 |
45 |
46 | """)
47 | body = '\r\n'.join([
48 | "--123456789",
49 | "Content-Type: text/plain; charset='ISO-8859-1'",
50 | "Content-Transfer-Encoding: 7bit",
51 | "",
52 | text_part,
53 | "--123456789",
54 | "Content-Type: text/html; charset='ISO-8859-1'",
55 | "",
56 | html_part,
57 | "--123456789--"])
58 | headers = [
59 | ('Content-Type', 'multipart/mixed; boundary=123456789'),
60 | ('Content-Length', str(len(body))),
61 | ]
62 | self.getPage('/multipart', headers, "POST", body)
63 | self.assertBody(repr([text_part, html_part]))
64 |
65 | def test_multipart_form_data(self):
66 | body='\r\n'.join(['--X',
67 | 'Content-Disposition: form-data; name="foo"',
68 | '',
69 | 'bar',
70 | '--X',
71 | # Test a param with more than one value.
72 | # See http://www.cherrypy.org/ticket/1028
73 | 'Content-Disposition: form-data; name="baz"',
74 | '',
75 | '111',
76 | '--X',
77 | 'Content-Disposition: form-data; name="baz"',
78 | '',
79 | '333',
80 | '--X--'])
81 | self.getPage('/multipart_form_data', method='POST',
82 | headers=[("Content-Type", "multipart/form-data;boundary=X"),
83 | ("Content-Length", str(len(body))),
84 | ],
85 | body=body),
86 | self.assertBody(repr([('baz', [ntou('111'), ntou('333')]), ('foo', ntou('bar'))]))
87 |
88 |
89 | class SafeMultipartHandlingTest(helper.CPWebCase):
90 | setup_server = staticmethod(setup_server)
91 |
92 | def test_Flash_Upload(self):
93 | headers = [
94 | ('Accept', 'text/*'),
95 | ('Content-Type', 'multipart/form-data; '
96 | 'boundary=----------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6'),
97 | ('User-Agent', 'Shockwave Flash'),
98 | ('Host', 'www.example.com:54583'),
99 | ('Content-Length', '499'),
100 | ('Connection', 'Keep-Alive'),
101 | ('Cache-Control', 'no-cache'),
102 | ]
103 | filedata = ntob('\r\n'
104 | '\r\n'
105 | '\r\n')
106 | body = (ntob(
107 | '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
108 | 'Content-Disposition: form-data; name="Filename"\r\n'
109 | '\r\n'
110 | '.project\r\n'
111 | '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
112 | 'Content-Disposition: form-data; '
113 | 'name="Filedata"; filename=".project"\r\n'
114 | 'Content-Type: application/octet-stream\r\n'
115 | '\r\n')
116 | + filedata +
117 | ntob('\r\n'
118 | '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
119 | 'Content-Disposition: form-data; name="Upload"\r\n'
120 | '\r\n'
121 | 'Submit Query\r\n'
122 | # Flash apps omit the trailing \r\n on the last line:
123 | '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6--'
124 | ))
125 | self.getPage('/flashupload', headers, "POST", body)
126 | self.assertBody("Upload: Submit Query, Filename: .project, "
127 | "Filedata: %r" % filedata)
128 |
129 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_refleaks.py:
--------------------------------------------------------------------------------
1 | """Tests for refleaks."""
2 |
3 | from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
4 | import threading
5 |
6 | import cherrypy
7 |
8 |
9 | data = object()
10 |
11 |
12 | from cherrypy.test import helper
13 |
14 |
15 | class ReferenceTests(helper.CPWebCase):
16 |
17 | def setup_server():
18 |
19 | class Root:
20 | def index(self, *args, **kwargs):
21 | cherrypy.request.thing = data
22 | return "Hello world!"
23 | index.exposed = True
24 |
25 | cherrypy.tree.mount(Root())
26 | setup_server = staticmethod(setup_server)
27 |
28 | def test_threadlocal_garbage(self):
29 | success = []
30 |
31 | def getpage():
32 | host = '%s:%s' % (self.interface(), self.PORT)
33 | if self.scheme == 'https':
34 | c = HTTPSConnection(host)
35 | else:
36 | c = HTTPConnection(host)
37 | try:
38 | c.putrequest('GET', '/')
39 | c.endheaders()
40 | response = c.getresponse()
41 | body = response.read()
42 | self.assertEqual(response.status, 200)
43 | self.assertEqual(body, ntob("Hello world!"))
44 | finally:
45 | c.close()
46 | success.append(True)
47 |
48 | ITERATIONS = 25
49 | ts = []
50 | for _ in range(ITERATIONS):
51 | t = threading.Thread(target=getpage)
52 | ts.append(t)
53 | t.start()
54 |
55 | for t in ts:
56 | t.join()
57 |
58 | self.assertEqual(len(success), ITERATIONS)
59 |
60 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_routes.py:
--------------------------------------------------------------------------------
1 | import os
2 | curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
3 |
4 | import cherrypy
5 |
6 | from cherrypy.test import helper
7 | import nose
8 |
9 | class RoutesDispatchTest(helper.CPWebCase):
10 |
11 | def setup_server():
12 |
13 | try:
14 | import routes
15 | except ImportError:
16 | raise nose.SkipTest('Install routes to test RoutesDispatcher code')
17 |
18 | class Dummy:
19 | def index(self):
20 | return "I said good day!"
21 |
22 | class City:
23 |
24 | def __init__(self, name):
25 | self.name = name
26 | self.population = 10000
27 |
28 | def index(self, **kwargs):
29 | return "Welcome to %s, pop. %s" % (self.name, self.population)
30 | index._cp_config = {'tools.response_headers.on': True,
31 | 'tools.response_headers.headers': [('Content-Language', 'en-GB')]}
32 |
33 | def update(self, **kwargs):
34 | self.population = kwargs['pop']
35 | return "OK"
36 |
37 | d = cherrypy.dispatch.RoutesDispatcher()
38 | d.connect(action='index', name='hounslow', route='/hounslow',
39 | controller=City('Hounslow'))
40 | d.connect(name='surbiton', route='/surbiton', controller=City('Surbiton'),
41 | action='index', conditions=dict(method=['GET']))
42 | d.mapper.connect('/surbiton', controller='surbiton',
43 | action='update', conditions=dict(method=['POST']))
44 | d.connect('main', ':action', controller=Dummy())
45 |
46 | conf = {'/': {'request.dispatch': d}}
47 | cherrypy.tree.mount(root=None, config=conf)
48 | setup_server = staticmethod(setup_server)
49 |
50 | def test_Routes_Dispatch(self):
51 | self.getPage("/hounslow")
52 | self.assertStatus("200 OK")
53 | self.assertBody("Welcome to Hounslow, pop. 10000")
54 |
55 | self.getPage("/foo")
56 | self.assertStatus("404 Not Found")
57 |
58 | self.getPage("/surbiton")
59 | self.assertStatus("200 OK")
60 | self.assertBody("Welcome to Surbiton, pop. 10000")
61 |
62 | self.getPage("/surbiton", method="POST", body="pop=1327")
63 | self.assertStatus("200 OK")
64 | self.assertBody("OK")
65 | self.getPage("/surbiton")
66 | self.assertStatus("200 OK")
67 | self.assertHeader("Content-Language", "en-GB")
68 | self.assertBody("Welcome to Surbiton, pop. 1327")
69 |
70 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/test/test_sessionauthenticate.py:
--------------------------------------------------------------------------------
1 | import cherrypy
2 | from cherrypy.test import helper
3 |
4 |
5 | class SessionAuthenticateTest(helper.CPWebCase):
6 |
7 | def setup_server():
8 |
9 | def check(username, password):
10 | # Dummy check_username_and_password function
11 | if username != 'test' or password != 'password':
12 | return 'Wrong login/password'
13 |
14 | def augment_params():
15 | # A simple tool to add some things to request.params
16 | # This is to check to make sure that session_auth can handle request
17 | # params (ticket #780)
18 | cherrypy.request.params["test"] = "test"
19 |
20 | cherrypy.tools.augment_params = cherrypy.Tool('before_handler',
21 | augment_params, None, priority=30)
22 |
23 | class Test:
24 |
25 | _cp_config = {'tools.sessions.on': True,
26 | 'tools.session_auth.on': True,
27 | 'tools.session_auth.check_username_and_password': check,
28 | 'tools.augment_params.on': True,
29 | }
30 |
31 | def index(self, **kwargs):
32 | return "Hi %s, you are logged in" % cherrypy.request.login
33 | index.exposed = True
34 |
35 | cherrypy.tree.mount(Test())
36 | setup_server = staticmethod(setup_server)
37 |
38 |
39 | def testSessionAuthenticate(self):
40 | # request a page and check for login form
41 | self.getPage('/')
42 | self.assertInBody('<form method="post" action="do_login">')
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut03_get_and_post.py:
--------------------------------------------------------------------------------
20 | index.exposed = True
21 |
22 | def greetUser(self, name = None):
23 | # CherryPy passes all GET and POST variables as method parameters.
24 | # It doesn't make a difference where the variables come from, how
25 | # large their contents are, and so on.
26 | #
27 | # You can define default parameter values as usual. In this
28 | # example, the "name" parameter defaults to None so we can check
29 | # if a name was actually specified.
30 |
31 | if name:
32 | # Greet the user!
33 | return "Hey %s, what's up?" % name
34 | else:
35 | if name is None:
36 | # No name was specified
37 | return 'Please enter your name here.'
38 | else:
39 | return 'No, really, enter your name here.'
40 | greetUser.exposed = True
41 |
42 |
43 | import os.path
44 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
45 |
46 | if __name__ == '__main__':
47 | # CherryPy always starts with app.root when trying to map request URIs
48 | # to objects, so we need to mount a request handler root. A request
49 | # to '/' will be mapped to HelloWorld().index().
50 | cherrypy.quickstart(WelcomePage(), config=tutconf)
51 | else:
52 | # This branch is for the test suite; you can ignore it.
53 | cherrypy.tree.mount(WelcomePage(), config=tutconf)
54 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut04_complex_site.py:
--------------------------------------------------------------------------------
1 | """
2 | Tutorial - Multiple objects
3 |
4 | This tutorial shows you how to create a site structure through multiple
5 | possibly nested request handler objects.
6 | """
7 |
8 | import cherrypy
9 |
10 |
11 | class HomePage:
12 | def index(self):
13 | return '''
14 | Hi, this is the home page! Check out the other
15 | fun stuff on this site:
16 |
17 | '''
21 | index.exposed = True
22 |
23 |
24 | class JokePage:
25 | def index(self):
26 | return '''
27 | "In Python, how do you create a string of random
28 | characters?" -- "Read a Perl file!"
29 | [Return]'''
30 | index.exposed = True
31 |
32 |
33 | class LinksPage:
34 | def __init__(self):
35 | # Request handler objects can create their own nested request
36 | # handler objects. Simply create them inside their __init__
37 | # methods!
38 | self.extra = ExtraLinksPage()
39 |
40 | def index(self):
41 | # Note the way we link to the extra links page (and back).
42 | # As you can see, this object doesn't really care about its
43 | # absolute position in the site tree, since we use relative
44 | # links exclusively.
45 | return '''
46 | Here are some useful links:
47 |
48 |
52 |
53 | You can check out some extra useful
54 | links here.
55 |
56 | [Return]
57 | '''
58 | index.exposed = True
59 |
60 |
61 | class ExtraLinksPage:
62 | def index(self):
63 | # Note the relative link back to the Links page!
64 | return '''
65 | Here are some extra useful links:
66 |
67 |
71 |
72 | [Return to links page]'''
73 | index.exposed = True
74 |
75 |
76 | # Of course we can also mount request handler objects right here!
77 | root = HomePage()
78 | root.joke = JokePage()
79 | root.links = LinksPage()
80 |
81 | # Remember, we don't need to mount ExtraLinksPage here, because
82 | # LinksPage does that itself on initialization. In fact, there is
83 | # no reason why you shouldn't let your root object take care of
84 | # creating all contained request handler objects.
85 |
86 |
87 | import os.path
88 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
89 |
90 | if __name__ == '__main__':
91 | # CherryPy always starts with app.root when trying to map request URIs
92 | # to objects, so we need to mount a request handler root. A request
93 | # to '/' will be mapped to HelloWorld().index().
94 | cherrypy.quickstart(root, config=tutconf)
95 | else:
96 | # This branch is for the test suite; you can ignore it.
97 | cherrypy.tree.mount(root, config=tutconf)
98 |
99 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut05_derived_objects.py:
--------------------------------------------------------------------------------
1 | """
2 | Tutorial - Object inheritance
3 |
4 | You are free to derive your request handler classes from any base
5 | class you wish. In most real-world applications, you will probably
6 | want to create a central base class used for all your pages, which takes
7 | care of things like printing a common page header and footer.
8 | """
9 |
10 | import cherrypy
11 |
12 |
13 | class Page:
14 | # Store the page title in a class attribute
15 | title = 'Untitled Page'
16 |
17 | def header(self):
18 | return '''
19 |
20 |
21 | %s
22 |
23 |
24 | %s
25 | ''' % (self.title, self.title)
26 |
27 | def footer(self):
28 | return '''
29 |
30 |
31 | '''
32 |
33 | # Note that header and footer don't get their exposed attributes
34 | # set to True. This isn't necessary since the user isn't supposed
35 | # to call header or footer directly; instead, we'll call them from
36 | # within the actually exposed handler methods defined in this
37 | # class' subclasses.
38 |
39 |
40 | class HomePage(Page):
41 | # Different title for this page
42 | title = 'Tutorial 5'
43 |
44 | def __init__(self):
45 | # create a subpage
46 | self.another = AnotherPage()
47 |
48 | def index(self):
49 | # Note that we call the header and footer methods inherited
50 | # from the Page class!
51 | return self.header() + '''
52 |
53 | Isn't this exciting? There's
54 | another page, too!
55 |
56 | ''' + self.footer()
57 | index.exposed = True
58 |
59 |
60 | class AnotherPage(Page):
61 | title = 'Another Page'
62 |
63 | def index(self):
64 | return self.header() + '''
65 |
66 | And this is the amazing second page!
67 |
68 | ''' + self.footer()
69 | index.exposed = True
70 |
71 |
72 | import os.path
73 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
74 |
75 | if __name__ == '__main__':
76 | # CherryPy always starts with app.root when trying to map request URIs
77 | # to objects, so we need to mount a request handler root. A request
78 | # to '/' will be mapped to HelloWorld().index().
79 | cherrypy.quickstart(HomePage(), config=tutconf)
80 | else:
81 | # This branch is for the test suite; you can ignore it.
82 | cherrypy.tree.mount(HomePage(), config=tutconf)
83 |
84 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut06_default_method.py:
--------------------------------------------------------------------------------
1 | """
2 | Tutorial - The default method
3 |
4 | Request handler objects can implement a method called "default" that
5 | is called when no other suitable method/object could be found.
6 | Essentially, if CherryPy2 can't find a matching request handler object
7 | for the given request URI, it will use the default method of the object
8 | located deepest on the URI path.
9 |
10 | Using this mechanism you can easily simulate virtual URI structures
11 | by parsing the extra URI string, which you can access through
12 | cherrypy.request.virtualPath.
13 |
14 | The application in this tutorial simulates an URI structure looking
15 | like /users/<username>. Since the <username> bit will not be found (as
16 | there are no matching methods), it is handled by the default method.
17 | """
18 |
19 | import cherrypy
20 |
21 |
22 | class UsersPage:
23 |
24 | def index(self):
25 | # Since this is just a stupid little example, we'll simply
26 | # display a list of links to random, made-up users. In a real
27 | # application, this could be generated from a database result set.
28 | return '''
29 | Remi Delon
30 | Hendrik Mans
31 | Lorenzo Lamas
32 | '''
33 | index.exposed = True
34 |
35 | def default(self, user):
36 | # Here we react depending on the virtualPath -- the part of the
37 | # path that could not be mapped to an object method. In a real
38 | # application, we would probably do some database lookups here
39 | # instead of the silly if/elif/else construct.
40 | if user == 'remi':
41 | out = "Remi Delon, CherryPy lead developer"
42 | elif user == 'hendrik':
43 | out = "Hendrik Mans, CherryPy co-developer & crazy German"
44 | elif user == 'lorenzo':
45 | out = "Lorenzo Lamas, famous actor and singer!"
46 | else:
47 | out = "Unknown user. :-("
48 |
49 | return '%s (back)' % out
50 | default.exposed = True
51 |
52 |
53 | import os.path
54 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
55 |
56 | if __name__ == '__main__':
57 | # CherryPy always starts with app.root when trying to map request URIs
58 | # to objects, so we need to mount a request handler root. A request
59 | # to '/' will be mapped to HelloWorld().index().
60 | cherrypy.quickstart(UsersPage(), config=tutconf)
61 | else:
62 | # This branch is for the test suite; you can ignore it.
63 | cherrypy.tree.mount(UsersPage(), config=tutconf)
64 |
65 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut07_sessions.py:
--------------------------------------------------------------------------------
1 | """
2 | Tutorial - Sessions
3 |
4 | Storing session data in CherryPy applications is very easy: cherrypy
5 | provides a dictionary called "session" that represents the session
6 | data for the current user. If you use RAM based sessions, you can store
7 | any kind of object into that dictionary; otherwise, you are limited to
8 | objects that can be pickled.
9 | """
10 |
11 | import cherrypy
12 |
13 |
14 | class HitCounter:
15 |
16 | _cp_config = {'tools.sessions.on': True}
17 |
18 | def index(self):
19 | # Increase the silly hit counter
20 | count = cherrypy.session.get('count', 0) + 1
21 |
22 | # Store the new value in the session dictionary
23 | cherrypy.session['count'] = count
24 |
25 | # And display a silly hit count message!
26 | return '''
27 | During your current session, you've viewed this
28 | page %s times! Your life is a patio of fun!
29 | ''' % count
30 | index.exposed = True
31 |
32 |
33 | import os.path
34 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
35 |
36 | if __name__ == '__main__':
37 | # CherryPy always starts with app.root when trying to map request URIs
38 | # to objects, so we need to mount a request handler root. A request
39 | # to '/' will be mapped to HelloWorld().index().
40 | cherrypy.quickstart(HitCounter(), config=tutconf)
41 | else:
42 | # This branch is for the test suite; you can ignore it.
43 | cherrypy.tree.mount(HitCounter(), config=tutconf)
44 |
45 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut08_generators_and_yield.py:
--------------------------------------------------------------------------------
1 | """
2 | Bonus Tutorial: Using generators to return result bodies
3 |
4 | Instead of returning a complete result string, you can use the yield
5 | statement to return one result part after another. This may be convenient
6 | in situations where using a template package like CherryPy or Cheetah
7 | would be overkill, and messy string concatenation too uncool. ;-)
8 | """
9 |
10 | import cherrypy
11 |
12 |
13 | class GeneratorDemo:
14 |
15 | def header(self):
16 | return "Generators rule!"
17 |
18 | def footer(self):
19 | return ""
20 |
21 | def index(self):
22 | # Let's make up a list of users for presentation purposes
23 | users = ['Remi', 'Carlos', 'Hendrik', 'Lorenzo Lamas']
24 |
25 | # Every yield line adds one part to the total result body.
26 | yield self.header()
27 | yield "List of users:"
28 |
29 | for user in users:
30 | yield "%s" % user
31 |
32 | yield self.footer()
33 | index.exposed = True
34 |
35 |
36 | import os.path
37 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
38 |
39 | if __name__ == '__main__':
40 | # CherryPy always starts with app.root when trying to map request URIs
41 | # to objects, so we need to mount a request handler root. A request
42 | # to '/' will be mapped to HelloWorld().index().
43 | cherrypy.quickstart(GeneratorDemo(), config=tutconf)
44 | else:
45 | # This branch is for the test suite; you can ignore it.
46 | cherrypy.tree.mount(GeneratorDemo(), config=tutconf)
47 |
48 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut09_files.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | Tutorial: File upload and download
4 |
5 | Uploads
6 | -------
7 |
8 | When a client uploads a file to a CherryPy application, it's placed
9 | on disk immediately. CherryPy will pass it to your exposed method
10 | as an argument (see "myFile" below); that arg will have a "file"
11 | attribute, which is a handle to the temporary uploaded file.
12 | If you wish to permanently save the file, you need to read()
13 | from myFile.file and write() somewhere else.
14 |
15 | Note the use of 'enctype="multipart/form-data"' and 'input type="file"'
16 | in the HTML which the client uses to upload the file.
17 |
18 |
19 | Downloads
20 | ---------
21 |
22 | If you wish to send a file to the client, you have two options:
23 | First, you can simply return a file-like object from your page handler.
24 | CherryPy will read the file and serve it as the content (HTTP body)
25 | of the response. However, that doesn't tell the client that
26 | the response is a file to be saved, rather than displayed.
27 | Use cherrypy.lib.static.serve_file for that; it takes four
28 | arguments:
29 |
30 | serve_file(path, content_type=None, disposition=None, name=None)
31 |
32 | Set "name" to the filename that you expect clients to use when they save
33 | your file. Note that the "name" argument is ignored if you don't also
34 | provide a "disposition" (usually "attachment"). You can manually set
35 | "content_type", but be aware that if you also use the encoding tool, it
36 | may choke if the file extension is not recognized as belonging to a known
37 | Content-Type. Setting the content_type to "application/x-download" works
38 | in most cases, and should prompt the user with an Open/Save dialog in
39 | popular browsers.
40 |
41 | """
42 |
43 | import os
44 | localDir = os.path.dirname(__file__)
45 | absDir = os.path.join(os.getcwd(), localDir)
46 |
47 | import cherrypy
48 | from cherrypy.lib import static
49 |
50 |
51 | class FileDemo(object):
52 |
53 | def index(self):
54 | return """
55 |
56 | Upload a file
57 |
61 | Download a file
62 | This one
63 |
64 | """
65 | index.exposed = True
66 |
67 | def upload(self, myFile):
68 | out = """
69 |
70 | myFile length: %s
71 | myFile filename: %s
72 | myFile mime-type: %s
73 |
74 | """
75 |
76 | # Although this just counts the file length, it demonstrates
77 | # how to read large files in chunks instead of all at once.
78 | # CherryPy reads the uploaded file into a temporary file;
79 | # myFile.file.read reads from that.
80 | size = 0
81 | while True:
82 | data = myFile.file.read(8192)
83 | if not data:
84 | break
85 | size += len(data)
86 |
87 | return out % (size, myFile.filename, myFile.content_type)
88 | upload.exposed = True
89 |
90 | def download(self):
91 | path = os.path.join(absDir, "pdf_file.pdf")
92 | return static.serve_file(path, "application/x-download",
93 | "attachment", os.path.basename(path))
94 | download.exposed = True
95 |
96 |
97 | import os.path
98 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
99 |
100 | if __name__ == '__main__':
101 | # CherryPy always starts with app.root when trying to map request URIs
102 | # to objects, so we need to mount a request handler root. A request
103 | # to '/' will be mapped to HelloWorld().index().
104 | cherrypy.quickstart(FileDemo(), config=tutconf)
105 | else:
106 | # This branch is for the test suite; you can ignore it.
107 | cherrypy.tree.mount(FileDemo(), config=tutconf)
108 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tut10_http_errors.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | Tutorial: HTTP errors
4 |
5 | HTTPError is used to return an error response to the client.
6 | CherryPy has lots of options regarding how such errors are
7 | logged, displayed, and formatted.
8 |
9 | """
10 |
11 | import os
12 | localDir = os.path.dirname(__file__)
13 | curpath = os.path.normpath(os.path.join(os.getcwd(), localDir))
14 |
15 | import cherrypy
16 |
17 |
18 | class HTTPErrorDemo(object):
19 |
20 | # Set a custom response for 403 errors.
21 | _cp_config = {'error_page.403' : os.path.join(curpath, "custom_error.html")}
22 |
23 | def index(self):
24 | # display some links that will result in errors
25 | tracebacks = cherrypy.request.show_tracebacks
26 | if tracebacks:
27 | trace = 'off'
28 | else:
29 | trace = 'on'
30 |
31 | return """
32 |
33 | Toggle tracebacks %s
34 | Click me; I'm a broken link!
35 | Use a custom error page from a file.
36 | These errors are explicitly raised by the application:
37 |
43 | You can also set the response body
44 | when you raise an error.
45 |
46 | """ % trace
47 | index.exposed = True
48 |
49 | def toggleTracebacks(self):
50 | # simple function to toggle tracebacks on and off
51 | tracebacks = cherrypy.request.show_tracebacks
52 | cherrypy.config.update({'request.show_tracebacks': not tracebacks})
53 |
54 | # redirect back to the index
55 | raise cherrypy.HTTPRedirect('/')
56 | toggleTracebacks.exposed = True
57 |
58 | def error(self, code):
59 | # raise an error based on the get query
60 | raise cherrypy.HTTPError(status = code)
61 | error.exposed = True
62 |
63 | def messageArg(self):
64 | message = ("If you construct an HTTPError with a 'message' "
65 | "argument, it will be placed on the error page "
66 | "(underneath the status line by default).")
67 | raise cherrypy.HTTPError(500, message=message)
68 | messageArg.exposed = True
69 |
70 |
71 | import os.path
72 | tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
73 |
74 | if __name__ == '__main__':
75 | # CherryPy always starts with app.root when trying to map request URIs
76 | # to objects, so we need to mount a request handler root. A request
77 | # to '/' will be mapped to HelloWorld().index().
78 | cherrypy.quickstart(HTTPErrorDemo(), config=tutconf)
79 | else:
80 | # This branch is for the test suite; you can ignore it.
81 | cherrypy.tree.mount(HTTPErrorDemo(), config=tutconf)
82 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/tutorial/tutorial.conf:
--------------------------------------------------------------------------------
1 | [global]
2 | server.socket_host = "127.0.0.1"
3 | server.socket_port = 8080
4 | server.thread_pool = 10
5 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/wsgiserver/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer',
2 | 'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
3 | 'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert',
4 | 'WorkerThread', 'ThreadPool', 'SSLAdapter',
5 | 'CherryPyWSGIServer',
6 | 'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0',
7 | 'WSGIPathInfoDispatcher', 'get_ssl_adapter_class']
8 |
9 | import sys
10 | if sys.version_info < (3, 0):
11 | from wsgiserver2 import *
12 | else:
13 | # Le sigh. Boo for backward-incompatible syntax.
14 | exec('from .wsgiserver3 import *')
15 |
--------------------------------------------------------------------------------
/CherryPy-3.2.4/cherrypy/wsgiserver/ssl_builtin.py:
--------------------------------------------------------------------------------
1 | """A library for integrating Python's builtin ``ssl`` library with CherryPy.
2 |
3 | The ssl module must be importable for SSL functionality.
4 |
5 | To use this module, set ``CherryPyWSGIServer.ssl_adapter`` to an instance of
6 | ``BuiltinSSLAdapter``.
7 | """
8 |
9 | try:
10 | import ssl
11 | except ImportError:
12 | ssl = None
13 |
14 | try:
15 | from _pyio import DEFAULT_BUFFER_SIZE
16 | except ImportError:
17 | try:
18 | from io import DEFAULT_BUFFER_SIZE
19 | except ImportError:
20 | DEFAULT_BUFFER_SIZE = -1
21 |
22 | import sys
23 |
24 | from cherrypy import wsgiserver
25 |
26 |
27 | class BuiltinSSLAdapter(wsgiserver.SSLAdapter):
28 | """A wrapper for integrating Python's builtin ssl module with CherryPy."""
29 |
30 | certificate = None
31 | """The filename of the server SSL certificate."""
32 |
33 | private_key = None
34 | """The filename of the server's private key file."""
35 |
36 | def __init__(self, certificate, private_key, certificate_chain=None):
37 | if ssl is None:
38 | raise ImportError("You must install the ssl module to use HTTPS.")
39 | self.certificate = certificate
40 | self.private_key = private_key
41 | self.certificate_chain = certificate_chain
42 |
43 | def bind(self, sock):
44 | """Wrap and return the given socket."""
45 | return sock
46 |
47 | def wrap(self, sock):
48 | """Wrap and return the given socket, plus WSGI environ entries."""
49 | try:
50 | s = ssl.wrap_socket(sock, do_handshake_on_connect=True,
51 | server_side=True, certfile=self.certificate,
52 | keyfile=self.private_key, ssl_version=ssl.PROTOCOL_SSLv23)
53 | except ssl.SSLError:
54 | e = sys.exc_info()[1]
55 | if e.errno == ssl.SSL_ERROR_EOF:
56 | # This is almost certainly due to the cherrypy engine
57 | # 'pinging' the socket to assert it's connectable;
58 | # the 'ping' isn't SSL.
59 | return None, {}
60 | elif e.errno == ssl.SSL_ERROR_SSL:
61 | if e.args[1].endswith('http request'):
62 | # The client is speaking HTTP to an HTTPS server.
63 | raise wsgiserver.NoSSLError
64 | elif e.args[1].endswith('unknown protocol'):
65 | # The client is speaking some non-HTTP protocol.
66 | # Drop the conn.
67 | return None, {}
68 | raise
69 | return s, self.get_environ(s)
70 |
71 | # TODO: fill this out more with mod ssl env
72 | def get_environ(self, sock):
73 | """Create WSGI environ entries to be merged into each request."""
74 | cipher = sock.cipher()
75 | ssl_environ = {
76 | "wsgi.url_scheme": "https",
77 | "HTTPS": "on",
78 | 'SSL_PROTOCOL': cipher[1],
79 | 'SSL_CIPHER': cipher[0]
80 | ## SSL_VERSION_INTERFACE string The mod_ssl program version
81 | ## SSL_VERSION_LIBRARY string The OpenSSL program version
82 | }
83 | return ssl_environ
84 |
85 | if sys.version_info >= (3, 0):
86 | def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
87 | return wsgiserver.CP_makefile(sock, mode, bufsize)
88 | else:
89 | def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
90 | return wsgiserver.CP_fileobject(sock, mode, bufsize)
91 |
92 |
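The docstring above says to point ``CherryPyWSGIServer.ssl_adapter`` at a ``BuiltinSSLAdapter`` instance, but the module itself shows no example of that wiring. A minimal sketch, assuming a trivial WSGI callable named wsgi_app and placeholder certificate/key file names (cert.pem, privkey.pem):

    from cherrypy import wsgiserver
    from cherrypy.wsgiserver.ssl_builtin import BuiltinSSLAdapter

    def wsgi_app(environ, start_response):
        # placeholder WSGI application for the sketch
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'Hello over HTTPS']

    server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8443), wsgi_app)
    # cert.pem / privkey.pem are assumed paths to your certificate and key
    server.ssl_adapter = BuiltinSSLAdapter('cert.pem', 'privkey.pem')
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()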
--------------------------------------------------------------------------------
/CherryPy-3.2.4/setup.cfg:
--------------------------------------------------------------------------------
1 | [sdist]
2 | formats = gztar
3 |
4 | [egg_info]
5 | tag_build =
6 | tag_date = 0
7 | tag_svn_revision = 0
8 |
9 |
--------------------------------------------------------------------------------
/PCV/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2012, Jan Erik Solem
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | 1. Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 | 2. Redistributions in binary form must reproduce the above copyright notice,
10 | this list of conditions and the following disclaimer in the documentation
11 | and/or other materials provided with the distribution.
12 |
13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
17 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
20 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/PCV/PCV/__init__.py:
--------------------------------------------------------------------------------
1 | from PCV.classifiers import *
2 | from PCV.clustering import *
3 | from PCV.geometry import *
4 | from PCV.imagesearch import *
5 | from PCV.localdescriptors import *
6 | from PCV.tools import *
--------------------------------------------------------------------------------
/PCV/PCV/classifiers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/PCV/PCV/classifiers/__init__.py
--------------------------------------------------------------------------------
/PCV/PCV/classifiers/bayes.py:
--------------------------------------------------------------------------------
1 | from numpy import *
2 |
3 |
4 | class BayesClassifier(object):
5 |
6 | def __init__(self):
7 | """ Initialize classifier with training data. """
8 |
9 | self.labels = [] # class labels
10 | self.mean = [] # class mean
11 | self.var = [] # class variances
12 | self.n = 0 # nbr of classes
13 |
14 | def train(self,data,labels=None):
15 | """ Train on data (list of arrays n*dim).
16 | Labels are optional, default is 0...n-1. """
17 |
18 | if labels==None:
19 | labels = range(len(data))
20 | self.labels = labels
21 | self.n = len(labels)
22 |
23 | for c in data:
24 | self.mean.append(mean(c,axis=0))
25 | self.var.append(var(c,axis=0))
26 |
27 | def classify(self,points):
28 | """ Classify the points by computing probabilities
29 | for each class and return most probable label. """
30 |
31 | # compute probabilities for each class
32 | est_prob = array([gauss(m,v,points) for m,v in zip(self.mean,self.var)])
33 |
34 | print 'est prob',est_prob.shape,self.labels
35 | # get index of highest probability, this gives class label
36 | ndx = est_prob.argmax(axis=0)
37 |
38 | est_labels = array([self.labels[n] for n in ndx])
39 |
40 | return est_labels, est_prob
41 |
42 |
43 | def gauss(m,v,x):
44 | """ Evaluate Gaussian in d-dimensions with independent
45 | mean m and variance v at the points in (the rows of) x.
46 | http://en.wikipedia.org/wiki/Multivariate_normal_distribution """
47 |
48 | if len(x.shape)==1:
49 | n,d = 1,x.shape[0]
50 | else:
51 | n,d = x.shape
52 |
53 | # covariance matrix, subtract mean
54 | S = diag(1/v)
55 | x = x-m
56 | # product of probabilities
57 | y = exp(-0.5*diag(dot(x,dot(S,x.T))))
58 |
59 | # normalize and return
60 | return y * (2*pi)**(-d/2.0) / ( sqrt(prod(v)) + 1e-6)
61 |
62 |
63 |
64 |
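A short usage sketch for BayesClassifier (synthetic data invented for illustration; like the rest of PCV, the module is written for Python 2): train on a list of per-class sample arrays, then classify new points.

    from numpy import random, vstack
    from PCV.classifiers import bayes

    # two made-up 2-D classes with different means
    class_1 = 0.6 * random.randn(100, 2)
    class_2 = 1.2 * random.randn(100, 2) + [5, 1]

    bc = bayes.BayesClassifier()
    bc.train([class_1, class_2], [1, -1])   # one array per class, with labels

    # classify() returns (estimated labels, per-class probabilities)
    points = vstack((0.6 * random.randn(5, 2),
                     1.2 * random.randn(5, 2) + [5, 1]))
    print(bc.classify(points)[0])   # expect mostly 1s, then -1s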
--------------------------------------------------------------------------------
/PCV/PCV/classifiers/knn.py:
--------------------------------------------------------------------------------
1 | from numpy import *
2 |
3 | class KnnClassifier(object):
4 |
5 | def __init__(self,labels,samples):
6 | """ Initialize classifier with training data. """
7 |
8 | self.labels = labels
9 | self.samples = samples
10 |
11 | def classify(self,point,k=3):
12 | """ Classify a point against k nearest
13 | in the training data, return label. """
14 |
15 | # compute distance to all training points
16 | dist = array([L2dist(point,s) for s in self.samples])
17 |
18 | # sort them
19 | ndx = dist.argsort()
20 |
21 | # use dictionary to store the k nearest
22 | votes = {}
23 | for i in range(k):
24 | label = self.labels[ndx[i]]
25 | votes.setdefault(label,0)
26 | votes[label] += 1
27 |
28 | return max(votes, key=lambda x: votes.get(x))
29 |
30 |
31 | def L2dist(p1,p2):
32 | return sqrt( sum( (p1-p2)**2) )
33 |
34 | def L1dist(v1,v2):
35 | return sum(abs(v1-v2))
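For comparison, the same kind of usage sketch for KnnClassifier (again with invented synthetic data): all samples and labels are handed over at construction time, and classify() votes among the k nearest neighbours.

    from numpy import random, vstack
    from PCV.classifiers import knn

    # two made-up 2-D classes
    class_1 = 0.5 * random.randn(100, 2)
    class_2 = 0.5 * random.randn(100, 2) + [3, 3]
    samples = vstack((class_1, class_2))
    labels = ['one'] * 100 + ['two'] * 100

    model = knn.KnnClassifier(labels, samples)
    print(model.classify([0.1, -0.2], k=3))   # expect 'one'
    print(model.classify([3.2, 2.9], k=3))    # expect 'two'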
--------------------------------------------------------------------------------
/PCV/PCV/clustering/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/PCV/PCV/clustering/__init__.py
--------------------------------------------------------------------------------
/PCV/PCV/clustering/hcluster.py:
--------------------------------------------------------------------------------
1 | from numpy import *
2 | from itertools import combinations
3 |
4 |
5 | class ClusterNode(object):
6 | def __init__(self,vec,left,right,distance=0.0,count=1):
7 | self.left = left
8 | self.right = right
9 | self.vec = vec
10 | self.distance = distance
11 | self.count = count # only used for weighted average
12 |
13 | def extract_clusters(self,dist):
14 | """ Extract list of sub-tree clusters from
15 |             hcluster tree with distance<dist. """
99 | closest = float('Inf')
100 |
101 | # loop through every pair looking for the smallest distance
102 | for ni,nj in combinations(node,2):
103 | if (ni,nj) not in distances:
104 | distances[ni,nj] = distfcn(ni.vec,nj.vec)
105 |
106 | d = distances[ni,nj]
107 |             if d<closest:
--------------------------------------------------------------------------------
/PCV/PCV/imagesearch/vocabulary.py:
--------------------------------------------------------------------------------
39 |         nbr_occurences = sum( (self.imwords > 0)*1 ,axis=0)
40 |
41 | self.idf = log( (1.0*nbr_images) / (1.0*nbr_occurences+1) )
42 | self.trainingdata = featurefiles
43 |
44 | def project(self,descriptors):
45 | """ Project descriptors on the vocabulary
46 | to create a histogram of words. """
47 |
48 | # histogram of image words
49 | imhist = zeros((self.nbr_words))
50 | words,distance = vq(descriptors,self.voc)
51 | for w in words:
52 | imhist[w] += 1
53 |
54 | return imhist
55 |
56 | def get_words(self,descriptors):
57 | """ Convert descriptors to words. """
58 | return vq(descriptors,self.voc)[0]
59 |
--------------------------------------------------------------------------------
/PCV/PCV/localdescriptors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/PCV/PCV/localdescriptors/__init__.py
--------------------------------------------------------------------------------
/PCV/PCV/localdescriptors/dsift.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | from numpy import *
3 | import os
4 |
5 | from PCV.localdescriptors import sift
6 |
7 |
8 | def process_image_dsift(imagename,resultname,size=20,steps=10,force_orientation=False,resize=None):
9 | """ Process an image with densely sampled SIFT descriptors
10 | and save the results in a file. Optional input: size of features,
11 | steps between locations, forcing computation of descriptor orientation
12 | (False means all are oriented upwards), tuple for resizing the image."""
13 |
14 | im = Image.open(imagename).convert('L')
15 | if resize!=None:
16 | im = im.resize(resize)
17 | m,n = im.size
18 |
19 | if imagename[-3:] != 'pgm':
20 | #create a pgm file
21 | im.save('tmp.pgm')
22 | imagename = 'tmp.pgm'
23 |
24 | # create frames and save to temporary file
25 | scale = size/3.0
26 | x,y = meshgrid(range(steps,m,steps),range(steps,n,steps))
27 | xx,yy = x.flatten(),y.flatten()
28 | frame = array([xx,yy,scale*ones(xx.shape[0]),zeros(xx.shape[0])])
29 | savetxt('tmp.frame',frame.T,fmt='%03.3f')
30 |
31 | if force_orientation:
32 | cmmd = str("D:\mltools\win32vlfeat\sift.exe "+imagename+" --output="+resultname+
33 | " --read-frames=tmp.frame --orientations")
34 | else:
35 | cmmd = str("D:\mltools\win32vlfeat\sift.exe "+imagename+" --output="+resultname+
36 | " --read-frames=tmp.frame")
37 | os.system(cmmd)
38 | print 'processed', imagename, 'to', resultname
39 |
40 |
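A typical call pattern for process_image_dsift, assuming an input image named empire.jpg and that the hard-coded sift.exe path above exists on the machine: compute dense descriptors, read them back with the sift module, and plot the feature locations.

    from PIL import Image
    from numpy import array
    from pylab import figure, gray, show

    from PCV.localdescriptors import dsift, sift

    # empire.jpg is a placeholder image name
    dsift.process_image_dsift('empire.jpg', 'empire.dsift',
                              size=90, steps=40, force_orientation=True)
    locs, descriptors = sift.read_features_from_file('empire.dsift')

    im = array(Image.open('empire.jpg'))
    figure()
    gray()
    sift.plot_features(im, locs, circle=True)
    show()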
--------------------------------------------------------------------------------
/PCV/PCV/localdescriptors/sift.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | import os
3 | from numpy import *
4 | from pylab import *
5 |
6 |
7 | def process_image(imagename,resultname,params="--edge-thresh 10 --peak-thresh 5"):
8 | """ process an image and save the results in a file"""
9 | # path = os.path.abspath(os.path.join(os.path.dirname("__file__"),os.path.pardir))
10 | path = os.path.abspath(os.path.join(os.path.dirname("__file__")))
11 | path = path+"\\utils\\win32vlfeat\\sift.exe "
12 | if imagename[-3:] != 'pgm':
13 | #create a pgm file
14 | im = Image.open(imagename).convert('L')
15 | im.save('tmp.pgm')
16 | imagename = 'tmp.pgm'
17 | cmmd = str(path+imagename+" --output="+resultname+
18 | " "+params)
19 | os.system(cmmd)
20 | print 'processed', imagename, 'to', resultname
21 |
22 |
23 | def read_features_from_file(filename):
24 | """ read feature properties and return in matrix form"""
25 | f = loadtxt(filename)
26 | return f[:,:4],f[:,4:] # feature locations, descriptors
27 |
28 |
29 | def write_features_to_file(filename,locs,desc):
30 | """ save feature location and descriptor to file"""
31 | savetxt(filename,hstack((locs,desc)))
32 |
33 |
34 | def plot_features(im,locs,circle=False):
35 | """ show image with features. input: im (image as array),
36 | locs (row, col, scale, orientation of each feature) """
37 |
38 | def draw_circle(c,r):
39 | t = arange(0,1.01,.01)*2*pi
40 | x = r*cos(t) + c[0]
41 | y = r*sin(t) + c[1]
42 | plot(x,y,'b',linewidth=2)
43 |
44 | imshow(im)
45 | if circle:
46 | [draw_circle([p[0],p[1]],p[2]) for p in locs]
47 | else:
48 | plot(locs[:,0],locs[:,1],'ob')
49 | axis('off')
50 |
51 |
52 | def match(desc1,desc2):
53 | """ for each descriptor in the first image,
54 | select its match in the second image.
55 | input: desc1 (descriptors for the first image),
56 | desc2 (same for second image). """
57 |
58 | desc1 = array([d/linalg.norm(d) for d in desc1])
59 | desc2 = array([d/linalg.norm(d) for d in desc2])
60 |
61 | dist_ratio = 0.6
62 | desc1_size = desc1.shape
63 |
64 | matchscores = zeros((desc1_size[0],1))
65 | desc2t = desc2.T #precompute matrix transpose
66 | for i in range(desc1_size[0]):
67 | dotprods = dot(desc1[i,:],desc2t) #vector of dot products
68 | dotprods = 0.9999*dotprods
69 | #inverse cosine and sort, return index for features in second image
70 | indx = argsort(arccos(dotprods))
71 |
72 | #check if nearest neighbor has angle less than dist_ratio times 2nd
73 | if arccos(dotprods)[indx[0]] < dist_ratio * arccos(dotprods)[indx[1]]:
74 | matchscores[i] = int(indx[0])
75 |
76 | return matchscores
77 |
78 |
79 | def appendimages(im1,im2):
80 | """ return a new image that appends the two images side-by-side."""
81 |
82 | #select the image with the fewest rows and fill in enough empty rows
83 | rows1 = im1.shape[0]
84 | rows2 = im2.shape[0]
85 |
86 | if rows1 < rows2:
87 | im1 = concatenate((im1,zeros((rows2-rows1,im1.shape[1]))), axis=0)
88 | elif rows1 > rows2:
89 | im2 = concatenate((im2,zeros((rows1-rows2,im2.shape[1]))), axis=0)
90 | #if none of these cases they are equal, no filling needed.
91 |
92 | return concatenate((im1,im2), axis=1)
93 |
94 |
95 | def plot_matches(im1,im2,locs1,locs2,matchscores,show_below=True):
96 | """ show a figure with lines joining the accepted matches
97 | input: im1,im2 (images as arrays), locs1,locs2 (location of features),
98 | matchscores (as output from 'match'), show_below (if images should be shown below). """
99 |
100 | im3 = appendimages(im1,im2)
101 | if show_below:
102 | im3 = vstack((im3,im3))
103 |
104 | # show image
105 | imshow(im3)
106 |
107 | # draw lines for matches
108 | cols1 = im1.shape[1]
109 | for i in range(len(matchscores)):
110 | if matchscores[i] > 0:
111 | plot([locs1[i,0], locs2[matchscores[i,0],0]+cols1], [locs1[i,1], locs2[matchscores[i,0],1]], 'c')
112 | axis('off')
113 |
114 |
115 | def match_twosided(desc1,desc2):
116 | """ two-sided symmetric version of match(). """
117 |
118 | matches_12 = match(desc1,desc2)
119 | matches_21 = match(desc2,desc1)
120 |
121 | ndx_12 = matches_12.nonzero()[0]
122 |
123 | #remove matches that are not symmetric
124 | for n in ndx_12:
125 | if matches_21[int(matches_12[n])] != n:
126 | matches_12[n] = 0
127 |
128 | return matches_12
129 |
130 |
131 | if __name__ == "__main__":
132 |
133 | process_image('box.pgm','tmp.sift')
134 | l,d = read_features_from_file('tmp.sift')
135 |
136 | im = array(Image.open('box.pgm'))
137 | figure()
138 | plot_features(im,l,True)
139 | gray()
140 |
141 | process_image('scene.pgm','tmp2.sift')
142 | l2,d2 = read_features_from_file('tmp2.sift')
143 | im2 = array(Image.open('scene.pgm'))
144 |
145 | m = match_twosided(d,d2)
146 | figure()
147 | plot_matches(im,im2,l,l2,m)
148 |
149 | gray()
150 | show()
151 |
152 |
153 |
154 |
--------------------------------------------------------------------------------
/PCV/PCV/tools/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/PCV/PCV/tools/__init__.py
--------------------------------------------------------------------------------
/PCV/PCV/tools/graphcut.py:
--------------------------------------------------------------------------------
1 | from pylab import *
2 | from numpy import *
3 |
4 | from pygraph.classes.digraph import digraph
5 | from pygraph.algorithms.minmax import maximum_flow
6 |
7 | from PCV.classifiers import bayes
8 |
9 | """
10 | Graph Cut image segmentation using max-flow/min-cut.
11 | """
12 |
13 | def build_bayes_graph(im,labels,sigma=1e2,kappa=1):
14 | """ Build a graph from 4-neighborhood of pixels.
15 | Foreground and background is determined from
16 | labels (1 for foreground, -1 for background, 0 otherwise)
17 | and is modeled with naive Bayes classifiers."""
18 |
19 | m,n = im.shape[:2]
20 |
21 | # RGB vector version (one pixel per row)
22 | vim = im.reshape((-1,3))
23 |
24 | # RGB for foreground and background
25 | foreground = im[labels==1].reshape((-1,3))
26 | background = im[labels==-1].reshape((-1,3))
27 | train_data = [foreground,background]
28 |
29 | # train naive Bayes classifier
30 | bc = bayes.BayesClassifier()
31 | bc.train(train_data)
32 |
33 | # get probabilities for all pixels
34 |     bc_labels,prob = bc.classify(vim)
35 | prob_fg = prob[0]
36 | prob_bg = prob[1]
37 |
38 | # create graph with m*n+2 nodes
39 | gr = digraph()
40 | gr.add_nodes(range(m*n+2))
41 |
42 | source = m*n # second to last is source
43 | sink = m*n+1 # last node is sink
44 |
45 | # normalize
46 | for i in range(vim.shape[0]):
47 | vim[i] = vim[i] / (linalg.norm(vim[i]) + 1e-9)
48 |
49 | # go through all nodes and add edges
50 | for i in range(m*n):
51 | # add edge from source
52 | gr.add_edge((source,i), wt=(prob_fg[i]/(prob_fg[i]+prob_bg[i])))
53 |
54 | # add edge to sink
55 | gr.add_edge((i,sink), wt=(prob_bg[i]/(prob_fg[i]+prob_bg[i])))
56 |
57 | # add edges to neighbors
58 | if i%n != 0: # left exists
59 | edge_wt = kappa*exp(-1.0*sum((vim[i]-vim[i-1])**2)/sigma)
60 | gr.add_edge((i,i-1), wt=edge_wt)
61 | if (i+1)%n != 0: # right exists
62 | edge_wt = kappa*exp(-1.0*sum((vim[i]-vim[i+1])**2)/sigma)
63 | gr.add_edge((i,i+1), wt=edge_wt)
64 | if i//n != 0: # up exists
65 | edge_wt = kappa*exp(-1.0*sum((vim[i]-vim[i-n])**2)/sigma)
66 | gr.add_edge((i,i-n), wt=edge_wt)
67 | if i//n != m-1: # down exists
68 | edge_wt = kappa*exp(-1.0*sum((vim[i]-vim[i+n])**2)/sigma)
69 | gr.add_edge((i,i+n), wt=edge_wt)
70 |
71 | return gr
72 |
73 |
74 | def cut_graph(gr,imsize):
75 | """ Solve max flow of graph gr and return binary
76 | labels of the resulting segmentation."""
77 |
78 | m,n = imsize
79 | source = m*n # second to last is source
80 | sink = m*n+1 # last is sink
81 |
82 | # cut the graph
83 | flows,cuts = maximum_flow(gr,source,sink)
84 |
85 | # convert graph to image with labels
86 | res = zeros(m*n)
87 | for pos,label in cuts.items()[:-2]: #don't add source/sink
88 | res[pos] = label
89 |
90 | return res.reshape((m,n))
91 |
92 |
93 | def save_as_pdf(gr,filename,show_weights=False):
94 |
95 | from pygraph.readwrite.dot import write
96 | import gv
97 | dot = write(gr, weighted=show_weights)
98 | gvv = gv.readstring(dot)
99 | gv.layout(gvv,'fdp')
100 | gv.render(gvv,'pdf',filename)
101 |
102 |
103 | def show_labeling(im,labels):
104 | """ Show image with foreground and background areas.
105 | labels = 1 for foreground, -1 for background, 0 otherwise."""
106 |
107 | imshow(im)
108 | contour(labels,[-0.5,0.5])
109 | contourf(labels,[-1,-0.5],colors='b',alpha=0.25)
110 | contourf(labels,[0.5,1],colors='r',alpha=0.25)
111 | #axis('off')
112 | xticks([])
113 | yticks([])
114 |
115 |
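
A condensed usage sketch for the functions above; the image name, downsampling factor and seed regions are illustrative, and the same dependencies as the module itself (pygraph, PCV.classifiers.bayes) must be importable:

```python
from PIL import Image
from numpy import array, zeros
from pylab import figure, show
from PCV.tools import graphcut

# 'empire.jpg' is a placeholder; downsample heavily, since the graph gets one node per pixel
im = array(Image.open('empire.jpg'))[::20, ::20]
size = im.shape[:2]

labels = zeros(size)          # 0 = unknown
labels[:6, :6] = -1           # a small block of background seeds
labels[-6:, -6:] = 1          # a small block of foreground seeds

# pass a float copy, since build_bayes_graph normalizes the pixel vectors in place
g = graphcut.build_bayes_graph(array(im, 'f'), labels, kappa=1)
res = graphcut.cut_graph(g, size)     # binary segmentation with shape (m, n)

figure()
graphcut.show_labeling(im, labels)
show()
```
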
--------------------------------------------------------------------------------
/PCV/PCV/tools/imregistration.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | from xml.dom import minidom
3 | from numpy import *
4 | from pylab import *
5 |
6 | from scipy import ndimage, linalg
7 | from scipy.misc import imsave
8 | import os
9 |
10 | def read_points_from_xml(xmlFileName):
11 | """ Reads control points for face alignment. """
12 |
13 | xmldoc = minidom.parse(xmlFileName)
14 | facelist = xmldoc.getElementsByTagName('face')
15 | faces = {}
16 | for xmlFace in facelist:
17 | fileName = xmlFace.attributes['file'].value
18 | xf = int(xmlFace.attributes['xf'].value)
19 | yf = int(xmlFace.attributes['yf'].value)
20 | xs = int(xmlFace.attributes['xs'].value)
21 | ys = int(xmlFace.attributes['ys'].value)
22 | xm = int(xmlFace.attributes['xm'].value)
23 | ym = int(xmlFace.attributes['ym'].value)
24 | faces[fileName] = array([xf, yf, xs, ys, xm, ym])
25 | return faces
26 |
27 |
28 | def write_points_to_xml(faces, xmlFileName):
29 | xmldoc = minidom.Document()
30 | xmlFaces = xmldoc.createElement("faces")
31 |
32 | keys = faces.keys()
33 | for k in keys:
34 | xmlFace = xmldoc.createElement("face")
35 | xmlFace.setAttribute("file", k)
36 | xmlFace.setAttribute("xf", "%d" % faces[k][0])
37 | xmlFace.setAttribute("yf", "%d" % faces[k][1])
38 | xmlFace.setAttribute("xs", "%d" % faces[k][2])
39 | xmlFace.setAttribute("ys", "%d" % faces[k][3])
40 | xmlFace.setAttribute("xm", "%d" % faces[k][4])
41 | xmlFace.setAttribute("ym", "%d" % faces[k][5])
42 | xmlFaces.appendChild(xmlFace)
43 |
44 | xmldoc.appendChild(xmlFaces)
45 |
46 | fp = open(xmlFileName, "w")
47 | fp.write(xmldoc.toprettyxml(encoding='utf-8'))
48 | fp.close()
49 |
50 |
51 | def compute_rigid_transform(refpoints,points):
52 | """ Computes rotation, scale and translation for
53 | aligning points to refpoints. """
54 |
55 | A = array([ [points[0], -points[1], 1, 0],
56 | [points[1], points[0], 0, 1],
57 | [points[2], -points[3], 1, 0],
58 | [points[3], points[2], 0, 1],
59 | [points[4], -points[5], 1, 0],
60 | [points[5], points[4], 0, 1]])
61 |
62 | y = array([ refpoints[0],
63 | refpoints[1],
64 | refpoints[2],
65 | refpoints[3],
66 | refpoints[4],
67 | refpoints[5]])
68 |
69 |     # least squares solution to minimize ||Ax - y||
70 | a,b,tx,ty = linalg.lstsq(A,y)[0]
71 | R = array([[a, -b], [b, a]]) # rotation matrix incl scale
72 |
73 | return R,tx,ty
74 |
75 |
76 | def rigid_alignment(faces,path,plotflag=False):
77 | """ Align images rigidly and save as new images.
78 | path determines where the aligned images are saved
79 | set plotflag=True to plot the images. """
80 |
81 | # take the points in the first image as reference points
82 | refpoints = faces.values()[0]
83 |
84 | # warp each image using affine transform
85 | for face in faces:
86 | points = faces[face]
87 |
88 | R,tx,ty = compute_rigid_transform(refpoints, points)
89 | T = array([[R[1][1], R[1][0]], [R[0][1], R[0][0]]])
90 |
91 | im = array(Image.open(os.path.join(path,face)))
92 | im2 = zeros(im.shape, 'uint8')
93 |
94 | # warp each color channel
95 | for i in range(len(im.shape)):
96 | im2[:,:,i] = ndimage.affine_transform(im[:,:,i],linalg.inv(T),offset=[-ty,-tx])
97 |
98 | if plotflag:
99 | imshow(im2)
100 | show()
101 |
102 | # crop away border and save aligned images
103 | h,w = im2.shape[:2]
104 | border = (w+h)/20
105 |
106 | # crop away border
107 | imsave(os.path.join(path, 'aligned/'+face),im2[border:h-border,border:w-border,:])
108 |
109 |
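
A usage sketch for the registration helpers above; the XML file and image directory are hypothetical placeholders, and the 'aligned/' subfolder that rigid_alignment() writes into is assumed to exist:

```python
from PCV.tools import imregistration

# control points (eyes and mouth) per face image, in the xf/yf, xs/ys, xm/ym format
# read by read_points_from_xml(); 'faces.xml' is a placeholder name
points = imregistration.read_points_from_xml('faces.xml')

# rigidly warp every image onto the first one and save the crops to './faces/aligned/'
imregistration.rigid_alignment(points, './faces/')
```
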
--------------------------------------------------------------------------------
/PCV/PCV/tools/imtools.py:
--------------------------------------------------------------------------------
1 | import os
2 | from PIL import Image
3 | from pylab import *
4 | from numpy import *
5 |
6 |
7 |
8 | def get_imlist(path):
9 | """ Returns a list of filenames for
10 | all jpg images in a directory. """
11 |
12 | return [os.path.join(path,f) for f in os.listdir(path) if f.endswith('.jpg')]
13 |
14 |
15 | def compute_average(imlist):
16 | """ Compute the average of a list of images. """
17 |
18 | # open first image and make into array of type float
19 | averageim = array(Image.open(imlist[0]), 'f')
20 |
21 | skipped = 0
22 |
23 | for imname in imlist[1:]:
24 | try:
25 | averageim += array(Image.open(imname))
26 | except:
27 | print imname + "...skipped"
28 | skipped += 1
29 |
30 | averageim /= (len(imlist) - skipped)
31 |
32 | # return average as uint8
33 | return array(averageim, 'uint8')
34 |
35 |
36 | def convert_to_grayscale(imlist):
37 | """ Convert a set of images to grayscale. """
38 |
39 | for imname in imlist:
40 | im = Image.open(imname).convert("L")
41 | im.save(imname)
42 |
43 |
44 | def imresize(im,sz):
45 | """ Resize an image array using PIL. """
46 | pil_im = Image.fromarray(uint8(im))
47 |
48 | return array(pil_im.resize(sz))
49 |
50 |
51 | def histeq(im,nbr_bins=256):
52 | """ Histogram equalization of a grayscale image. """
53 |
54 | # get image histogram
55 | imhist,bins = histogram(im.flatten(),nbr_bins,normed=True)
56 | cdf = imhist.cumsum() # cumulative distribution function
57 | cdf = 255 * cdf / cdf[-1] # normalize
58 |
59 | # use linear interpolation of cdf to find new pixel values
60 | im2 = interp(im.flatten(),bins[:-1],cdf)
61 |
62 | return im2.reshape(im.shape), cdf
63 |
64 |
65 | def plot_2D_boundary(plot_range,points,decisionfcn,labels,values=[0]):
66 | """ Plot_range is (xmin,xmax,ymin,ymax), points is a list
67 |     of class points, decisionfcn is a function to evaluate,
68 | labels is a list of labels that decisionfcn returns for each class,
69 | values is a list of decision contours to show. """
70 |
71 | clist = ['b','r','g','k','m','y'] # colors for the classes
72 |
73 | # evaluate on a grid and plot contour of decision function
74 | x = arange(plot_range[0],plot_range[1],.1)
75 | y = arange(plot_range[2],plot_range[3],.1)
76 | xx,yy = meshgrid(x,y)
77 | xxx,yyy = xx.flatten(),yy.flatten() # lists of x,y in grid
78 | zz = array(decisionfcn(xxx,yyy))
79 | zz = zz.reshape(xx.shape)
80 | # plot contour(s) at values
81 | contour(xx,yy,zz,values)
82 |
83 | # for each class, plot the points with '*' for correct, 'o' for incorrect
84 | for i in range(len(points)):
85 | d = decisionfcn(points[i][:,0],points[i][:,1])
86 | correct_ndx = labels[i]==d
87 | incorrect_ndx = labels[i]!=d
88 | plot(points[i][correct_ndx,0],points[i][correct_ndx,1],'*',color=clist[i])
89 | plot(points[i][incorrect_ndx,0],points[i][incorrect_ndx,1],'o',color=clist[i])
90 |
91 | axis('equal')
92 |
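
A short sketch of how these helpers compose; the folder name is illustrative:

```python
from PIL import Image
from numpy import array
from PCV.tools import imtools

imlist = imtools.get_imlist('./first500/')        # all .jpg files in the folder
avg = imtools.compute_average(imlist)             # pixel-wise average of the set

im = array(Image.open(imlist[0]).convert('L'))    # grayscale copy of the first image
im_eq, cdf = imtools.histeq(im)                   # histogram-equalized image and its cdf
thumb = imtools.imresize(im_eq, (128, 128))       # PIL-backed resize, back to an array
```
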
--------------------------------------------------------------------------------
/PCV/PCV/tools/ncut.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | from pylab import *
3 | from numpy import *
4 | from scipy.cluster.vq import *
5 |
6 |
7 | def cluster(S,k,ndim):
8 | """ Spectral clustering from a similarity matrix."""
9 |
10 | # check for symmetry
11 | if sum(abs(S-S.T)) > 1e-10:
12 | print 'not symmetric'
13 |
14 | # create Laplacian matrix
15 | rowsum = sum(abs(S),axis=0)
16 | D = diag(1 / sqrt(rowsum + 1e-6))
17 | L = dot(D,dot(S,D))
18 |
19 | # compute eigenvectors of L
20 | U,sigma,V = linalg.svd(L,full_matrices=False)
21 |
22 | # create feature vector from ndim first eigenvectors
23 | # by stacking eigenvectors as columns
24 | features = array(V[:ndim]).T
25 |
26 | # k-means
27 | features = whiten(features)
28 | centroids,distortion = kmeans(features,k)
29 | code,distance = vq(features,centroids)
30 |
31 | return code,V
32 |
33 |
34 | def ncut_graph_matrix(im,sigma_d=1e2,sigma_g=1e-2):
35 | """ Create matrix for normalized cut. The parameters are
36 | the weights for pixel distance and pixel similarity. """
37 |
38 | m,n = im.shape[:2]
39 | N = m*n
40 |
41 | # normalize and create feature vector of RGB or grayscale
42 | if len(im.shape)==3:
43 | for i in range(3):
44 | im[:,:,i] = im[:,:,i] / im[:,:,i].max()
45 | vim = im.reshape((-1,3))
46 | else:
47 | im = im / im.max()
48 | vim = im.flatten()
49 |
50 | # x,y coordinates for distance computation
51 | xx,yy = meshgrid(range(n),range(m))
52 | x,y = xx.flatten(),yy.flatten()
53 |
54 | # create matrix with edge weights
55 | W = zeros((N,N),'f')
56 | for i in range(N):
57 | for j in range(i,N):
58 | d = (x[i]-x[j])**2 + (y[i]-y[j])**2
59 | W[i,j] = W[j,i] = exp(-1.0*sum((vim[i]-vim[j])**2)/sigma_g) * exp(-d/sigma_d)
60 |
61 | return W
62 |
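
A sketch of spectral clustering with the functions above; since ncut_graph_matrix() builds a dense N x N weight matrix and cluster() takes its SVD, the image is shrunk aggressively first (file name and sizes are illustrative):

```python
from PIL import Image
from numpy import array
from PCV.tools import ncut, imtools

im = array(Image.open('scene.pgm'))        # placeholder grayscale image
wid = 50                                   # work at 50x50, so N = 2500 nodes
rim = array(imtools.imresize(im, (wid, wid)), 'f')

A = ncut.ncut_graph_matrix(rim)            # dense pairwise weight matrix
code, V = ncut.cluster(A, k=3, ndim=3)     # k-means on the leading eigenvectors
segmentation = code.reshape(wid, wid)      # one cluster label per (downsampled) pixel
```
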
--------------------------------------------------------------------------------
/PCV/PCV/tools/pca.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | from numpy import *
3 |
4 |
5 | def pca(X):
6 | """ Principal Component Analysis
7 | input: X, matrix with training data stored as flattened arrays in rows
8 | return: projection matrix (with important dimensions first), variance and mean.
9 | """
10 |
11 | # get dimensions
12 | num_data,dim = X.shape
13 |
14 | # center data
15 | mean_X = X.mean(axis=0)
16 | X = X - mean_X
17 |
18 | if dim>num_data:
19 | # PCA - compact trick used
20 | M = dot(X,X.T) # covariance matrix
21 | e,EV = linalg.eigh(M) # eigenvalues and eigenvectors
22 | tmp = dot(X.T,EV).T # this is the compact trick
23 | V = tmp[::-1] # reverse since last eigenvectors are the ones we want
24 | S = sqrt(e)[::-1] # reverse since eigenvalues are in increasing order
25 | for i in range(V.shape[1]):
26 | V[:,i] /= S
27 | else:
28 | # PCA - SVD used
29 | U,S,V = linalg.svd(X)
30 | V = V[:num_data] # only makes sense to return the first num_data
31 |
32 | # return the projection matrix, the variance and the mean
33 | return V,S,mean_X
34 |
35 |
36 | def center(X):
37 | """ Center the square matrix X (subtract col and row means). """
38 |
39 | n,m = X.shape
40 | if n != m:
41 | raise Exception('Matrix is not square.')
42 |
43 | colsum = X.sum(axis=0) / n
44 | rowsum = X.sum(axis=1) / n
45 | totalsum = X.sum() / (n**2)
46 |
47 | #center
48 | Y = array([[ X[i,j]-rowsum[i]-colsum[j]+totalsum for i in range(n) ] for j in range(n)])
49 |
50 | return Y
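
A sketch of pca() on a stack of images flattened to rows; the folder is illustrative and all images are assumed to share the same size:

```python
from PIL import Image
from numpy import array
from PCV.tools import pca, imtools

imlist = imtools.get_imlist('./first500/')[:20]
im = array(Image.open(imlist[0]).convert('L'))
m, n = im.shape[:2]

# one flattened grayscale image per row
immatrix = array([array(Image.open(f).convert('L')).flatten() for f in imlist], 'f')

V, S, immean = pca.pca(immatrix)
mean_image = immean.reshape(m, n)
first_mode = V[0].reshape(m, n)    # leading principal component, viewable as an image
```
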
--------------------------------------------------------------------------------
/PCV/PCV/tools/rof.py:
--------------------------------------------------------------------------------
1 | from numpy import *
2 |
3 |
4 | def denoise(im,U_init,tolerance=0.1,tau=0.125,tv_weight=100):
5 | """ An implementation of the Rudin-Osher-Fatemi (ROF) denoising model
6 | using the numerical procedure presented in Eq. (11) of A. Chambolle
7 | (2005). Implemented using periodic boundary conditions.
8 |
9 | Input: noisy input image (grayscale), initial guess for U, weight of
10 | the TV-regularizing term, steplength, tolerance for the stop criterion
11 |
12 | Output: denoised and detextured image, texture residual. """
13 |
14 | m,n = im.shape #size of noisy image
15 |
16 | # initialize
17 | U = U_init
18 | Px = zeros((m, n)) #x-component to the dual field
19 | Py = zeros((m, n)) #y-component of the dual field
20 | error = 1
21 |
22 | while (error > tolerance):
23 | Uold = U
24 |
25 | # gradient of primal variable
26 | GradUx = roll(U,-1,axis=1)-U # x-component of U's gradient
27 | GradUy = roll(U,-1,axis=0)-U # y-component of U's gradient
28 |
29 |         # update the dual variable
30 | PxNew = Px + (tau/tv_weight)*GradUx # non-normalized update of x-component (dual)
31 | PyNew = Py + (tau/tv_weight)*GradUy # non-normalized update of y-component (dual)
32 | NormNew = maximum(1,sqrt(PxNew**2+PyNew**2))
33 |
34 | Px = PxNew/NormNew # update of x-component (dual)
35 | Py = PyNew/NormNew # update of y-component (dual)
36 |
37 | # update the primal variable
38 | RxPx = roll(Px,1,axis=1) # right x-translation of x-component
39 | RyPy = roll(Py,1,axis=0) # right y-translation of y-component
40 |
41 | DivP = (Px-RxPx)+(Py-RyPy) # divergence of the dual field.
42 | U = im + tv_weight*DivP # update of the primal variable
43 |
44 | # update of error
45 |         error = linalg.norm(U-Uold)/sqrt(n*m)
46 |
47 | return U,im-U # denoised image and texture residual
48 |
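
A small sketch of denoise() on a synthetic noisy image; the image size and noise level are arbitrary:

```python
from numpy import zeros, random
from PCV.tools import rof

# synthetic test image: a bright square on a dark background, plus Gaussian noise
im = zeros((256, 256))
im[64:192, 64:192] = 128
im = im + 30 * random.standard_normal((256, 256))

U, T = rof.denoise(im, im)    # denoised image U and texture residual T = im - U
```
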
--------------------------------------------------------------------------------
/PCV/README.md:
--------------------------------------------------------------------------------
1 | ## About PCV
2 | PCV is a pure Python library for computer vision based on the book "Programming Computer Vision with Python" by Jan Erik Solem.
3 |
4 | More details on the book (and a pdf version of the latest draft) can be found at [programmingcomputervision.com](http://programmingcomputervision.com/).
5 |
6 | ### Dependencies
7 | You need to have Python 2.6+ and as a minimum:
8 |
9 | * [NumPy](http://numpy.scipy.org/)
10 | * [Matplotlib](http://matplotlib.sourceforge.net/)
11 |
12 | Some parts use:
13 |
14 | * [SciPy](http://scipy.org/)
15 |
16 | Many sections show applications that require smaller specialized Python modules. See the book or the individual examples for a full list of these dependencies.
17 |
18 | ### Structure
19 |
20 |
21 | *PCV/* the code.
22 |
23 |
24 | *pcv_book/* contains a clean folder with the code exactly as used in the book at time of publication.
25 |
26 |
27 | *examples/* contains sample code. Some examples use data available at [programmingcomputervision.com](http://programmingcomputervision.com/).
28 |
29 |
30 | ### Installation
31 |
32 |
33 | Open a terminal in the PCV directory and run (with sudo if needed on your system):
34 |
35 |
36 | python setup.py install
37 |
38 | Now you should be able to do
39 |
40 |
41 | import PCV
42 |
43 | in your Python session or script. Try one of the sample code examples to check that the installation works.
44 |
45 |
46 | ### License
47 |
48 | All code in this project is provided as open source under the BSD license (2-clause "Simplified BSD License"). See LICENSE.txt.
49 |
50 |
51 | ---
52 | -Jan Erik Solem
--------------------------------------------------------------------------------
/PCV/expand_tabs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 |
4 | reg_pyfile=re.compile(r'.*\.py$',re.I)
5 |
6 | def expand_tabs(file0):
7 | '''
8 | This function takes the name of a python source file, expands all tabs to 4 spaces (for
9 | PEP 8 compliance), and rewrites the file in place.
10 | '''
11 | str_file_contents=open(file0,'rb').read()
12 | str_pep_contents=str_file_contents.replace('\x09',4*'\x20')
13 | open(file0,'wb').write(str_pep_contents)
14 | return None
15 |
16 | def pepify_directory(path_root):
17 | for (path,subdir,lst_file) in os.walk(path_root):
18 | for file0 in (file1 for file1 in lst_file if reg_pyfile.match(file1)):
19 | expand_tabs(os.path.join(path,file0))
20 | print(os.path.join(path,file0))
21 | pass
22 | pass
23 | return None
24 |
25 | if __name__=='__main__':
26 | pepify_directory('.')
27 | pass
28 |
29 |
--------------------------------------------------------------------------------
/PCV/get-data-files.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | URL="http://programmingcomputervision.com/downloads/pcv_data.zip"
4 |
5 | if /usr/bin/env wget $URL
6 | then
7 |
8 | mkdir data-unpack.tmp
9 | cd data-unpack.tmp
10 |
11 | unzip -q ../pcv_data.zip
12 | rm -rf __MACOSX
13 |
14 | cd ..
15 |
16 | if [ -d data/ ]
17 | then
18 | echo Directory 'pcv-data' already exists.
19 | echo Look in the data-unpack.tmp directory for the unzipped data.
20 | else
21 | mv data-unpack.tmp/data .
22 | rmdir data-unpack.tmp
23 | fi
24 |
25 | rm pcv_data.zip
26 | else
27 | echo Could not get data archive. Check URL and make sure you have wget.
28 | fi
29 |
--------------------------------------------------------------------------------
/PCV/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 |
5 | setup(name='PCV',
6 | version='1.0',
7 | author='Jan Erik Solem',
8 | url='https://github.com/jesolem/PCV',
9 | packages=['PCV', 'PCV.classifiers', 'PCV.clustering', 'PCV.geometry',
10 | 'PCV.imagesearch', 'PCV.localdescriptors', 'PCV.tools'],
11 | requires=['NumPy', 'Matplotlib', 'SciPy'],
12 | )
13 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## gosearch
2 |
3 | gosearch is a web image search application. Unlike traditional text-based image retrieval (TBIR), gosearch performs content-based image retrieval (CBIR).
4 |
5 | ### Framework
6 |
7 | 1. First, extract SIFT features for every image in the first500 image library; each image gets its own feature file.
8 | 2. Build the visual vocabulary with a bag-of-words (BoW) model and weight the word histograms with tf-idf.
9 | 3. Match queries against the indexed images (see the sketch after this list).
10 |
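The sketch below condenses these three steps, mirroring the repository's own scripts (`cocabulary.py`, `addImage.py`, `candidates.py`); the folder, database name and vocabulary size are simply the values those scripts use:

```python
import pickle
from PCV.imagesearch import vocabulary, imagesearch
from PCV.localdescriptors import sift
from PCV.tools.imtools import get_imlist

imlist = get_imlist('./first500/')
featlist = [name[:-3] + 'sift' for name in imlist]

# 1. extract SIFT features, one .sift file per image
for imname, featname in zip(imlist, featlist):
    sift.process_image(imname, featname)

# 2. build the BoW vocabulary (1000 visual words) and pickle it
voc = vocabulary.Vocabulary('ukbench')
voc.train(featlist, 1000, 10)
with open('./first500/vocabulary.pkl', 'wb') as f:
    pickle.dump(voc, f)

# 3. index every image into web.db, then query it
indx = imagesearch.Indexer('web.db', voc)
indx.create_tables()
for imname, featname in zip(imlist, featlist):
    locs, descr = sift.read_features_from_file(featname)
    indx.add_to_index(imname, descr)
indx.db_commit()

src = imagesearch.Searcher('web.db', voc)
print src.query(imlist[0])[:10]  # (distance, image id) pairs for the ten best matches
```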
11 | ### Installation
12 |
13 | - Since the `PCV` library depends on matplotlib, numpy and others, it is recommended to install `python(x,y)`, preferably using the **full** installation option.
14 | - After installing `python(x,y)`, change into the `PCV` directory inside **gosearch** and run:
15 |
16 | ```python
17 | python setup.py install
18 | ```
19 | Once the `PCV` library is installed, go back up one level to the **gosearch** directory, enter the `CherryPy-3.2.4` directory, and run the same command there to install CherryPy.
20 |
21 | ### Running
22 |
23 | - Once the installation above is complete, the application can be run.
24 |
--------------------------------------------------------------------------------
/addImage.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | from PCV.imagesearch import imagesearch
3 | from PCV.localdescriptors import sift
4 | from sqlite3 import dbapi2 as sqlite
5 | from PCV.tools.imtools import get_imlist
6 |
7 | imlist = get_imlist('./first500/')
8 | nbr_images = len(imlist)
9 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
10 | # load vocabulary
11 | with open('./first500/vocabulary.pkl', 'rb') as f:
12 | voc = pickle.load(f)
13 | # create indexer
14 | indx = imagesearch.Indexer('web.db',voc)
15 | indx.create_tables()
16 | # go through all images, project features on vocabulary and insert
17 | for i in range(nbr_images)[:500]:
18 | locs,descr = sift.read_features_from_file(featlist[i])
19 | indx.add_to_index(imlist[i],descr)
20 | # commit to database
21 | indx.db_commit()
22 |
23 | con = sqlite.connect('web.db')
24 | print con.execute('select count (filename) from imlist').fetchone()
25 | print con.execute('select * from imlist').fetchone()
26 |
--------------------------------------------------------------------------------
/candidates.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | from PCV.imagesearch import imagesearch
3 | from PCV.localdescriptors import sift
4 | from sqlite3 import dbapi2 as sqlite
5 | from PCV.tools.imtools import get_imlist
6 |
7 | imlist = get_imlist('./first500/')
8 | nbr_images = len(imlist)
9 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
10 |
11 |
12 | f = open('./first500/vocabulary.pkl', 'rb')
13 | voc = pickle.load(f)
14 | f.close()
15 |
16 | src = imagesearch.Searcher('web.db',voc)
17 | locs,descr = sift.read_features_from_file(featlist[0])
18 | iw = voc.project(descr)
19 |
20 | print 'ask using a histogram...'
21 | print src.candidates_from_histogram(iw)[:10]
22 |
23 | src = imagesearch.Searcher('web.db',voc)
24 | print 'try a query...'
25 |
26 | nbr_results = 12
27 | res = [w[1] for w in src.query(imlist[39])[:nbr_results]]
28 | imagesearch.plot_results(src,res)
--------------------------------------------------------------------------------
/cocabulary.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import pickle
3 | from PCV.imagesearch import vocabulary
4 | from PCV.tools.imtools import get_imlist
5 | from PCV.localdescriptors import sift
6 |
7 | imlist = get_imlist('./first500/')
8 | nbr_images = len(imlist)
9 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
10 |
11 | for i in range(nbr_images):
12 | sift.process_image(imlist[i], featlist[i])
13 |
14 | voc = vocabulary.Vocabulary('ukbench')
15 | voc.train(featlist, 1000, 10)
16 | # saving vocabulary
17 | with open('./first500/vocabulary.pkl', 'wb') as f:
18 | pickle.dump(voc, f)
19 | print 'vocabulary is:', voc.name, voc.nbr_words
--------------------------------------------------------------------------------
/first500/first500.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/first500/first500.zip
--------------------------------------------------------------------------------
/newVersion/111.py:
--------------------------------------------------------------------------------
1 | descr = []
2 | descr.append(sift.read_features_from_file(featurefiles[0])[1])
3 | descriptors = descr[0] #stack all features for k-means
4 | print "start vstack descriptors"
5 | for i in arange(1,nbr_images):
6 | descr.append(sift.read_features_from_file(featurefiles[i])[1])
7 | descriptors = vstack((descriptors,descr[i]))
8 |
9 | # k-means: last number determines number of runs
10 | print "start kmeans"
11 | voc,distortion = kmeans(descriptors[::subsampling,:],k,1)
12 | nbr_words = voc.shape[0]
13 |
14 | # go through all training images and project on vocabulary
15 | imwords = zeros((nbr_images, nbr_words))
16 | for i in range( nbr_images ):
17 | imwords[i] = project(array(descr[i]))
18 |
19 | nbr_occurences = sum( (imwords > 0)*1 ,axis=0)
20 |
21 | idf = log( (1.0*nbr_images) / (1.0*nbr_occurences+1) )
22 | trainingdata = featurefiles
23 |
24 | def project(descriptors):
25 | """ Project descriptors on the vocabulary
26 | to create a histogram of words. """
27 |
28 | # histogram of image words
29 | imhist = zeros((nbr_words))
30 | words,distance = vq(descriptors,voc)
31 | for w in words:
32 | imhist[w] += 1
33 |
34 | return imhist
--------------------------------------------------------------------------------
/newVersion/addImage.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | from PCV.imagesearch import imagesearch
3 | from PCV.localdescriptors import sift
4 | from sqlite3 import dbapi2 as sqlite
5 | from PCV.tools.imtools import get_imlist
6 |
7 | imlist = get_imlist('./first500/')
8 | nbr_images = len(imlist)
9 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
10 | # load vocabulary
11 | with open('./vocabulary.pkl', 'rb') as f:
12 | voc = pickle.load(f)
13 | # create indexer
14 | indx = imagesearch.Indexer('web.db',voc)
15 | indx.create_tables()
16 | # go through all images, project features on vocabulary and insert
17 | for i in range(nbr_images)[:500]:
18 | locs,descr = sift.read_features_from_file(featlist[i])
19 | indx.add_to_index(imlist[i],descr)
20 | # commit to database
21 | indx.db_commit()
22 |
23 | con = sqlite.connect('web.db')
24 | print con.execute('select count (filename) from imlist').fetchone()
25 | print con.execute('select * from imlist').fetchone()
26 |
--------------------------------------------------------------------------------
/newVersion/candidates.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | from numpy import *
3 | from imagesearch import imagesearch
4 | from localdescriptors import sift
5 | from sqlite3 import dbapi2 as sqlite
6 | from tools.imtools import get_imlist
7 |
8 | imlist = get_imlist('./first500/')
9 | nbr_images = len(imlist)
10 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
11 |
12 |
13 | f = open('./vocabulary.pkl', 'rb')
14 | voc = pickle.load(f)
15 | f.close()
16 |
17 | src = imagesearch.Searcher('web.db',voc)
18 | locs,descr = sift.read_features_from_file(featlist[0])
19 | iw = voc.project(array(descr))
20 |
21 | print 'ask using a histogram...'
22 | print src.candidates_from_histogram(iw)[:10]
23 |
24 | src = imagesearch.Searcher('web.db',voc)
25 | print 'try a query...'
26 |
27 | nbr_results = 12
28 | res = [w[1] for w in src.query(imlist[12])[:nbr_results]]
29 | imagesearch.plot_results(src,res)
--------------------------------------------------------------------------------
/newVersion/gabor.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import matplotlib
4 | import matplotlib.pyplot as plt
5 | import numpy as np
6 | from scipy import ndimage as nd
7 |
8 | from skimage import data
9 | from skimage.util import img_as_float
10 | from skimage.filters import gabor_kernel
11 |
12 |
13 | def compute_feats(image, kernels):
14 | feats = np.zeros((len(kernels), 2), dtype=np.double)
15 | for k, kernel in enumerate(kernels):
16 | filtered = nd.convolve(image, kernel, mode='wrap')
17 | feats[k, 0] = filtered.mean()
18 | feats[k, 1] = filtered.var()
19 | return feats
20 |
21 |
22 | def match(feats, ref_feats):
23 | min_error = np.inf
24 | min_i = None
25 | for i in range(ref_feats.shape[0]):
26 | error = np.sum((feats - ref_feats[i, :])**2)
27 | if error < min_error:
28 | min_error = error
29 | min_i = i
30 | return min_i
31 |
32 |
33 | # prepare filter bank kernels
34 | kernels = []
35 | for theta in range(4):
36 | theta = theta / 4. * np.pi
37 | for sigma in (1, 3):
38 | for frequency in (0.05, 0.25):
39 | kernel = np.real(gabor_kernel(frequency, theta=theta,
40 | sigma_x=sigma, sigma_y=sigma))
41 | kernels.append(kernel)
42 |
43 |
44 | shrink = (slice(0, None, 3), slice(0, None, 3))
45 | brick = img_as_float(data.load('brick.png'))[shrink]
46 | grass = img_as_float(data.load('grass.png'))[shrink]
47 | wall = img_as_float(data.load('rough-wall.png'))[shrink]
48 | image_names = ('brick', 'grass', 'wall')
49 | images = (brick, grass, wall)
50 |
51 | # prepare reference features
52 | ref_feats = np.zeros((3, len(kernels), 2), dtype=np.double)
53 | ref_feats[0, :, :] = compute_feats(brick, kernels)
54 | ref_feats[1, :, :] = compute_feats(grass, kernels)
55 | ref_feats[2, :, :] = compute_feats(wall, kernels)
56 |
57 | print('Rotated images matched against references using Gabor filter banks:')
58 |
59 | print('original: brick, rotated: 30deg, match result: ', end='')
60 | feats = compute_feats(nd.rotate(brick, angle=190, reshape=False), kernels)
61 | print(image_names[match(feats, ref_feats)])
62 |
63 | print('original: brick, rotated: 70deg, match result: ', end='')
64 | feats = compute_feats(nd.rotate(brick, angle=70, reshape=False), kernels)
65 | print(image_names[match(feats, ref_feats)])
66 |
67 | print('original: grass, rotated: 145deg, match result: ', end='')
68 | feats = compute_feats(nd.rotate(grass, angle=145, reshape=False), kernels)
69 | print(image_names[match(feats, ref_feats)])
70 |
71 |
72 | def power(image, kernel):
73 | # Normalize images for better comparison.
74 | image = (image - image.mean()) / image.std()
75 | return np.sqrt(nd.convolve(image, np.real(kernel), mode='wrap')**2 +
76 | nd.convolve(image, np.imag(kernel), mode='wrap')**2)
77 |
78 | # Plot a selection of the filter bank kernels and their responses.
79 | results = []
80 | kernel_params = []
81 | for theta in (0, 1):
82 | theta = theta / 4. * np.pi
83 | for frequency in (0.1, 0.4):
84 | kernel = gabor_kernel(frequency, theta=theta)
85 | params = 'theta=%d,\nfrequency=%.2f' % (theta * 180 / np.pi, frequency)
86 | kernel_params.append(params)
87 | # Save kernel and the power image for each image
88 | results.append((kernel, [power(img, kernel) for img in images]))
89 |
90 | fig, axes = plt.subplots(nrows=5, ncols=4, figsize=(5, 6))
91 | plt.gray()
92 |
93 | fig.suptitle('Image responses for Gabor filter kernels', fontsize=12)
94 |
95 | axes[0][0].axis('off')
96 |
97 | # Plot original images
98 | for label, img, ax in zip(image_names, images, axes[0][1:]):
99 | ax.imshow(img)
100 | ax.set_title(label, fontsize=9)
101 | ax.axis('off')
102 |
103 | for label, (kernel, powers), ax_row in zip(kernel_params, results, axes[1:]):
104 | # Plot Gabor kernel
105 | ax = ax_row[0]
106 | ax.imshow(np.real(kernel), interpolation='nearest')
107 | ax.set_ylabel(label, fontsize=7)
108 | ax.set_xticks([])
109 | ax.set_yticks([])
110 |
111 | # Plot Gabor responses with the contrast normalized for each filter
112 | vmin = np.min(powers)
113 | vmax = np.max(powers)
114 | for patch, ax in zip(powers, ax_row[1:]):
115 | ax.imshow(patch, vmin=vmin, vmax=vmax)
116 | ax.axis('off')
117 |
118 | plt.show()
--------------------------------------------------------------------------------
/newVersion/getVocabulary.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import pickle
3 | from imagesearch import vocabulary
4 | from tools.imtools import get_imlist
5 | from localdescriptors import sift
6 |
7 | imlist = get_imlist('./first500/')
8 | nbr_images = len(imlist)
9 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
10 |
11 | #for i in range(nbr_images):
12 | # sift.process_image(imlist[i], featlist[i])
13 |
14 | voc = vocabulary.Vocabulary('ukbench')
15 | voc.train(featlist, 10000, 10)
16 | # saving vocabulary
17 | with open('vocabulary.pkl', 'wb') as f:
18 | pickle.dump(voc, f)
19 | print 'vocabulary is:', voc.name, voc.nbr_words
--------------------------------------------------------------------------------
/newVersion/imagesearch/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/imagesearch/__init__.py
--------------------------------------------------------------------------------
/newVersion/imagesearch/vocabulary.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from numpy import *
3 | from scipy.cluster.vq import *
4 |
5 | from localdescriptors import sift
6 |
7 |
8 | class Vocabulary(object):
9 |
10 | def __init__(self,name):
11 | self.name = name
12 | self.voc = []
13 | self.idf = []
14 | self.trainingdata = []
15 | self.nbr_words = 0
16 |
17 | def train(self,featurefiles,k=100,subsampling=10):
18 | """ Train a vocabulary from features in files listed
19 | in featurefiles using k-means with k number of words.
20 | Subsampling of training data can be used for speedup. """
21 |
22 | nbr_images = len(featurefiles)
23 | # read the features from file
24 | descr = []
25 | descr.append(sift.read_features_from_file(featurefiles[0])[1])
26 | descriptors = descr[0] #stack all features for k-means
27 | print "start vstack descriptors"
28 | for i in arange(1,nbr_images):
29 | descr.append(sift.read_features_from_file(featurefiles[i])[1])
30 | descriptors = vstack((descriptors,descr[i]))
31 |
32 | # k-means: last number determines number of runs
33 | print "start kmeans"
34 | self.voc,distortion = kmeans(descriptors[::subsampling,:],k,1)
35 | self.nbr_words = self.voc.shape[0]
36 |
37 | # go through all training images and project on vocabulary
38 | imwords = zeros((nbr_images,self.nbr_words))
39 | for i in range( nbr_images ):
40 | imwords[i] = self.project(array(descr[i]))
41 |
42 | nbr_occurences = sum( (imwords > 0)*1 ,axis=0)
43 |
44 | #http://zhangjunhd.github.io/2014/09/30/text-clustering.html
45 |
46 | self.idf = log( (1.0*nbr_images) / (1.0*nbr_occurences+1) ) # only the idf weights are computed and stored here; this part of the implementation looks slightly off
47 | self.trainingdata = featurefiles
48 |
49 | def project(self,descriptors):
50 | """ Project descriptors on the vocabulary
51 | to create a histogram of words. """
52 |
53 | # histogram of image words
54 | imhist = zeros((self.nbr_words))
55 | words,distance = vq(descriptors,self.voc)
56 | for w in words:
57 | imhist[w] += 1
58 |
59 | return imhist
60 |
61 | def get_words(self,descriptors):
62 | """ Convert descriptors to words. """
63 | return vq(descriptors,self.voc)[0]
64 |
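
The comment in train() above notes that only the idf weights are computed and stored. A hedged sketch of how a histogram from project() could be combined with them into a tf-idf weighted vector; this weighting step is an illustration, not something the class does itself:

```python
def tfidf_vector(imhist, idf):
    """ Turn a raw visual-word histogram (as returned by Vocabulary.project)
        into a tf-idf weighted vector. Illustrative only. """
    tf = imhist / (imhist.sum() + 1e-9)   # term frequency: normalized word counts
    return tf * idf                       # scale each word by its inverse document frequency

# e.g. tfidf_vector(voc.project(descr), voc.idf) for some descriptor array descr
```
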
--------------------------------------------------------------------------------
/newVersion/localdescriptors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/localdescriptors/__init__.py
--------------------------------------------------------------------------------
/newVersion/localdescriptors/dsift.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | from numpy import *
3 | import os
4 |
5 | from PCV.localdescriptors import sift
6 |
7 |
8 | def process_image_dsift(imagename,resultname,size=20,steps=10,force_orientation=False,resize=None):
9 | """ Process an image with densely sampled SIFT descriptors
10 | and save the results in a file. Optional input: size of features,
11 | steps between locations, forcing computation of descriptor orientation
12 | (False means all are oriented upwards), tuple for resizing the image."""
13 |
14 | im = Image.open(imagename).convert('L')
15 | if resize!=None:
16 | im = im.resize(resize)
17 | m,n = im.size
18 |
19 | if imagename[-3:] != 'pgm':
20 | #create a pgm file
21 | im.save('tmp.pgm')
22 | imagename = 'tmp.pgm'
23 |
24 | # create frames and save to temporary file
25 | scale = size/3.0
26 | x,y = meshgrid(range(steps,m,steps),range(steps,n,steps))
27 | xx,yy = x.flatten(),y.flatten()
28 | frame = array([xx,yy,scale*ones(xx.shape[0]),zeros(xx.shape[0])])
29 | savetxt('tmp.frame',frame.T,fmt='%03.3f')
30 |
31 | if force_orientation:
32 | cmmd = str("D:\mltools\win32vlfeat\sift.exe "+imagename+" --output="+resultname+
33 | " --read-frames=tmp.frame --orientations")
34 | else:
35 | cmmd = str("D:\mltools\win32vlfeat\sift.exe "+imagename+" --output="+resultname+
36 | " --read-frames=tmp.frame")
37 | os.system(cmmd)
38 | print 'processed', imagename, 'to', resultname
39 |
40 |
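
A usage sketch for the dense-SIFT wrapper above; it shells out to the hard-coded VLFeat sift.exe path inside the function, so it only runs where that path (or an edited copy of it) exists. The image and output names are placeholders, and the import follows the package-relative style of the other newVersion scripts:

```python
from localdescriptors.dsift import process_image_dsift
from PCV.localdescriptors import sift

process_image_dsift('empire.jpg', 'empire.dsift',
                    size=16, steps=8, force_orientation=True, resize=(320, 240))
locs, descr = sift.read_features_from_file('empire.dsift')
print 'read', locs.shape[0], 'densely sampled descriptors'
```
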
--------------------------------------------------------------------------------
/newVersion/reRanking.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import pickle
3 | from PCV.localdescriptors import sift
4 | from PCV.imagesearch import imagesearch
5 | from PCV.geometry import homography
6 | from PCV.tools.imtools import get_imlist
7 |
8 |
9 | # load image list and vocabulary
10 | imlist = get_imlist('./first500/')
11 | nbr_images = len(imlist)
12 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
13 |
14 | # load vocabulary
15 | with open('./first500/vocabulary.pkl', 'rb') as f:
16 | voc = pickle.load(f)
17 |
18 | src = imagesearch.Searcher('web.db',voc)
19 |
20 |
21 | # index of query image and number of results to return
22 | q_ind = 0
23 | nbr_results = 20
24 |
25 |
26 | # regular query
27 | res_reg = [w[1] for w in src.query(imlist[q_ind])[:nbr_results]]
28 | print 'top matches (regular):', res_reg # res_reg holds the candidate images (ranked by Euclidean distance)
29 |
30 |
31 | # load image features for query image
32 | q_locs,q_descr = sift.read_features_from_file(featlist[q_ind])
33 | fp = homography.make_homog(q_locs[:,:2].T)
34 |
35 | # RANSAC model for homography fitting
36 | model = homography.RansacModel()
37 |
38 | rank = {}
39 | # load image features for result
40 | for ndx in res_reg[1:]:
41 | locs,descr = sift.read_features_from_file(featlist[ndx-1]) # because 'ndx' is a rowid of the DB that starts at 1.
42 | # locs,descr = sift.read_features_from_file(featlist[ndx])
43 | # get matches
44 | matches = sift.match(q_descr,descr)
45 | ind = matches.nonzero()[0]
46 | ind2 = matches[ind]
47 | tp = homography.make_homog(locs[:,:2].T)
48 | # compute homography, count inliers. if not enough matches return empty list
49 | try:
50 | H,inliers = homography.H_from_ransac(fp[:,ind],tp[:,ind2],model,match_theshold=4)
51 | except:
52 | inliers = []
53 | # store inlier count
54 | rank[ndx] = len(inliers)
55 |
56 | # sort dictionary to get the most inliers first
57 | sorted_rank = sorted(rank.items(), key=lambda t: t[1], reverse=True)
58 | res_geom = [res_reg[0]]+[s[0] for s in sorted_rank]
59 | print 'top matches (homography):', res_geom
60 |
61 |
62 | # plot the top results
63 | imagesearch.plot_results(src,res_reg[:8])
64 | imagesearch.plot_results(src,res_geom[:8])
--------------------------------------------------------------------------------
/newVersion/searchDemo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import cherrypy
3 | import pickle
4 | import urllib
5 | import os
6 | from numpy import *
7 | from PCV.imagesearch import imagesearch
8 |
9 | """
10 | This is the image search demo.
11 | """
12 |
13 |
14 | class SearchDemo:
15 |
16 | def __init__(self):
17 | # load list of images
18 | self.path = './first500/'
19 | self.imlist = [os.path.join(self.path,f) for f in os.listdir(self.path) if f.endswith('.jpg')]
20 | self.nbr_images = len(self.imlist)
21 | self.ndx = range(self.nbr_images)
22 |
23 | # load vocabulary
24 | f = open('./first500/vocabulary.pkl', 'rb')
25 | self.voc = pickle.load(f)
26 | f.close()
27 |
28 | # set max number of results to show
29 | self.maxres = 49
30 |
31 | # header and footer html
32 | self.header = """
33 | <!doctype html>
34 | <head>
35 | <title>Image search</title>
36 | </head>
37 | <body>
38 | """
39 | self.footer = """
40 | </body>
41 | </html>
42 | """
43 |
44 | def index(self, query=None):
45 | self.src = imagesearch.Searcher('web.db', self.voc)
46 |
47 | html = self.header
48 | html += """
49 | <br />
50 | Click an image to search. Random selection of images.
51 | <br /><br />
52 | """
53 | if query:
54 | # query the database and get top images
55 | res = self.src.query(query)[:self.maxres]
56 | for dist, ndx in res:
57 | imname = self.src.get_filename(ndx)
58 | html += "<a href='?query="+imname+"'>"
59 | html += "<img src='"+imname+"' width='100' />"
60 | html += "</a>"
61 | else:
62 | # show random selection if no query
63 | random.shuffle(self.ndx)
64 | for i in self.ndx[:self.maxres]:
65 | imname = self.imlist[i]
66 | html += "<a href='?query="+imname+"'>"
67 | html += "<img src='"+imname+"' width='100' />"
68 | html += "</a>"
69 |
70 | html += self.footer
71 | return html
72 |
73 | index.exposed = True
74 |
75 | cherrypy.quickstart(SearchDemo(), '/', config=os.path.join(os.path.dirname(__file__), 'service.conf'))
--------------------------------------------------------------------------------
/newVersion/service.conf:
--------------------------------------------------------------------------------
1 | [global]
2 | server.socket_host = "127.0.0.1"
3 | server.socket_port = 8080
4 | server.thread_pool = 10
5 | tools.sessions.on = True
6 | [/]
7 | tools.staticdir.root = "E:/python/gosearch"
8 | [/first500]
9 | tools.staticdir.on = True
10 | tools.staticdir.dir = "first500"
--------------------------------------------------------------------------------
/newVersion/sift:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/sift
--------------------------------------------------------------------------------
/newVersion/siftVLfeat.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/siftVLfeat.exe
--------------------------------------------------------------------------------
/newVersion/siftWin32.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/siftWin32.exe
--------------------------------------------------------------------------------
/newVersion/tmp.pgm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/tmp.pgm
--------------------------------------------------------------------------------
/newVersion/tools/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/newVersion/tools/__init__.py
--------------------------------------------------------------------------------
/newVersion/tools/imtools.py:
--------------------------------------------------------------------------------
1 | import os
2 | from PIL import Image
3 | from pylab import *
4 | from numpy import *
5 |
6 |
7 |
8 | def get_imlist(path):
9 | """ Returns a list of filenames for
10 | all jpg images in a directory. """
11 |
12 | return [os.path.join(path,f) for f in os.listdir(path) if f.endswith('.jpg')]
13 |
14 |
15 | def compute_average(imlist):
16 | """ Compute the average of a list of images. """
17 |
18 | # open first image and make into array of type float
19 | averageim = array(Image.open(imlist[0]), 'f')
20 |
21 | skipped = 0
22 |
23 | for imname in imlist[1:]:
24 | try:
25 | averageim += array(Image.open(imname))
26 | except:
27 | print imname + "...skipped"
28 | skipped += 1
29 |
30 | averageim /= (len(imlist) - skipped)
31 |
32 | # return average as uint8
33 | return array(averageim, 'uint8')
34 |
35 |
36 | def convert_to_grayscale(imlist):
37 | """ Convert a set of images to grayscale. """
38 |
39 | for imname in imlist:
40 | im = Image.open(imname).convert("L")
41 | im.save(imname)
42 |
43 |
44 | def imresize(im,sz):
45 | """ Resize an image array using PIL. """
46 | pil_im = Image.fromarray(uint8(im))
47 |
48 | return array(pil_im.resize(sz))
49 |
50 |
51 | def histeq(im,nbr_bins=256):
52 | """ Histogram equalization of a grayscale image. """
53 |
54 | # get image histogram
55 | imhist,bins = histogram(im.flatten(),nbr_bins,normed=True)
56 | cdf = imhist.cumsum() # cumulative distribution function
57 | cdf = 255 * cdf / cdf[-1] # normalize
58 |
59 | # use linear interpolation of cdf to find new pixel values
60 | im2 = interp(im.flatten(),bins[:-1],cdf)
61 |
62 | return im2.reshape(im.shape), cdf
63 |
64 |
65 | def plot_2D_boundary(plot_range,points,decisionfcn,labels,values=[0]):
66 | """ Plot_range is (xmin,xmax,ymin,ymax), points is a list
67 |     of class points, decisionfcn is a function to evaluate,
68 | labels is a list of labels that decisionfcn returns for each class,
69 | values is a list of decision contours to show. """
70 |
71 | clist = ['b','r','g','k','m','y'] # colors for the classes
72 |
73 | # evaluate on a grid and plot contour of decision function
74 | x = arange(plot_range[0],plot_range[1],.1)
75 | y = arange(plot_range[2],plot_range[3],.1)
76 | xx,yy = meshgrid(x,y)
77 | xxx,yyy = xx.flatten(),yy.flatten() # lists of x,y in grid
78 | zz = array(decisionfcn(xxx,yyy))
79 | zz = zz.reshape(xx.shape)
80 | # plot contour(s) at values
81 | contour(xx,yy,zz,values)
82 |
83 | # for each class, plot the points with '*' for correct, 'o' for incorrect
84 | for i in range(len(points)):
85 | d = decisionfcn(points[i][:,0],points[i][:,1])
86 | correct_ndx = labels[i]==d
87 | incorrect_ndx = labels[i]!=d
88 | plot(points[i][correct_ndx,0],points[i][correct_ndx,1],'*',color=clist[i])
89 | plot(points[i][incorrect_ndx,0],points[i][incorrect_ndx,1],'o',color=clist[i])
90 |
91 | axis('equal')
92 |
--------------------------------------------------------------------------------
/reranking.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import pickle
3 | from PCV.localdescriptors import sift
4 | from PCV.imagesearch import imagesearch
5 | from PCV.geometry import homography
6 | from PCV.tools.imtools import get_imlist
7 |
8 |
9 | # load image list and vocabulary
10 | imlist = get_imlist('./first500/')
11 | nbr_images = len(imlist)
12 | featlist = [imlist[i][:-3]+'sift' for i in range(nbr_images)]
13 |
14 | # load vocabulary
15 | with open('./first500/vocabulary.pkl', 'rb') as f:
16 | voc = pickle.load(f)
17 |
18 | src = imagesearch.Searcher('web.db',voc)
19 |
20 |
21 | # index of query image and number of results to return
22 | q_ind = 0
23 | nbr_results = 20
24 |
25 |
26 | # regular query
27 | res_reg = [w[1] for w in src.query(imlist[q_ind])[:nbr_results]]
28 | print 'top matches (regular):', res_reg # res_reg holds the candidate images (ranked by Euclidean distance)
29 |
30 |
31 | # load image features for query image
32 | q_locs,q_descr = sift.read_features_from_file(featlist[q_ind])
33 | fp = homography.make_homog(q_locs[:,:2].T)
34 |
35 | # RANSAC model for homography fitting
36 | model = homography.RansacModel()
37 |
38 | rank = {}
39 | # load image features for result
40 | for ndx in res_reg[1:]:
41 | locs,descr = sift.read_features_from_file(featlist[ndx-1]) # because 'ndx' is a rowid of the DB that starts at 1.
42 | # locs,descr = sift.read_features_from_file(featlist[ndx])
43 | # get matches
44 | matches = sift.match(q_descr,descr)
45 | ind = matches.nonzero()[0]
46 | ind2 = matches[ind]
47 | tp = homography.make_homog(locs[:,:2].T)
48 | # compute homography, count inliers. if not enough matches return empty list
49 | try:
50 | H,inliers = homography.H_from_ransac(fp[:,ind],tp[:,ind2],model,match_theshold=4)
51 | except:
52 | inliers = []
53 | # store inlier count
54 | rank[ndx] = len(inliers)
55 |
56 | # sort dictionary to get the most inliers first
57 | sorted_rank = sorted(rank.items(), key=lambda t: t[1], reverse=True)
58 | res_geom = [res_reg[0]]+[s[0] for s in sorted_rank]
59 | print 'top matches (homography):', res_geom
60 |
61 |
62 | # plot the top results
63 | imagesearch.plot_results(src,res_reg[:8])
64 | imagesearch.plot_results(src,res_geom[:8])
--------------------------------------------------------------------------------
/searchdemo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import cherrypy
3 | import pickle
4 | import urllib
5 | import os
6 | from numpy import *
7 | from PCV.imagesearch import imagesearch
8 |
9 | """
10 | This is the image search demo.
11 | """
12 |
13 |
14 | class SearchDemo:
15 |
16 | def __init__(self):
17 | # load list of images
18 | self.path = './first500/'
19 | self.imlist = [os.path.join(self.path,f) for f in os.listdir(self.path) if f.endswith('.jpg')]
20 | self.nbr_images = len(self.imlist)
21 | self.ndx = range(self.nbr_images)
22 |
23 | # load vocabulary
24 | f = open('./first500/vocabulary.pkl', 'rb')
25 | self.voc = pickle.load(f)
26 | f.close()
27 |
28 | # set max number of results to show
29 | self.maxres = 49
30 |
31 | # header and footer html
32 | self.header = """
33 | <!doctype html>
34 | <head>
35 | <title>Image search</title>
36 | </head>
37 | <body>
38 | """
39 | self.footer = """
40 | </body>
41 | </html>
42 | """
43 |
44 | def index(self, query=None):
45 | self.src = imagesearch.Searcher('web.db', self.voc)
46 |
47 | html = self.header
48 | html += """
49 | <br />
50 | Click an image to search. Random selection of images.
51 | <br /><br />
52 | """
53 | if query:
54 | # query the database and get top images
55 | res = self.src.query(query)[:self.maxres]
56 | for dist, ndx in res:
57 | imname = self.src.get_filename(ndx)
58 | html += "<a href='?query="+imname+"'>"
59 | html += "<img src='"+imname+"' width='100' />"
60 | html += "</a>"
61 | else:
62 | # show random selection if no query
63 | random.shuffle(self.ndx)
64 | for i in self.ndx[:self.maxres]:
65 | imname = self.imlist[i]
66 | html += "<a href='?query="+imname+"'>"
67 | html += "<img src='"+imname+"' width='100' />"
68 | html += "</a>"
69 |
70 | html += self.footer
71 | return html
72 |
73 | index.exposed = True
74 |
75 | cherrypy.quickstart(SearchDemo(), '/', config=os.path.join(os.path.dirname(__file__), 'service.conf'))
--------------------------------------------------------------------------------
/service.conf:
--------------------------------------------------------------------------------
1 | [global]
2 | server.socket_host = "127.0.0.1"
3 | server.socket_port = 8080
4 | server.thread_pool = 10
5 | tools.sessions.on = True
6 | [/]
7 | tools.staticdir.root = "E:/python/gosearch"
8 | [/first500]
9 | tools.staticdir.on = True
10 | tools.staticdir.dir = "first500"
--------------------------------------------------------------------------------
/utils/win32vlfeat/Microsoft.VC90.CRT.manifest:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/utils/win32vlfeat/aib.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/aib.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/mser.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/mser.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/msvcr90.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/msvcr90.dll
--------------------------------------------------------------------------------
/utils/win32vlfeat/sift.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/sift.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_gauss_elimination.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_gauss_elimination.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_getopt_long.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_getopt_long.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_gmm.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_gmm.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_heap-def.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_heap-def.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_host.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_host.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_imopv.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_imopv.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_kmeans.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_kmeans.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_liop.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_liop.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_mathop.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_mathop.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_mathop_abs.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_mathop_abs.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_nan.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_nan.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_qsort-def.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_qsort-def.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_rand.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_rand.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_sqrti.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_sqrti.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_stringop.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_stringop.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_svd2.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_svd2.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_threads.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_threads.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/test_vec_comp.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/test_vec_comp.exe
--------------------------------------------------------------------------------
/utils/win32vlfeat/vl.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/vl.dll
--------------------------------------------------------------------------------
/utils/win32vlfeat/vl.lib:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/willard-yuan/py-cbir-image-search-engine/14185fedf7549488ea670cae2585499933f5e1b4/utils/win32vlfeat/vl.lib
--------------------------------------------------------------------------------