├── .gitignore
├── LICENSE
├── README.md
├── build
├── build.xml
└── lib
│ ├── yui-compressor-ant-task-0.5.1.jar
│ └── yuicompressor-2.4.7.jar
├── datastore
├── datastore-to-csv.py
├── mysql-to-csv.py
└── tthistory-trends.sql
├── src
├── .gitignore
├── app.yaml
├── appengine_config.py
├── cachepy.py
├── cloud_storage_utils.py
├── credentials.py
├── cron.yaml
├── csv_utils.py
├── data_model_converter.py
├── error.html
├── export_as_csv.py
├── get_trends_task.py
├── globals.py
├── index.html
├── index.yaml
├── insert_dummy_entity_task.py
├── layer_cache.py
├── lib
│ ├── cloudstorage
│ │ ├── __init__.py
│ │ ├── api_utils.py
│ │ ├── cloudstorage_api.py
│ │ ├── common.py
│ │ ├── errors.py
│ │ ├── rest_api.py
│ │ ├── storage_api.py
│ │ └── test_utils.py
│ ├── httplib2
│ │ ├── __init__.py
│ │ ├── cacerts.txt
│ │ ├── iri2uri.py
│ │ └── socks.py
│ └── oauth2
│ │ ├── __init__.py
│ │ ├── _compat.py
│ │ ├── _version.py
│ │ └── clients
│ │ ├── __init__.py
│ │ ├── imap.py
│ │ └── smtp.py
├── migrate.py
├── model.py
├── page_handler.py
├── queue.yaml
├── rate_limit_manager.py
├── send_email.py
├── static
│ ├── bootstrap
│ │ ├── config.json
│ │ ├── css
│ │ │ └── bootstrap.min.css
│ │ └── js
│ │ │ └── bootstrap.min.js
│ ├── img
│ │ ├── arrows.png
│ │ ├── dark_wall.png
│ │ ├── favicon.ico
│ │ ├── logo_b.png
│ │ ├── logo_m.png
│ │ └── logo_s.png
│ ├── robots.txt
│ ├── scripts
│ │ ├── all.min.js
│ │ ├── d3.v3.min.js
│ │ ├── jquery-3.1.1.min.js
│ │ ├── json2.js
│ │ └── main.js
│ ├── sitemap.xml
│ └── stylesheets
│ │ ├── all.min.css
│ │ ├── google-font.css
│ │ └── main.css
├── summary_task.py
├── timezone_aware_date.py
├── trend_manager.py
└── twitter.py
└── test
├── csvToJson.py
├── merge_sort.py
├── test.js
├── trends.csv
└── twitter_data.py
/.gitignore:
--------------------------------------------------------------------------------
1 | /.settings
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License
2 |
3 | Copyright (c) 2013 Mustafa İlhan
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Trending Topic History (tt-history)
2 | ===================================
3 |
4 | This project has stored the trending topics on Twitter since July 2013. Specifically stored trending topics and hashtags are the ones that appeared in the regions: Turkey and World Wide. Every 10 minutes trending topics are fetched via Twitter API and stored on the database. Website of this project visually shows trending topics in terms of how much time they appeared on the list of Twitter.
5 |
6 | Motivations:
7 | ------------
8 | Twitter shows us only the current trending topics with their volume information, like 11.7K tweets in the last hour. However, what were the trending topics one year ago at 12:00 PM? How long do people continue to talk about a topic? What is the average, minimum, or maximum lifetime of a trending topic? What is the relationship between the volume and the duration of a topic? How do they change across regions? These questions are some of those that motivated me to build this project. Further, it has drawn the attention of researchers around the world, who have requested the data collected by this project. It has already been used in one research project, with others in progress.
9 |
10 | Limitations:
11 | ------------
12 | Although collecting trending topics of every region is a trivial job, it is not preferred because of these two problems:
13 | 1. Twitter API has a request rate limit. Therefore, it does not allow too many requests in a given time frame. Fetching every region's trending topics would exceed the rate limit.
14 | 2. It increases the datastore write costs and bills on Google App Engine. This project does not have any income (I have just started to accept donations via the website to keep it running). Therefore, keeping bills low is preferable.
15 |
16 | Because of these two limitations, only Turkey and World Wide are selected for collecting trending topics. One of them is Turkey because it is my hometown. The other is World Wide, in order to offer common trending topics to the visitors of the website.
17 |
18 | Credits:
19 | --------
20 | - Google App Engine (for hosting and datastore)
21 | - Python-Twitter (for Twitter API)
22 | - jQuery (for building web ui)
23 | - D3.js (for visualization)
24 | - (for loading animation and underlining animation)
25 | - tipsy (for tooltips)
26 |
--------------------------------------------------------------------------------
/build/build.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
34 |
35 |
36 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
--------------------------------------------------------------------------------
/build/lib/yui-compressor-ant-task-0.5.1.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ilhan-mstf/tt-history/1bb60cb81e97ef1abecf657cfa078798bb29cace/build/lib/yui-compressor-ant-task-0.5.1.jar
--------------------------------------------------------------------------------
/build/lib/yuicompressor-2.4.7.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ilhan-mstf/tt-history/1bb60cb81e97ef1abecf657cfa078798bb29cace/build/lib/yuicompressor-2.4.7.jar
--------------------------------------------------------------------------------
/datastore/datastore-to-csv.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # encoding=utf8
3 |
4 | import csv
5 | import os
6 | import sys
7 | reload(sys)
8 | sys.setdefaultencoding('utf8')
9 | sys.path.append(
10 | '/home/mustilica/ProgramFiles/google-cloud-sdk/platform/google_appengine')
11 |
12 | from google.appengine.api.files import records
13 | from google.appengine.datastore import entity_pb
14 | from google.appengine.api import datastore
15 |
16 | files = ['output-' + ` i ` for i in range(185)]
17 |
18 | with open("all.csv", 'w') as csvfile:
19 | writer = csv.DictWriter(
20 | csvfile,
21 | fieldnames=['woeid', 'time', 'name', 'timestamp'],
22 | quoting=csv.QUOTE_NONNUMERIC,
23 | quotechar='"')
24 | writer.writeheader()
25 |
26 | for f in files:
27 | raw = open(f, 'r')
28 | try:
29 | reader = records.RecordsReader(raw)
30 | for record in reader:
31 | entity_proto = entity_pb.EntityProto(contents=record)
32 | entity = datastore.Entity.FromPb(entity_proto)
33 | writer.writerow(dict((k, v) for k, v in entity.iteritems()))
34 | print f, "completed."
35 | finally:
36 | raw.close()
37 |
--------------------------------------------------------------------------------
/datastore/mysql-to-csv.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # encoding=utf8
3 |
4 | import mysql.connector
5 | import sys
6 | import pytz
7 | import time
8 | import gzip
9 | import csv
10 |
11 | from datetime import datetime, timedelta
12 | from pytz import timezone
13 |
14 | reload(sys)
15 | sys.setdefaultencoding('utf8')
16 |
17 | file_fmt = '%Y-%m-%d'
18 |
19 | def fetchData(cur, woeid, timestamp, end_timestamp):
20 | # Use all the SQL you like
21 | params = {'woeid': woeid, 'start': timestamp, 'end': end_timestamp}
22 | cur.execute("""
23 | SELECT name, SUM(duration) FROM trends
24 | WHERE woeid = %(woeid)s
25 | AND timestamp >= %(start)s
26 | AND timestamp < %(end)s
27 | GROUP BY name
28 | ORDER BY SUM(duration) DESC""", params)
29 |
30 | return cur.fetchall()
31 |
32 |
33 | def createCsvFile(data, woeid, loc_dt):
34 | filename = '/home/mustilica/tthistory_backup/csvBucket/woeid-%d/%s.csv.gz' % (
35 | woeid, loc_dt.strftime(file_fmt))
36 |
37 | # Print all the first cell of all the rows
38 | trends = [{
39 | 'name': row[0],
40 | 'duration': row[1],
41 | 'volume': -1
42 | } for row in data]
43 |
44 | #write(sorted(trends, key=lambda x: x['duration'], reverse=True), filename)
45 | write(trends, filename)
46 |
47 | def write(data, filename):
48 | fieldnames = ['name', 'duration', 'volume']
49 | with gzip.open(filename, 'w') as f:
50 | csvWriter = csv.DictWriter(
51 | f,
52 | fieldnames=fieldnames,
53 | quoting=csv.QUOTE_NONNUMERIC,
54 | quotechar='"')
55 | csvWriter.writeheader()
56 | for obj in data:
57 | csvWriter.writerow(dict((k, v) for k, v in obj.iteritems()))
58 |
59 | # Connect to db
60 | db = mysql.connector.connect(
61 | user='root', password='5tonnane', host='127.0.0.1', database='tthistory')
62 |
63 | try:
64 | # It will let you execute all the queries you need.
65 | cur = db.cursor()
66 |
67 | for woeid in [23424969]: #[1, 23424969]
68 | # Init date and time values
69 | fmt = '%Y-%m-%d %H:%M:%S %Z%z'
70 | timezoneData = timezone('Europe/Istanbul' if woeid == 23424969 else 'UTC')
71 | loc_dt = timezoneData.localize(datetime(2013, 7, 12, 0, 0, 0))
72 | timestamp = int(time.mktime(loc_dt.timetuple()))
73 |
74 | while timestamp < 1478974943:
75 | print(loc_dt.strftime(fmt))
76 | end_loc_dt = timezoneData.normalize(loc_dt + timedelta(days=1))
77 | end_timestamp = int(time.mktime(end_loc_dt.timetuple()))
78 |
79 | data = fetchData(cur, woeid, timestamp, end_timestamp)
80 | createCsvFile(data, woeid, loc_dt)
81 |
82 | loc_dt = end_loc_dt
83 | timestamp = end_timestamp
84 |
85 | finally:
86 | db.close()
87 |
--------------------------------------------------------------------------------
/datastore/tthistory-trends.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE trends
2 | (
3 | woeid int,
4 | duration int,
5 | name varchar(255),
6 | timestamp int
7 | );
8 |
9 | CREATE INDEX woeid_index
10 | ON trends (woeid);
11 |
12 | CREATE INDEX timestamp_index
13 | ON trends (timestamp);
14 |
15 | CREATE INDEX comp_index
16 | ON trends (woeid, timestamp);
17 |
18 | LOAD DATA INFILE '/var/lib/mysql-files/all_trends_2016_11_12.csv'
19 | IGNORE INTO TABLE trends
20 | CHARACTER SET UTF8
21 | FIELDS TERMINATED BY ','
22 | OPTIONALLY ENCLOSED BY '"'
23 | LINES TERMINATED BY '\n';
24 |
25 | LOAD DATA INFILE '/var/lib/mysql-files/small.csv'
26 | IGNORE INTO TABLE trends
27 | CHARACTER SET UTF8
28 | FIELDS TERMINATED BY ','
29 | OPTIONALLY ENCLOSED BY '"'
30 | LINES TERMINATED BY '\n';
31 |
32 | SELECT name, SUM(duration) FROM trends
33 | WHERE woeid = 1
34 | AND timestamp >= 1478898000
35 | AND timestamp < 1478976120
36 | GROUP BY name
37 | ORDER BY SUM(duration) DESC;
38 |
39 | SELECT woeid, name, SUM(duration) FROM trends
40 | GROUP BY name
41 | ORDER BY SUM(duration) DESC
42 | LIMIT 5;
43 |
44 | mysql> SELECT name, SUM(duration) FROM trends GROUP BY name ORDER BY SUM(duration) DESC LIMIT 5;
45 | +------------------------------+---------------+
46 | | name | SUM(duration) |
47 | +------------------------------+---------------+
48 | | Hayırlı Cumalar | 179220 |
49 | | #fenerinmaçıvar | 64890 |
50 | | #BeşiktaşınMaçıVar | 59250 |
51 | | #BugünGünlerdenGALATASARAY | 53630 |
52 | | Mutlu Pazarlar | 50810 |
53 | +------------------------------+---------------+
54 | 5 rows in set (3 min 50,76 sec)
55 |
--------------------------------------------------------------------------------
/src/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | credentials.txt
3 |
--------------------------------------------------------------------------------
/src/app.yaml:
--------------------------------------------------------------------------------
1 | application: tt-history
2 | version: v3-migrate
3 | runtime: python27
4 | api_version: 1
5 | threadsafe: true
6 |
7 | default_expiration: "7d"
8 |
9 | handlers:
10 | - url: /favicon\.ico
11 | static_files: static/img/favicon.ico
12 | upload: static/img/favicon.ico
13 |
14 | - url: /robots\.txt
15 | static_files: static/robots.txt
16 | upload: static/robots.txt
17 |
18 | - url: /sitemap\.xml
19 | static_files: static/sitemap.xml
20 | upload: static/sitemap.xml
21 |
22 | - url: /static
23 | static_dir: static
24 | expiration: "7d"
25 |
26 | - url: /tasks/getTrends
27 | script: get_trends_task.application
28 | login: admin
29 |
30 | - url: /tasks/summary
31 | script: summary_task.application
32 | login: admin
33 |
34 | - url: /tasks/insertDummyEntity
35 | script: insert_dummy_entity_task.application
36 | login: admin
37 |
38 | - url: /migrate
39 | script: migrate.application
40 | login: admin
41 |
42 | - url: /.*
43 | script: page_handler.application
44 |
45 | builtins:
46 | - remote_api: on
47 |
48 | error_handlers:
49 | - file: error.html
50 |
--------------------------------------------------------------------------------
/src/appengine_config.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | # appengine_config.py
4 | from google.appengine.ext import vendor
5 |
6 | # Add any libraries installed in the "lib" folder.
7 | vendor.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'lib'))
8 |
--------------------------------------------------------------------------------
/src/cachepy.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | retrieved from http://bjk5.com/post/2320616424/layer-caching-in-app-engine-with-memcache-and
4 |
5 | Author: Juan Pablo Guereca
6 |
7 | Module which implements a per GAE instance data cache, similar to what you can achieve with APC in PHP instances.
8 |
9 | Each GAE instance caches the global scope, keeping the state of every variable on the global scope. You can go farther and cache other things, creating a caching layer for each GAE instance, and it's really fast because there is no network transfer like in memcache. Moreover GAE doesn't charge for using it and it can save you many memcache and db requests.
10 |
11 | Not everything are upsides. You can not use it on every case because:
12 |
13 | - There's no way to know if you have set or deleted a key in all the GAE instances that your app is using. Everything you do with Cachepy happens in the instance of the current request and you have N instances, be aware of that.
14 | - The only way to be sure you have flushed all the GAE instances caches is doing a code upload, no code change required.
15 | - The memory available depends on each GAE instance and your app. I've been able to set a 60 millions characters string which is like 57 MB at least. You can cache somethings but not everything.
16 | """
17 |
18 | import time
19 | import logging
20 |
21 | CACHE = {}
22 | STATS_HITS = 0
23 | STATS_MISSES = 0
24 | STATS_KEYS_COUNT = 0
25 | """ Flag to deactivate it on local environment. """
26 | #ACTIVE = False if os.environ.get('SERVER_SOFTWARE').startswith('Devel') else True
27 | ACTIVE = True
28 | """ None means forever. Value in seconds. """
29 | DEFAULT_CACHING_TIME = None
30 | """ Curious thing: A dictionary in the global scope can be referenced and changed inside a function without using the global statement, but it can not be redefined. """
31 |
32 |
33 | def get(key):
34 | """ Gets the data associated to the key or a None """
35 | if ACTIVE is False:
36 | return None
37 |
38 | global CACHE, STATS_MISSES, STATS_HITS
39 | """ Return a key stored in the python instance cache or a None if it has expired or it doesn't exist """
40 | if key not in CACHE:
41 | STATS_MISSES += 1
42 | return None
43 |
44 | value, expiry = CACHE[key]
45 | current_timestamp = time.time()
46 | if expiry == None or current_timestamp < expiry:
47 | STATS_HITS += 1
48 | return value
49 | else:
50 | STATS_MISSES += 1
51 | delete(key)
52 | return None
53 |
54 |
55 | def set(key, value, expiry=DEFAULT_CACHING_TIME):
56 | """ Sets a key in the current instance key, value, expiry seconds till it expires """
57 | if ACTIVE is False:
58 | return None
59 |
60 | global CACHE, STATS_KEYS_COUNT
61 | if key not in CACHE:
62 | STATS_KEYS_COUNT += 1
63 | if expiry is not None:
64 | expiry = time.time() + int(expiry)
65 |
66 | try:
67 | CACHE[key] = (value, expiry)
68 | except MemoryError:
69 | """ It doesn't seem to catch the exception, something in the GAE's python runtime probably """
70 | logging.info("%s memory error setting key '%s'" % (__name__, key))
71 |
72 |
73 | def delete(key):
74 | """ Deletes the key stored in the cache of the current instance, not all the instances. There's no reason to use it except for debugging when developing, use expiry when setting a value instead. """
75 | global CACHE, STATS_KEYS_COUNT
76 | if key in CACHE:
77 | STATS_KEYS_COUNT -= 1
78 | del CACHE[key]
79 |
80 |
81 | def dump():
82 | """ Returns the cache dictionary with all the data of the current instance, not all the instances. There's no reason to use it except for debugging when developing. """
83 | global CACHE
84 | return CACHE
85 |
86 |
87 | def flush():
88 | """ Resets the cache of the current instance, not all the instances. There's no reason to use it except for debugging when developing. """
89 | global CACHE, STATS_KEYS_COUNT
90 | CACHE = {}
91 | STATS_KEYS_COUNT = 0
92 |
93 |
94 | def stats():
95 | """ Return the hits and misses stats, the number of keys and the cache memory address of the current instance, not all the instances. """
96 | global CACHE, STATS_MISSES, STATS_HITS, STATS_KEYS_COUNT
97 | memory_address = "0x" + str("%X" % id(CACHE)).zfill(16)
98 | return {
99 | 'cache_memory_address': memory_address,
100 | 'hits': STATS_HITS,
101 | 'misses': STATS_MISSES,
102 | 'keys_count': STATS_KEYS_COUNT,
103 | }
104 |
105 |
106 | def cacheit(keyformat, expiry=DEFAULT_CACHING_TIME):
107 | """ Decorator to memoize functions in the current instance cache, not all the instances. """
108 |
109 | def decorator(fxn):
110 | def wrapper(*args, **kwargs):
111 | key = keyformat % args[:keyformat.count('%')]
112 | data = get(key)
113 | if data is None:
114 | data = fxn(*args, **kwargs)
115 | set(key, data, expiry)
116 | return data
117 |
118 | return wrapper
119 |
120 | return decorator
121 |
--------------------------------------------------------------------------------
/src/cloud_storage_utils.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | import cloudstorage as gcs
27 | import gzip
28 | import logging
29 |
30 |
31 | class CloudStorageUtils():
32 |
33 | # [START writeFile]
34 | def writeFile(self, data, filename):
35 | """Create a file.
36 | The retry_params specified in the open call will override the default
37 | retry params for this particular file handle.
38 | Args:
39 | filename: filename.
40 | """
41 | logging.info("Creating file %s" % filename)
42 |
43 | with gcs.open(
44 | filename,
45 | 'w',
46 | content_type='text/plain',
47 | options={'content-encoding': 'gzip'},
48 | retry_params=gcs.RetryParams(backoff_factor=1.1)) as f:
49 | gz = gzip.GzipFile('', 'wb', 9, f)
50 | gz.write(data)
51 | gz.close()
52 |
53 | # [END writeFile]
54 |
55 | # TODO getFile
56 | # http://stackoverflow.com/questions/35708725/how-to-open-gzip-file-on-gae-cloud
57 | # https://github.com/GoogleCloudPlatform/appengine-gcs-client/blob/master/python/test/cloudstorage_test.py
58 |
--------------------------------------------------------------------------------
/src/credentials.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 |
3 | """
4 | The MIT License
5 |
6 | Copyright (c) 2013 Mustafa İlhan
7 |
8 | Permission is hereby granted, free of charge, to any person obtaining a copy
9 | of this software and associated documentation files (the "Software"), to deal
10 | in the Software without restriction, including without limitation the rights
11 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | copies of the Software, and to permit persons to whom the Software is
13 | furnished to do so, subject to the following conditions:
14 |
15 | The above copyright notice and this permission notice shall be included in
16 | all copies or substantial portions of the Software.
17 |
18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 | THE SOFTWARE.
25 | """
26 |
27 | class Crenditals:
28 | CONSUMER_KEY = "_your_consumer_key_"
29 | CONSUMER_SECRET = "_your_consumer_secret_"
30 | CLIENT_TOKEN = "_your_access_token_"
31 | CLIENT_SECRET = "_your_access_secret_"
32 |
--------------------------------------------------------------------------------
/src/cron.yaml:
--------------------------------------------------------------------------------
1 | cron:
2 | - description: get and put trends
3 | url: /tasks/getTrends
4 | schedule: every 10 minutes
5 | - description: daily summary job for world wide
6 | url: /tasks/summary?woeid=1&history=ld
7 | schedule: every day 23:59
8 | timezone: UTC
9 | - description: daily summary job for turkey
10 | url: /tasks/summary?woeid=23424969&history=ld
11 | schedule: every day 23:59
12 | timezone: Europe/Istanbul
13 | - description: keep awake requests
14 | url: /index.html
15 | schedule: every 3 minutes
16 | - description: keep awake requests
17 | url: /index.html
18 | schedule: every 7 minutes
19 | - description: keep awake requests
20 | url: /index.html
21 | schedule: every 11 minutes
22 |
--------------------------------------------------------------------------------
/src/csv_utils.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | import cStringIO
27 | import csv
28 | import json
29 |
30 | from data_model_converter import DataModelConverter
31 |
32 |
33 | class CsvUtils:
34 | def jsonToCsv(self, data):
35 | fieldnames = DataModelConverter.CSV_FILE_FIELDS
36 | fileStream = cStringIO.StringIO()
37 | csvWriter = csv.DictWriter(
38 | fileStream,
39 | fieldnames=fieldnames,
40 | quoting=csv.QUOTE_NONNUMERIC,
41 | quotechar='"')
42 | csvWriter.writeheader()
43 | for obj in data:
44 | csvWriter.writerow(
45 | dict((k, v.encode('utf-8') if type(v) is unicode else v)
46 | for k, v in obj.iteritems()))
47 | content = fileStream.getvalue()
48 | fileStream.close()
49 | return content
50 |
51 | def csvToJson(self, filename):
52 | jsonData = []
53 | with open(filename) as f:
54 | f_csv = csv.DictReader(f, quoting=csv.QUOTE_NONNUMERIC)
55 | jsonData = [row for row in f_csv]
56 | return json.dumps(jsonData)
57 |
--------------------------------------------------------------------------------
/src/data_model_converter.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 |
27 | class DataModelConverter:
28 |
29 | CSV_FILE_FIELDS = ['name', 'duration', 'volume']
30 |
31 | def preProcessForCsvFile(self, data):
32 | return [{
33 | 'name': obj['name'],
34 | 'duration': obj['duration'],
35 | 'volume': obj['volume'] if 'volume' in obj else -1
36 | } for obj in data]
37 |
--------------------------------------------------------------------------------
/src/error.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 | ERROR - Trending Topic History
14 |
15 |
16 |
17 |
18 |
22 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
A project that keeps history of trending topics on Twitter.
50 |
51 |
Every 10 minutes, trending topics are fetched from twitter.com and stored on the database. More...
This project has stored the trending topics on Twitter since July 2013. Specifically stored trending topics and hashtags are the ones that appeared in the regions: Turkey and World Wide. Every 10 minutes trending topics are fetched via Twitter API and stored on the database. Website of this project visually shows trending topics in terms of how much time they appeared on the list of Twitter.
74 |
75 |
76 |
79 |
80 |
81 |
82 |
83 |
--------------------------------------------------------------------------------
/src/export_as_csv.py:
--------------------------------------------------------------------------------
1 | # Run from GAE remote API:
2 | # {GAE Path}\remote_api_shell.py -s {YourAPPName}.appspot.com
3 | # import export_as_csv
4 |
5 | import csv
6 | from google.appengine.ext import db
7 | from google.appengine.ext.db import GqlQuery
8 |
9 |
10 | def exportToCsv(query, csvFileName, delimiter):
11 | with open(csvFileName, 'wb') as csvFile:
12 | csvWriter = csv.writer(csvFile, delimiter=delimiter,
13 | quotechar='|', quoting=csv.QUOTE_MINIMAL)
14 | writeHeader(csvWriter)
15 |
16 | rowsPerQuery = 1000
17 | totalRowsSaved = 0
18 | cursor = None
19 | areMoreRows = True
20 |
21 | while areMoreRows:
22 | if cursor is not None:
23 | query.with_cursor(cursor)
24 | items = query.fetch(rowsPerQuery)
25 | cursor = query.cursor()
26 |
27 | currentRows = 0
28 | for item in items:
29 | saveItem(csvWriter, item)
30 | currentRows += 1
31 |
32 | totalRowsSaved += currentRows
33 | areMoreRows = currentRows >= rowsPerQuery
34 | print 'Saved ' + str(totalRowsSaved) + ' rows'
35 |
36 | print 'Finished saving all rows.'
37 |
38 |
39 | def writeHeader(csvWriter):
40 | # Output csv header
41 | csvWriter.writerow(['hashtag', 'region', 'timestamp',
42 | 'duration (in minutes)'])
43 |
44 |
45 | def saveItem(csvWriter, item):
46 | # Save items in preferred format
47 | csvWriter.writerow([item.name, item.woeid, item.timestamp, item.time])
48 |
49 |
50 | class Trend(db.Model):
51 | name = db.StringProperty()
52 | woeid = db.IntegerProperty()
53 | timestamp = db.IntegerProperty()
54 | time = db.IntegerProperty()
55 |
56 |
57 | # Query for items
58 | query = GqlQuery("SELECT * FROM Trend WHERE name = '#JeSuisCharlie'")
59 | exportToCsv(query, '/home/mustilica/remote.csv', ',')
60 |
--------------------------------------------------------------------------------
/src/get_trends_task.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | import logging
27 | import time
28 | import traceback
29 |
30 | from google.appengine.ext import ndb
31 | from google.appengine.ext import webapp
32 | from google.appengine.ext.webapp.util import run_wsgi_app
33 | from model import TrendWindow, Error
34 | from globals import Globals
35 | from credentials import Crenditals
36 | from trend_manager import TrendManager
37 | from twitter import TwitterApi
38 | from send_email import SendEmail
39 | from google.appengine.api import taskqueue
40 |
41 |
class GetTrendsTask(webapp.RequestHandler):
    """Fetches current trends from the Twitter API for every configured
    region, stores them as TrendWindow entities and refreshes the cache."""

    def get(self):
        """Handle the cron/task-queue GET: fetch, persist and cache trends.

        Datastore puts are issued asynchronously per region and awaited once
        at the end. ValueErrors are only logged; any other failure is
        recorded as an Error entity and reported by email.
        """
        logging.info("GetTrendsTask starting...")

        try:
            # create twitter client
            client = TwitterApi(
                consumer_key=Crenditals.CONSUMER_KEY,
                consumer_secret=Crenditals.CONSUMER_SECRET,
                access_token_key=Crenditals.CLIENT_TOKEN,
                access_token_secret=Crenditals.CLIENT_SECRET)

            q_futures = []
            for region in Globals.REGIONS:
                # request trends from twitter
                response = client.getTrendsByWoeid(woeid=region)
                # get current timestamp in seconds
                timestamp = int(time.time())
                # put trends to db
                entityList = []
                for trend in response:
                    entityList.append(
                        TrendWindow(
                            name=trend['name'],
                            woeid=region,
                            timestamp=timestamp,
                            # fixed 10-minute window per fetch -- matches the
                            # task's scheduling interval (see Globals)
                            time=10,
                            volume=trend['tweet_volume']))
                q_futures.extend(ndb.put_multi_async(entityList))
                self.updateCacheValues(region, entityList)

            # wait all async put operations to finish.
            ndb.Future.wait_all(q_futures)
        except ValueError as v_e:
            logging.error(v_e)
            # self.retry()
        # Fix: use the `except ... as ...` form instead of the removed
        # Python-2-only `except Exception, e` comma syntax.
        except Exception as e:
            traceback.print_exc()
            Error(msg=str(e), timestamp=int(time.time())).put()
            SendEmail().send('Error on GetTrendsTask', str(e))
            # self.retry()

        logging.info("GetTrendsTask finished.")

    def updateCacheValues(self, region, entityList):
        """Push the freshly fetched trends into the layered cache for region."""
        logging.info("updateCacheValues()")
        trendManager = TrendManager()
        trendManager.updateRawTrends(
            trendManager.convertTrendsToDict(entityList),
            "trends-ld-" + str(region))

    # Retry
    def retry(self):
        """Re-enqueue this task (currently unused -- call sites commented out)."""
        logging.info('Running task queue for getTrends')
        taskqueue.add(url='/tasks/getTrends')
99 |
100 |
# WSGI application mapping the task URL to the handler above.
application = webapp.WSGIApplication(
    [('/tasks/getTrends', GetTrendsTask)], debug=True)


def main():
    # Entry point for the legacy CGI-style App Engine runtime.
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
111 |
--------------------------------------------------------------------------------
/src/globals.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 |
class Globals:
    """Application-wide constants shared by the task handlers and caches."""

    # Durations, in seconds (used e.g. as cache expirations).
    _1_DAY = 86400  # 24 * 60 * 60 seconds
    _1_WEEK = 604800  # 7 * 24 * 60 * 60 seconds
    _1_MONTH = 2592000  # 30 * 24 * 60 * 60 seconds
    _10_MINUTES = 600  # seconds

    # Presumably the default page size for trend queries -- confirm at callers.
    DEFAULT_LIMIT = 15

    # NOTE(review): looks like a request-count ceiling; verify against
    # rate_limit_manager before relying on its meaning.
    MAX_REQUESTS = 15

    # WOEIDs polled by the trend tasks: 1 = world, 23424969 = Turkey
    # (per the mapping in the trailing comment).
    REGIONS = [
        1, 23424969
    ]  # regions = [('tr', '23424969'), ('usa', '23424977'), ('world', '1')]

    # Layer selectors consumed by layer_cache.
    DUAL_LAYER_MEMCACHE_AND_IN_APP_MEMORY_CACHE = 0  # Cache in both memcache and cachepy by default
    SINGLE_LAYER_MEMCACHE_ONLY = 1
    SINGLE_LAYER_IN_APP_MEMORY_CACHE_ONLY = 2
44 |
--------------------------------------------------------------------------------
/src/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 | Trending Topic History
14 |
15 |
16 |
17 |
18 |
22 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
A project that keeps history of trending topics on Twitter.
50 |
51 |
Every 10 minutes, trending topics are fetched from twitter.com and stored in the database. More...
This project has stored the trending topics on Twitter since July 2013. Specifically, the stored trending topics and hashtags are the ones that appeared in the regions: Turkey and Worldwide. Every 10 minutes, trending topics are fetched via the Twitter API and stored in the database. The website of this project visually shows trending topics in terms of how much time they appeared on Twitter's trending list.
107 |
108 |
109 |
112 |
113 |
114 |
115 |
116 |
--------------------------------------------------------------------------------
/src/index.yaml:
--------------------------------------------------------------------------------
1 | indexes:
2 |
3 | - kind: Trend
4 | properties:
5 | - name: woeid
6 | - name: timestamp
7 | direction: desc
8 |
9 | - kind: TrendWindow
10 | properties:
11 | - name: woeid
12 | - name: timestamp
13 | direction: desc
14 |
--------------------------------------------------------------------------------
/src/insert_dummy_entity_task.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | import logging
27 | import time
28 | import traceback
29 |
30 | from google.appengine.ext import ndb
31 | from google.appengine.ext import webapp
32 | from google.appengine.ext.webapp.util import run_wsgi_app
33 | from model import TrendWindow
34 | from globals import Globals
35 |
36 |
class InsertDummyEntityTask(webapp.RequestHandler):
    """Inserts a fixed set of fake TrendWindow entities for every region.

    Development helper only: the handler returns immediately unless the
    request URL contains "localhost".
    """

    def get(self):
        if "localhost" not in self.request.url:
            return

        logging.info("InsertDummyEntityTask starting...")

        # Hard-coded sample trends (name, duration in minutes, tweet volume).
        dummyVals = [{
            'name': 'mustilica',
            'time': 300,
            'tweet_volume': 45225
        }, {
            'name': 'freebird',
            'time': 30,
            'tweet_volume': 225
        }, {
            'name': 'flyingbird',
            'time': 240,
            'tweet_volume': 5225
        }, {
            'name': 'bahattin abi',
            'time': 80,
            'tweet_volume': 85
        }, {
            'name': '#ThisIsSparta',
            'time': 320,
            'tweet_volume': 231198
        }]

        q_futures = []
        for region in Globals.REGIONS:
            try:
                # get current timestamp in seconds
                timestamp = int(time.time())
                # put trends to db
                entityList = []
                for trend in dummyVals:
                    entityList.append(
                        TrendWindow(
                            name=trend['name'],
                            woeid=region,
                            timestamp=timestamp,
                            time=trend['time'],
                            volume=trend['tweet_volume']))
                q_futures.extend(ndb.put_multi_async(entityList))
            # Fix: drop the Python-2-only `except Exception, e` comma syntax;
            # the bound exception was unused anyway.
            except Exception:
                traceback.print_exc()

        # Wait for all async puts once, after the loop. The old code
        # re-waited on the cumulative future list inside every iteration;
        # waiting once matches GetTrendsTask and does the same work.
        ndb.Future.wait_all(q_futures)

        logging.info("InsertDummyEntityTask finished.")
91 |
92 |
# WSGI application mapping the dev-only dummy-data URL to its handler.
application = webapp.WSGIApplication(
    [('/tasks/insertDummyEntity', InsertDummyEntityTask)], debug=True)


def main():
    # Entry point for the legacy CGI-style App Engine runtime.
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
103 |
--------------------------------------------------------------------------------
/src/layer_cache.py:
--------------------------------------------------------------------------------
1 | from google.appengine.api import memcache
2 | from globals import Globals
3 | import logging
4 | import cachepy
5 |
6 | # layer_cache provides an easy way to cache the result of functions in
7 | # both memcache and cachepy's storage for quick retrieval later.
8 | #
9 | #
10 | #
11 | # _____Explanation by examples:_____
12 | #
13 | # Cache in both memcache and cachepy the result of
14 | # this long-running function using a static key,
15 | # and return the result when available instead of recalculating:
16 | #
17 | # @cache_with_key("calculate_user_averages")
18 | # def calculate_user_averages:
19 | # ...do lots of long-running work...
20 | #
21 | #
22 | #
23 | # and with expiration every minute:
24 | #
25 | # @cache_with_key("calculate_user_averages", expiration=60)
26 | # def calculate_user_averages:
27 | # ...do lots of long-running work...
28 | #
29 | #
30 | #
31 | # Cache using key generated by utility function that
32 | # varies the key based on the function's input parameters:
33 | #
34 | # @cache_with_key_function(lambda object: return "layer_cache_key_for_object_%s" % object.id())
35 | # def calculate_object_average(object):
36 | # ... do lots of long-running work...
37 | #
38 | #
39 | #
40 | # _____Manually busting the cache:_____
41 | #
42 | # When you call your cached function, just pass a special "bust_cache"
43 | # named parameter to ignore any existing cached values and replace
44 | # with whatever is newly returned:
45 | #
46 | # calculate_object_average(object, bust_cache=True)
47 | #
48 | #
49 | #
50 | # _____Other settings/options:_____
51 | #
52 | # Only cache in memcache, not cachepy's in-app memory cache:
53 | # @cache_with_key(... layer=SINGLE_LAYER_MEMCACHE_ONLY)
54 | #
55 | # Only cache in cachepy's in-app memory cache, not memcache:
56 | # @cache_with_key(... layer=SINGLE_LAYER_IN_APP_MEMORY_CACHE_ONLY)
57 | #
# Persist the cached values across different uploaded app versions
59 | # (by default this will not happen w/ memcache):
60 | # @cache_with_key(... persist_across_app_versions=True)
61 |
62 |
def cache(layer=None, expiration=None, bust_cache=None):
    """Decorator factory routing calls through the two-layer cache.

    Any of layer/expiration/bust_cache left as None can instead be supplied
    per-call as keyword arguments (resolved in layer_cache_check_set_return).
    """

    def decorate(func):
        def cached_call(*call_args, **call_kwargs):
            return layer_cache_check_set_return(
                func, layer, expiration, bust_cache, *call_args, **call_kwargs)

        return cached_call

    return decorate
72 |
73 |
def layer_cache_check_set_return(target, d_layer, d_expiration, d_bust_cache,
                                 *args, **kwargs):
    """Return the cached value for kwargs["key"], computing it via `target`
    and storing it in the selected cache layers on a miss.

    Decorator-supplied settings (the d_* arguments) take precedence; a None
    falls back to the matching keyword argument, then to the module default.
    """
    key = kwargs.get("key", "")
    layer = (kwargs.get("layer",
                        Globals.DUAL_LAYER_MEMCACHE_AND_IN_APP_MEMORY_CACHE)
             if d_layer is None else d_layer)
    expiration = (kwargs.get("expiration", Globals._1_WEEK)
                  if d_expiration is None else d_expiration)
    bust_cache = (kwargs.get("bust_cache", False)
                  if d_bust_cache is None else d_bust_cache)

    if not bust_cache:
        # Read path: try the in-process layer first, then memcache.
        if layer != Globals.SINGLE_LAYER_MEMCACHE_ONLY:
            hit = cachepy.get(key)
            if hit is not None:
                return hit

        if layer != Globals.SINGLE_LAYER_IN_APP_MEMORY_CACHE_ONLY:
            hit = memcache.Client().get(key)
            if hit is not None:
                # Copy the memcache hit into the in-app layer for next time.
                cachepy.set(key, hit)
                return hit

    result = target(*args, **kwargs)

    # target's execution may have rewritten kwargs["key"]; re-read it
    # before writing the result back to the caches.
    key = kwargs.get("key", "")

    if layer != Globals.SINGLE_LAYER_MEMCACHE_ONLY:
        cachepy.set(key, result, expiry=expiration)

    if layer != Globals.SINGLE_LAYER_IN_APP_MEMORY_CACHE_ONLY:
        if not memcache.Client().set(key, result, time=expiration):
            logging.error("Memcache set failed for %s" % key)

    return result
121 |
--------------------------------------------------------------------------------
/src/lib/cloudstorage/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2014 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing,
10 | # software distributed under the License is distributed on an
11 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
12 | # either express or implied. See the License for the specific
13 | # language governing permissions and limitations under the License.
14 |
15 | """Client Library for Google Cloud Storage."""
16 |
17 |
18 |
19 |
20 | from .api_utils import RetryParams
21 | from .api_utils import set_default_retry_params
22 | from cloudstorage_api import *
23 | from .common import CSFileStat
24 | from .common import GCSFileStat
25 | from .common import validate_bucket_name
26 | from .common import validate_bucket_path
27 | from .common import validate_file_path
28 | from errors import *
29 | from storage_api import *
30 |
--------------------------------------------------------------------------------
/src/lib/cloudstorage/api_utils.py:
--------------------------------------------------------------------------------
1 | # Copyright 2013 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing,
10 | # software distributed under the License is distributed on an
11 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
12 | # either express or implied. See the License for the specific
13 | # language governing permissions and limitations under the License.
14 |
15 | """Util functions and classes for cloudstorage_api."""
16 |
17 |
18 |
19 | __all__ = ['set_default_retry_params',
20 | 'RetryParams',
21 | ]
22 |
23 | import copy
24 | import httplib
25 | import logging
26 | import math
27 | import os
28 | import threading
29 | import time
30 | import urllib
31 |
32 |
33 | try:
34 | from google.appengine.api import app_identity
35 | from google.appengine.api import urlfetch
36 | from google.appengine.api import urlfetch_errors
37 | from google.appengine.datastore import datastore_rpc
38 | from google.appengine.ext import ndb
39 | from google.appengine.ext.ndb import eventloop
40 | from google.appengine.ext.ndb import tasklets
41 | from google.appengine.ext.ndb import utils
42 | from google.appengine import runtime
43 | from google.appengine.runtime import apiproxy_errors
44 | except ImportError:
45 | from google.appengine.api import app_identity
46 | from google.appengine.api import urlfetch
47 | from google.appengine.api import urlfetch_errors
48 | from google.appengine.datastore import datastore_rpc
49 | from google.appengine import runtime
50 | from google.appengine.runtime import apiproxy_errors
51 | from google.appengine.ext import ndb
52 | from google.appengine.ext.ndb import eventloop
53 | from google.appengine.ext.ndb import tasklets
54 | from google.appengine.ext.ndb import utils
55 |
56 |
# Transient failure types that warrant an automatic retry of the urlfetch.
_RETRIABLE_EXCEPTIONS = (urlfetch.DownloadError,
                         urlfetch_errors.InternalTransientError,
                         apiproxy_errors.Error,
                         app_identity.InternalError,
                         app_identity.BackendDeadlineExceeded)

# Per-thread slot holding the default RetryParams for the current request.
_thread_local_settings = threading.local()
_thread_local_settings.default_retry_params = None
65 |
66 |
def set_default_retry_params(retry_params):
  """Set a default RetryParams for current thread current request.

  A copy is stored, so mutating the caller's instance afterwards has
  no effect on the stored default.
  """
  _thread_local_settings.default_retry_params = copy.copy(retry_params)
70 |
71 |
def _get_default_retry_params():
  """Get default RetryParams for current request and current thread.

  Returns:
    A new instance of the default RetryParams.
  """
  stored = getattr(_thread_local_settings, 'default_retry_params', None)
  if stored is not None and stored.belong_to_current_request():
    return copy.copy(stored)
  # Nothing registered for this request: hand back fresh default settings.
  return RetryParams()
83 |
84 |
def _quote_filename(filename):
  """Quotes filename to use as a valid URI path.

  Note: urllib.quote leaves '/' unescaped by default, so the
  bucket/object separators survive quoting.

  Args:
    filename: user provided filename. /bucket/filename.

  Returns:
    The filename properly quoted to use as URI's path component.
  """
  return urllib.quote(filename)
95 |
96 |
def _unquote_filename(filename):
  """Unquotes a valid URI path back to its filename.

  This is the opposite of _quote_filename; quoting then unquoting a
  path round-trips it unchanged.

  Args:
    filename: a quoted filename. /bucket/some%20filename.

  Returns:
    The filename unquoted.
  """
  return urllib.unquote(filename)
109 |
110 |
111 | def _should_retry(resp):
112 | """Given a urlfetch response, decide whether to retry that request."""
113 | return (resp.status_code == httplib.REQUEST_TIMEOUT or
114 | (resp.status_code >= 500 and
115 | resp.status_code < 600))
116 |
117 |
class _RetryWrapper(object):
  """A wrapper that wraps retry logic around any tasklet."""

  def __init__(self,
               retry_params,
               retriable_exceptions=_RETRIABLE_EXCEPTIONS,
               should_retry=lambda r: False):
    """Init.

    Args:
      retry_params: an RetryParams instance.
      retriable_exceptions: a list of exception classes that are retriable.
      should_retry: a function that takes a result from the tasklet and returns
        a boolean. True if the result should be retried.
    """
    self.retry_params = retry_params
    self.retriable_exceptions = retriable_exceptions
    self.should_retry = should_retry

  @ndb.tasklet
  def run(self, tasklet, **kwds):
    """Run a tasklet with retry.

    The retry should be transparent to the caller: if no results
    are successful, the exception or result from the last retry is returned
    to the caller.

    Args:
      tasklet: the tasklet to run.
      **kwds: keywords arguments to run the tasklet.

    Raises:
      The exception from running the tasklet.

    Returns:
      The result from running the tasklet.
    """
    start_time = time.time()
    n = 1  # attempt counter; the first attempt is 1 (see RetryParams.delay)

    while True:
      e = None
      result = None
      got_result = False

      try:
        result = yield tasklet(**kwds)
        got_result = True
        if not self.should_retry(result):
          # ndb.Return is how a tasklet yields its final value.
          raise ndb.Return(result)
      except runtime.DeadlineExceededError:
        # Never retry past the request deadline; propagate immediately.
        logging.debug(
            'Tasklet has exceeded request deadline after %s seconds total',
            time.time() - start_time)
        raise
      except self.retriable_exceptions, e:  # py2-only `except X, e` syntax
        pass

      if n == 1:
        logging.debug('Tasklet is %r', tasklet)

      delay = self.retry_params.delay(n, start_time)

      if delay <= 0:
        # Retry budget exhausted: surface the last result or exception.
        logging.debug(
            'Tasklet failed after %s attempts and %s seconds in total',
            n, time.time() - start_time)
        if got_result:
          raise ndb.Return(result)
        elif e is not None:
          raise e
        else:
          assert False, 'Should never reach here.'

      if got_result:
        logging.debug(
            'Got result %r from tasklet.', result)
      else:
        logging.debug(
            'Got exception "%r" from tasklet.', e)
      logging.debug('Retry in %s seconds.', delay)
      n += 1
      yield tasklets.sleep(delay)
201 |
202 |
class RetryParams(object):
  """Retry configuration parameters."""

  _DEFAULT_USER_AGENT = 'App Engine Python GCS Client'

  @datastore_rpc._positional(1)  # forces all init arguments to be keyword-only
  def __init__(self,
               backoff_factor=2.0,
               initial_delay=0.1,
               max_delay=10.0,
               min_retries=3,
               max_retries=6,
               max_retry_period=30.0,
               urlfetch_timeout=None,
               save_access_token=False,
               _user_agent=None):
    """Init.

    This object is unique per request per thread.

    Library will retry according to this setting when App Engine Server
    can't call urlfetch, urlfetch timed out, or urlfetch got a 408 or
    500-600 response.

    Args:
      backoff_factor: exponential backoff multiplier.
      initial_delay: seconds to delay for the first retry.
      max_delay: max seconds to delay for every retry.
      min_retries: min number of times to retry. This value is automatically
        capped by max_retries.
      max_retries: max number of times to retry. Set this to 0 for no retry.
      max_retry_period: max total seconds spent on retry. Retry stops when
        this period passed AND min_retries has been attempted.
      urlfetch_timeout: timeout for urlfetch in seconds. Could be None,
        in which case the value will be chosen by urlfetch module.
      save_access_token: persist access token to datastore to avoid
        excessive usage of GetAccessToken API. Usually the token is cached
        in process and in memcache. In some cases, memcache isn't very
        reliable.
      _user_agent: The user agent string that you want to use in your requests.
    """
    self.backoff_factor = self._check('backoff_factor', backoff_factor)
    self.initial_delay = self._check('initial_delay', initial_delay)
    self.max_delay = self._check('max_delay', max_delay)
    self.max_retry_period = self._check('max_retry_period', max_retry_period)
    self.max_retries = self._check('max_retries', max_retries, True, int)
    self.min_retries = self._check('min_retries', min_retries, True, int)
    # min_retries can never exceed max_retries.
    if self.min_retries > self.max_retries:
      self.min_retries = self.max_retries

    self.urlfetch_timeout = None
    if urlfetch_timeout is not None:
      self.urlfetch_timeout = self._check('urlfetch_timeout', urlfetch_timeout)
    self.save_access_token = self._check('save_access_token', save_access_token,
                                         True, bool)
    self._user_agent = _user_agent or self._DEFAULT_USER_AGENT

    # Snapshot of the request log id; used by belong_to_current_request().
    self._request_id = os.getenv('REQUEST_LOG_ID')

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)

  @classmethod
  def _check(cls, name, val, can_be_zero=False, val_type=float):
    """Check init arguments.

    Args:
      name: name of the argument. For logging purpose.
      val: value. Value has to be non negative number.
      can_be_zero: whether value can be zero.
      val_type: Python type of the value.

    Returns:
      The value.

    Raises:
      ValueError: when invalid value is passed in.
      TypeError: when invalid value type is passed in.
    """
    valid_types = [val_type]
    # An int is acceptable wherever a float is expected.
    if val_type is float:
      valid_types.append(int)

    if type(val) not in valid_types:
      raise TypeError(
          'Expect type %s for parameter %s' % (val_type.__name__, name))
    if val < 0:
      raise ValueError(
          'Value for parameter %s has to be greater than 0' % name)
    if not can_be_zero and val == 0:
      raise ValueError(
          'Value for parameter %s can not be 0' % name)
    return val

  def belong_to_current_request(self):
    # True iff this instance was created during the current request.
    return os.getenv('REQUEST_LOG_ID') == self._request_id

  def delay(self, n, start_time):
    """Calculate delay before the next retry.

    Args:
      n: the number of current attempt. The first attempt should be 1.
      start_time: the time when retry started in unix time.

    Returns:
      Number of seconds to wait before next retry. -1 if retry should give up.
    """
    if (n > self.max_retries or
        (n > self.min_retries and
         time.time() - start_time > self.max_retry_period)):
      return -1
    # Exponential backoff, capped at max_delay.
    return min(
        math.pow(self.backoff_factor, n-1) * self.initial_delay,
        self.max_delay)
322 |
323 |
def _run_until_rpc():
  """Eagerly evaluate tasklets until it is blocking on some RPC.

  Usually ndb eventloop el isn't run until some code calls future.get_result().

  When an async tasklet is called, the tasklet wrapper evaluates the tasklet
  code into a generator, enqueues a callback _help_tasklet_along onto
  the el.current queue, and returns a future.

  _help_tasklet_along, when called by the el, will
  get one yielded value from the generator. If the value if another future,
  set up a callback _on_future_complete to invoke _help_tasklet_along
  when the dependent future fulfills. If the value if a RPC, set up a
  callback _on_rpc_complete to invoke _help_tasklet_along when the RPC fulfills.
  Thus _help_tasklet_along drills down
  the chain of futures until some future is blocked by RPC. El runs
  all callbacks and constantly check pending RPC status.
  """
  el = eventloop.get_event_loop()
  # Drain all immediately-runnable callbacks; stop once only RPC waits remain.
  while el.current:
    el.run0()
345 |
346 |
def _eager_tasklet(tasklet):
  """Decorator to turn tasklet to run eagerly."""

  @utils.wrapping(tasklet)
  def eager_wrapper(*args, **kwds):
    fut = tasklet(*args, **kwds)
    # Advance the event loop right away instead of waiting for get_result().
    _run_until_rpc()
    return fut

  return eager_wrapper
357 |
--------------------------------------------------------------------------------
/src/lib/cloudstorage/common.py:
--------------------------------------------------------------------------------
1 | # Copyright 2012 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing,
10 | # software distributed under the License is distributed on an
11 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
12 | # either express or implied. See the License for the specific
13 | # language governing permissions and limitations under the License.
14 |
15 | """Helpers shared by cloudstorage_stub and cloudstorage_api."""
16 |
17 |
18 |
19 |
20 |
# Public re-export surface of this helper module.
__all__ = ['CS_XML_NS',
           'CSFileStat',
           'dt_str_to_posix',
           'local_api_url',
           'LOCAL_GCS_ENDPOINT',
           'local_run',
           'get_access_token',
           'get_stored_content_length',
           'get_metadata',
           'GCSFileStat',
           'http_time_to_posix',
           'memory_usage',
           'posix_time_to_http',
           'posix_to_dt_str',
           'set_access_token',
           'validate_options',
           'validate_bucket_name',
           'validate_bucket_path',
           'validate_file_path',
           ]
41 |
42 |
43 | import calendar
44 | import datetime
45 | from email import utils as email_utils
46 | import logging
47 | import os
48 | import re
49 |
50 | try:
51 | from google.appengine.api import runtime
52 | except ImportError:
53 | from google.appengine.api import runtime
54 |
55 |
# Bucket names: 3-63 characters drawn from lowercase letters, digits,
# '.', '-' and '_'.
_GCS_BUCKET_REGEX_BASE = r'[a-z0-9\.\-_]{3,63}'
_GCS_BUCKET_REGEX = re.compile(_GCS_BUCKET_REGEX_BASE + r'$')
_GCS_BUCKET_PATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'$')
_GCS_PATH_PREFIX_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'.*')
_GCS_FULLPATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'/.*')
# Header names/prefixes treated as user-settable object metadata
# (see get_metadata below).
_GCS_METADATA = ['x-goog-meta-',
                 'content-disposition',
                 'cache-control',
                 'content-encoding']
_GCS_OPTIONS = _GCS_METADATA + ['x-goog-acl']
CS_XML_NS = 'http://doc.s3.amazonaws.com/2006-03-01'
LOCAL_GCS_ENDPOINT = '/_ah/gcs'
_access_token = ''  # shared token, managed via set_access_token()


_MAX_GET_BUCKET_RESULT = 1000
72 |
73 |
def set_access_token(access_token):
  """Set the shared access token to authenticate with Google Cloud Storage.

  When set, the library will always attempt to communicate with the
  real Google Cloud Storage with this token even when running on dev appserver.
  Note the token could expire so it's up to you to renew it.

  When absent, the library will automatically request and refresh a token
  on appserver, or when on dev appserver, talk to a Google Cloud Storage
  stub.

  Args:
    access_token: you can get one by run 'gsutil -d ls' and copy the
      str after 'Bearer'.
  """
  global _access_token  # module-level state read back by get_access_token()
  _access_token = access_token
91 |
92 |
def get_access_token():
  """Returns the shared access token (empty string when none was set)."""
  return _access_token
96 |
97 |
class GCSFileStat(object):
  """Container for GCS file stat."""

  def __init__(self,
               filename,
               st_size,
               etag,
               st_ctime,
               content_type=None,
               metadata=None,
               is_dir=False):
    """Initialize.

    For files, the non optional arguments are always set.
    For directories, only filename and is_dir is set.

    Args:
      filename: a Google Cloud Storage filename of form '/bucket/filename'.
      st_size: file size in bytes. long compatible.
      etag: hex digest of the md5 hash of the file's content. str.
      st_ctime: posix file creation time. float compatible.
      content_type: content type. str.
      metadata: a str->str dict of user specified options when creating
        the file. Possible keys are x-goog-meta-, content-disposition,
        content-encoding, and cache-control.
      is_dir: True if this represents a directory. False if this is a real file.
    """
    self.filename = filename
    self.is_dir = is_dir
    self.st_size = None
    self.st_ctime = None
    self.etag = None
    self.content_type = content_type
    self.metadata = metadata

    if not is_dir:
      self.st_size = long(st_size)
      self.st_ctime = float(st_ctime)
      # Strip the surrounding quotes some servers put around the etag digest.
      if etag[0] == '"' and etag[-1] == '"':
        etag = etag[1:-1]
      self.etag = etag

  def __repr__(self):
    if self.is_dir:
      return '(directory: %s)' % self.filename

    return (
        '(filename: %(filename)s, st_size: %(st_size)s, '
        'st_ctime: %(st_ctime)s, etag: %(etag)s, '
        'content_type: %(content_type)s, '
        'metadata: %(metadata)s)' %
        dict(filename=self.filename,
             st_size=self.st_size,
             st_ctime=self.st_ctime,
             etag=self.etag,
             content_type=self.content_type,
             metadata=self.metadata))

  def __cmp__(self, other):
    """Order by filename. Raises ValueError for mismatched operand types."""
    if not isinstance(other, self.__class__):
      # Bug fix: the format string used to be passed to ValueError together
      # with its arguments (logging style), so the message was never
      # interpolated. Format it explicitly instead.
      raise ValueError('Argument to cmp must have the same type. '
                       'Expect %s, got %s' % (self.__class__.__name__,
                                              other.__class__.__name__))
    if self.filename > other.filename:
      return 1
    elif self.filename < other.filename:
      return -1
    return 0

  def __hash__(self):
    # Prefer the content hash when present; fall back to the filename.
    if self.etag:
      return hash(self.etag)
    return hash(self.filename)
171 |
172 |
173 | CSFileStat = GCSFileStat
174 |
175 |
def get_stored_content_length(headers):
  """Return the content length (in bytes) of the object as stored in GCS.

  GCS normally reports the stored size via 'x-goog-stored-content-length'.
  The local dev_appserver omits that header, so fall back to the standard
  'content-length' header when it is missing.

  Args:
    headers: a dict of headers from the http response.

  Returns:
    the stored content length, or None if neither header is present.
  """
  stored = headers.get('x-goog-stored-content-length')
  if stored is not None:
    return stored
  return headers.get('content-length')
193 |
194 |
def get_metadata(headers):
  """Get user defined options from HTTP response headers.

  Keeps only the headers whose (lowercased) name starts with one of the
  user-metadata prefixes in _GCS_METADATA.
  """
  result = {}
  for header_name, header_value in headers.iteritems():
    lowered = header_name.lower()
    if any(lowered.startswith(prefix) for prefix in _GCS_METADATA):
      result[header_name] = header_value
  return result
199 |
200 |
def validate_bucket_name(name):
  """Validate a Google Storage bucket name.

  Args:
    name: a Google Storage bucket name with no prefix or suffix.

  Raises:
    ValueError: if name is invalid.
    TypeError: if name is not a string.
  """
  _validate_path(name)
  if _GCS_BUCKET_REGEX.match(name):
    return
  raise ValueError('Bucket should be 3-63 characters long using only a-z,'
                   '0-9, underscore, dash or dot but got %s' % name)
214 |
215 |
def validate_bucket_path(path):
  """Validate a Google Cloud Storage bucket path.

  Args:
    path: a Google Storage bucket path. It should have form '/bucket'.

  Raises:
    ValueError: if path is invalid.
    TypeError: if path is not a string.
  """
  _validate_path(path)
  if _GCS_BUCKET_PATH_REGEX.match(path):
    return
  raise ValueError('Bucket should have format /bucket '
                   'but got %s' % path)
229 |
230 |
def validate_file_path(path):
  """Validate a Google Cloud Storage file path.

  Args:
    path: a Google Storage file path. It should have form '/bucket/filename'.

  Raises:
    ValueError: if path is invalid.
    TypeError: if path is not a string.
  """
  _validate_path(path)
  if _GCS_FULLPATH_REGEX.match(path):
    return
  raise ValueError('Path should have format /bucket/filename '
                   'but got %s' % path)
244 |
245 |
def _process_path_prefix(path_prefix):
  """Validate and process a Google Cloud Stoarge path prefix.

  Args:
    path_prefix: a Google Cloud Storage path prefix of format '/bucket/prefix'
      or '/bucket/' or '/bucket'.

  Raises:
    ValueError: if path is invalid.

  Returns:
    a tuple of /bucket and prefix. prefix can be None.
  """
  _validate_path(path_prefix)
  if not _GCS_PATH_PREFIX_REGEX.match(path_prefix):
    raise ValueError('Path prefix should have format /bucket, /bucket/, '
                     'or /bucket/prefix but got %s.' % path_prefix)
  # Locate the '/' separating the bucket from the prefix (skip the
  # leading '/' of the bucket itself).
  separator = path_prefix.find('/', 1)
  if separator == -1:
    return path_prefix, None
  # An empty remainder ('/bucket/') normalizes to a None prefix.
  return path_prefix[:separator], path_prefix[separator + 1:] or None
270 |
271 |
272 | def _validate_path(path):
273 | """Basic validation of Google Storage paths.
274 |
275 | Args:
276 | path: a Google Storage path. It should have form '/bucket/filename'
277 | or '/bucket'.
278 |
279 | Raises:
280 | ValueError: if path is invalid.
281 | TypeError: if path is not of type basestring.
282 | """
283 | if not path:
284 | raise ValueError('Path is empty')
285 | if not isinstance(path, basestring):
286 | raise TypeError('Path should be a string but is %s (%s).' %
287 | (path.__class__, path))
288 |
289 |
def validate_options(options):
  """Validate Google Cloud Storage options.

  Args:
    options: a str->basestring dict of options to pass to Google Cloud Storage.
      May be None or empty, in which case nothing is checked.

  Raises:
    ValueError: if option is not supported.
    TypeError: if option is not of type str or value of an option
      is not of type basestring.
  """
  if not options:
    return

  for option_key, option_value in options.iteritems():
    if not isinstance(option_key, str):
      raise TypeError('option %r should be a str.' % option_key)
    lowered = option_key.lower()
    if not any(lowered.startswith(valid) for valid in _GCS_OPTIONS):
      raise ValueError('option %s is not supported.' % option_key)
    if not isinstance(option_value, basestring):
      raise TypeError('value %r for option %s should be of type basestring.' %
                      (option_value, option_key))
312 |
313 |
def http_time_to_posix(http_time):
  """Convert HTTP time format to posix time.

  See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3.1
  for http time format.

  Args:
    http_time: time in RFC 2616 format. e.g.
      "Mon, 20 Nov 1995 19:12:08 GMT". May be None.

  Returns:
    Secs from unix epoch, or None when http_time is None.
  """
  if http_time is None:
    return None
  parsed = email_utils.parsedate_tz(http_time)
  return email_utils.mktime_tz(parsed)
329 |
330 |
def posix_time_to_http(posix_time):
  """Convert posix time to HTML header time format.

  Args:
    posix_time: unix time, seconds since the epoch. May be None.

  Returns:
    A datatime str in RFC 2616 format, or None when posix_time is None.
  """
  # Explicit None check (mirroring http_time_to_posix above).  The old
  # truthiness test silently returned None for the epoch itself
  # (posix_time == 0), which is a perfectly valid timestamp.
  if posix_time is not None:
    return email_utils.formatdate(posix_time, usegmt=True)
342 |
343 |
# Timestamp layout (without the fractional-seconds '.%fZ' suffix) used in
# GCS's GET bucket XML responses, e.g. '2013-04-12T00:22:27'.
_DT_FORMAT = '%Y-%m-%dT%H:%M:%S'


def dt_str_to_posix(dt_str):
  """format str to posix.

  datetime str is of format %Y-%m-%dT%H:%M:%S.%fZ,
  e.g. 2013-04-12T00:22:27.978Z. According to ISO 8601, T separates date
  and time when both are on one line, and Z indicates UTC (zero meridian).

  A pointer: http://www.cl.cam.ac.uk/~mgk25/iso-time.html

  This is used to parse LastModified node from GCS's GET bucket XML response.

  Args:
    dt_str: A datetime str.

  Returns:
    Secs from unix epoch. By posix definition, epoch is midnight
    1970/1/1 UTC.
  """
  # Drop the fractional-seconds/Z suffix; strptime handles the rest.
  seconds_part, _ = dt_str.split('.')
  parsed = datetime.datetime.strptime(seconds_part, _DT_FORMAT)
  return calendar.timegm(parsed.utctimetuple())
369 |
370 |
def posix_to_dt_str(posix):
  """Reverse of str_to_datetime.

  This is used by GCS stub to generate GET bucket XML response.

  Args:
    posix: A float of secs from unix epoch.

  Returns:
    A datetime str (UTC, with a fixed '.000Z' fractional suffix).
  """
  as_utc = datetime.datetime.utcfromtimestamp(posix)
  return as_utc.strftime(_DT_FORMAT) + '.000Z'
385 |
386 |
def local_run():
  """Whether we should hit GCS dev appserver stub.

  Returns True when SERVER_SOFTWARE is unset or identifies a local
  development/test environment; False for remote_api and production.
  """
  software = os.environ.get('SERVER_SOFTWARE')
  if software is None:
    return True
  if 'remote_api' in software:
    return False
  return software.startswith(('Development', 'testutil'))
397 |
398 |
def local_api_url():
  """Return URL for GCS emulation on dev appserver."""
  host = os.environ.get('HTTP_HOST')
  return 'http://%s%s' % (host, LOCAL_GCS_ENDPOINT)
402 |
403 |
def memory_usage(method):
  """Decorator that logs memory usage before and after a method call.

  Args:
    method: the callable to instrument.

  Returns:
    A wrapper with the same call signature that logs
    runtime.memory_usage() around the delegated call.
  """
  import functools

  # functools.wraps preserves method.__name__/__doc__ on the wrapper so
  # decorated methods stay identifiable in logs and debuggers.
  @functools.wraps(method)
  def wrapper(*args, **kwargs):
    logging.info('Memory before method %s is %s.',
                 method.__name__, runtime.memory_usage().current())
    result = method(*args, **kwargs)
    logging.info('Memory after method %s is %s',
                 method.__name__, runtime.memory_usage().current())
    return result
  return wrapper
414 |
415 |
def _add_ns(tagname):
  """Return tagname qualified with the GCS XML namespace ('{ns}tag')."""
  return '{%s}%s' % (CS_XML_NS, tagname)
419 |
420 |
# Namespace-qualified tag names used when parsing GCS's GET bucket
# (list objects) XML responses.
_T_CONTENTS = _add_ns('Contents')
_T_LAST_MODIFIED = _add_ns('LastModified')
_T_ETAG = _add_ns('ETag')
_T_KEY = _add_ns('Key')
_T_SIZE = _add_ns('Size')
_T_PREFIX = _add_ns('Prefix')
_T_COMMON_PREFIXES = _add_ns('CommonPrefixes')
_T_NEXT_MARKER = _add_ns('NextMarker')
_T_IS_TRUNCATED = _add_ns('IsTruncated')
430 |
--------------------------------------------------------------------------------
/src/lib/cloudstorage/errors.py:
--------------------------------------------------------------------------------
1 | # Copyright 2012 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing,
10 | # software distributed under the License is distributed on an
11 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
12 | # either express or implied. See the License for the specific
13 | # language governing permissions and limitations under the License.
14 |
15 | """Google Cloud Storage specific Files API calls."""
16 |
17 |
18 |
19 |
20 |
# Public API: the GCS exception hierarchy plus the check_status helper.
__all__ = ['AuthorizationError',
           'check_status',
           'Error',
           'FatalError',
           'FileClosedError',
           'ForbiddenError',
           'InvalidRange',
           'NotFoundError',
           'ServerError',
           'TimeoutError',
           'TransientError',
          ]
33 |
34 | import httplib
35 |
36 |
class Error(Exception):
  """Base error for all gcs operations.

  Error can happen on GAE side or GCS server side.
  For details on a particular GCS HTTP response code, see
  https://developers.google.com/storage/docs/reference-status#standardcodes

  Subclasses split into TransientError (safe to retry) and FatalError
  (should not be retried).
  """
44 |
45 |
class TransientError(Error):
  """A transient failure; the operation that raised it could be retried."""
48 |
49 |
class TimeoutError(TransientError):
  """HTTP 408 Request Timeout. Retriable."""
52 |
53 |
class FatalError(Error):
  """A permanent failure; FatalError shouldn't be retried."""
56 |
57 |
class FileClosedError(FatalError):
  """File is already closed.

  This can happen when the upload has finished but 'write' is called on
  a stale upload handle.
  """
64 |
65 |
class NotFoundError(FatalError):
  """HTTP 404 resource not found. Not retriable."""
68 |
69 |
class ForbiddenError(FatalError):
  """HTTP 403 Forbidden.

  While GCS replies with a 403 error for many reasons, the most common one
  is due to bucket permission not correctly setup for your app to access.
  """
76 |
77 |
class AuthorizationError(FatalError):
  """HTTP 401 authentication required.

  Unauthorized request has been received by GCS.

  This error is mostly handled by GCS client. GCS client will request
  a new access token and retry the request.
  """
86 |
87 |
class InvalidRange(FatalError):
  """HTTP 416 Requested Range Not Satisfiable."""
90 |
91 |
class ServerError(TransientError):
  """HTTP >= 500 server side error. Retriable."""
94 |
95 |
def check_status(status, expected, path, headers=None,
                 resp_headers=None, body=None, extras=None):
  """Check HTTP response status is expected.

  Args:
    status: HTTP response status. int.
    expected: a list of expected statuses. A list of ints.
    path: filename or a path prefix.
    headers: HTTP request headers.
    resp_headers: HTTP response headers.
    body: HTTP response body.
    extras: extra info to be logged verbatim if error occurs.

  Raises:
    AuthorizationError: if authorization failed.
    ForbiddenError: if the request was forbidden.
    NotFoundError: if an object that's expected to exist doesn't.
    TimeoutError: if HTTP request timed out.
    InvalidRange: if the requested range was not satisfiable.
    FileClosedError: if an upload got 200 where only 308 was expected.
    ServerError: if server experienced some errors.
    FatalError: if any other unexpected errors occurred.
  """
  if status in expected:
    return

  msg = ('Expect status %r from Google Storage. But got status %d.\n'
         'Path: %r.\n'
         'Request headers: %r.\n'
         'Response headers: %r.\n'
         'Body: %r.\n'
         'Extra info: %r.\n' %
         (expected, status, path, headers, resp_headers, body, extras))

  # Statuses that map one-to-one onto an exception class.
  error_by_status = {
      httplib.UNAUTHORIZED: AuthorizationError,
      httplib.FORBIDDEN: ForbiddenError,
      httplib.NOT_FOUND: NotFoundError,
      httplib.REQUEST_TIMEOUT: TimeoutError,
      httplib.REQUESTED_RANGE_NOT_SATISFIABLE: InvalidRange,
  }
  error_class = error_by_status.get(status)
  if error_class is not None:
    raise error_class(msg)
  if status == httplib.OK and 308 in expected and httplib.OK not in expected:
    # A 200 on a request that only expected 308 (resume incomplete) means
    # the upload handle refers to an already-finalized file.
    raise FileClosedError(msg)
  if status >= 500:
    raise ServerError(msg)
  raise FatalError(msg)
144 |
--------------------------------------------------------------------------------
/src/lib/cloudstorage/rest_api.py:
--------------------------------------------------------------------------------
1 | # Copyright 2012 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing,
10 | # software distributed under the License is distributed on an
11 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
12 | # either express or implied. See the License for the specific
13 | # language governing permissions and limitations under the License.
14 |
15 | """Base and helper classes for Google RESTful APIs."""
16 |
17 |
18 |
19 |
20 |
21 | __all__ = ['add_sync_methods']
22 |
23 | import logging
24 | import os
25 | import random
26 | import time
27 |
28 | from . import api_utils
29 |
30 | try:
31 | from google.appengine.api import app_identity
32 | from google.appengine.ext import ndb
33 | except ImportError:
34 | from google.appengine.api import app_identity
35 | from google.appengine.ext import ndb
36 |
37 |
38 |
39 | def _make_sync_method(name):
40 | """Helper to synthesize a synchronous method from an async method name.
41 |
42 | Used by the @add_sync_methods class decorator below.
43 |
44 | Args:
45 | name: The name of the synchronous method.
46 |
47 | Returns:
48 | A method (with first argument 'self') that retrieves and calls
49 | self., passing its own arguments, expects it to return a
50 | Future, and then waits for and returns that Future's result.
51 | """
52 |
53 | def sync_wrapper(self, *args, **kwds):
54 | method = getattr(self, name)
55 | future = method(*args, **kwds)
56 | return future.get_result()
57 |
58 | return sync_wrapper
59 |
60 |
def add_sync_methods(cls):
  """Class decorator to add synchronous methods corresponding to async methods.

  This modifies the class in place, adding additional methods to it.
  If a synchronous method of a given name already exists it is not
  replaced.

  Args:
    cls: A class.

  Returns:
    The same class, modified in place.
  """
  for attr_name in list(cls.__dict__.keys()):
    if not attr_name.endswith('_async'):
      continue
    sync_name = attr_name[:-len('_async')]
    if not hasattr(cls, sync_name):
      setattr(cls, sync_name, _make_sync_method(attr_name))
  return cls
80 |
81 |
class _AE_TokenStorage_(ndb.Model):
  """Entity to store app_identity tokens in memcache."""

  # The OAuth2 access token string.
  token = ndb.StringProperty()
  # Token expiration, in seconds since the epoch.
  expires = ndb.FloatProperty()
87 |
88 |
@ndb.tasklet
def _make_token_async(scopes, service_account_id):
  """Get a fresh authentication token.

  Args:
    scopes: A list of scopes.
    service_account_id: Internal-use only.

  Raises:
    ndb.Return: with a tuple (token, expiration_time) where expiration_time
      is seconds since the epoch. (Raising ndb.Return is how tasklets
      deliver their result.)
  """
  # Issue the GetAccessToken app_identity RPC asynchronously and yield
  # until it completes.
  rpc = app_identity.create_rpc()
  app_identity.make_get_access_token_call(rpc, scopes, service_account_id)
  token, expires_at = yield rpc
  raise ndb.Return((token, expires_at))
105 |
106 |
class _RestApi(object):
  """Base class for REST-based API wrapper classes.

  This class manages authentication tokens and request retries. All
  APIs are available as synchronous and async methods; synchronous
  methods are synthesized from async ones by the add_sync_methods()
  function in this module.

  WARNING: Do NOT directly use this api. It's an implementation detail
  and is subject to change at any release.
  """

  def __init__(self, scopes, service_account_id=None, token_maker=None,
               retry_params=None):
    """Constructor.

    Args:
      scopes: A scope or a list of scopes.
      service_account_id: Internal use only.
      token_maker: An asynchronous function of the form
        (scopes, service_account_id) -> (token, expires).
      retry_params: An instance of api_utils.RetryParams. If None, the
        default for current thread will be used.
    """

    if isinstance(scopes, basestring):
      scopes = [scopes]
    self.scopes = scopes
    self.service_account_id = service_account_id
    self.make_token_async = token_maker or _make_token_async
    if not retry_params:
      retry_params = api_utils._get_default_retry_params()
    self.retry_params = retry_params
    self.user_agent = {'User-Agent': retry_params._user_agent}
    # Random per-instance headroom spreads token refreshes out in time
    # (see get_token_async, which uses it to avoid refresh bursts).
    self.expiration_headroom = random.randint(60, 240)

  def __getstate__(self):
    """Store state as part of serialization/pickling."""
    # The default token maker is stored as None so that unpickling
    # re-binds to this module's _make_token_async rather than pickling
    # the function itself.
    return {'scopes': self.scopes,
            'id': self.service_account_id,
            'a_maker': (None if self.make_token_async == _make_token_async
                        else self.make_token_async),
            'retry_params': self.retry_params,
            'expiration_headroom': self.expiration_headroom}

  def __setstate__(self, state):
    """Restore state as part of deserialization/unpickling."""
    self.__init__(state['scopes'],
                  service_account_id=state['id'],
                  token_maker=state['a_maker'],
                  retry_params=state['retry_params'])
    # __init__ randomizes the headroom; restore the pickled value.
    self.expiration_headroom = state['expiration_headroom']

  @ndb.tasklet
  def do_request_async(self, url, method='GET', headers=None, payload=None,
                       deadline=None, callback=None):
    """Issue one HTTP request.

    It performs async retries using tasklets.

    Args:
      url: the url to fetch.
      method: the method in which to fetch.
      headers: the http headers.
      payload: the data to submit in the fetch.
      deadline: the deadline in which to make the call.
      callback: the call to make once completed.

    Yields:
      The async fetch of the url.
    """
    # Retries are delegated to _RetryWrapper; redirects are disabled so
    # GCS redirect responses surface to the caller unchanged.
    retry_wrapper = api_utils._RetryWrapper(
        self.retry_params,
        retriable_exceptions=api_utils._RETRIABLE_EXCEPTIONS,
        should_retry=api_utils._should_retry)
    resp = yield retry_wrapper.run(
        self.urlfetch_async,
        url=url,
        method=method,
        headers=headers,
        payload=payload,
        deadline=deadline,
        callback=callback,
        follow_redirects=False)
    raise ndb.Return((resp.status_code, resp.headers, resp.content))

  @ndb.tasklet
  def get_token_async(self, refresh=False):
    """Get an authentication token.

    The token is cached in memcache, keyed by the scopes argument.
    Uses a random token expiration headroom value generated in the constructor
    to eliminate a burst of GET_ACCESS_TOKEN API requests.

    Args:
      refresh: If True, ignore a cached token; default False.

    Yields:
      An authentication token. This token is guaranteed to be non-expired.
    """
    key = '%s,%s' % (self.service_account_id, ','.join(self.scopes))
    ts = yield _AE_TokenStorage_.get_by_id_async(
        key, use_cache=True, use_memcache=True,
        use_datastore=self.retry_params.save_access_token)
    # Refresh when asked, when there is no cached token, or when the
    # cached one expires within the headroom window.
    if refresh or ts is None or ts.expires < (
        time.time() + self.expiration_headroom):
      token, expires_at = yield self.make_token_async(
          self.scopes, self.service_account_id)
      timeout = int(expires_at - time.time())
      ts = _AE_TokenStorage_(id=key, token=token, expires=expires_at)
      # Only cache tokens that have not already expired.
      if timeout > 0:
        yield ts.put_async(memcache_timeout=timeout,
                           use_datastore=self.retry_params.save_access_token,
                           use_cache=True, use_memcache=True)
    raise ndb.Return(ts.token)

  @ndb.tasklet
  def urlfetch_async(self, url, method='GET', headers=None,
                     payload=None, deadline=None, callback=None,
                     follow_redirects=False):
    """Make an async urlfetch() call.

    This is an async wrapper around urlfetch(). It adds an authentication
    header.

    Args:
      url: the url to fetch.
      method: the method in which to fetch.
      headers: the http headers.
      payload: the data to submit in the fetch.
      deadline: the deadline in which to make the call.
      callback: the call to make once completed.
      follow_redirects: whether or not to follow redirects.

    Yields:
      This returns a Future despite not being decorated with @ndb.tasklet!
    """
    # Copy the caller's headers before mutating them with our User-Agent.
    headers = {} if headers is None else dict(headers)
    headers.update(self.user_agent)
    try:
      self.token = yield self.get_token_async()
    except app_identity.InternalError, e:
      # Sandman-based dev setups can't mint tokens; proceed anonymously
      # there, but re-raise everywhere else.
      if os.environ.get('DATACENTER', '').endswith('sandman'):
        self.token = None
        logging.warning('Could not fetch an authentication token in sandman '
                        'based Appengine devel setup; proceeding without one.')
      else:
        raise e
    if self.token:
      headers['authorization'] = 'OAuth ' + self.token

    deadline = deadline or self.retry_params.urlfetch_timeout

    ctx = ndb.get_context()
    resp = yield ctx.urlfetch(
        url, payload=payload, method=method,
        headers=headers, follow_redirects=follow_redirects,
        deadline=deadline, callback=callback)
    raise ndb.Return(resp)
266 |
267 |
268 | _RestApi = add_sync_methods(_RestApi)
269 |
--------------------------------------------------------------------------------
/src/lib/cloudstorage/test_utils.py:
--------------------------------------------------------------------------------
1 | # Copyright 2013 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing,
10 | # software distributed under the License is distributed on an
11 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
12 | # either express or implied. See the License for the specific
13 | # language governing permissions and limitations under the License.
14 |
15 | """Utils for testing."""
16 |
17 |
class MockUrlFetchResult(object):
  """Minimal stand-in for a urlfetch result object, for use in tests."""

  def __init__(self, status, headers, body):
    """Record the canned status code, headers and body to expose."""
    self.content = body
    self.status_code = status
    self.headers = headers
    self.final_url = None
    self.content_was_truncated = False
26 |
--------------------------------------------------------------------------------
/src/lib/httplib2/iri2uri.py:
--------------------------------------------------------------------------------
1 | """
2 | iri2uri
3 |
4 | Converts an IRI to a URI.
5 |
6 | """
7 | __author__ = "Joe Gregorio (joe@bitworking.org)"
8 | __copyright__ = "Copyright 2006, Joe Gregorio"
9 | __contributors__ = []
10 | __version__ = "1.0.0"
11 | __license__ = "MIT"
12 | __history__ = """
13 | """
14 |
15 | import urlparse
16 |
17 |
18 | # Convert an IRI to a URI following the rules in RFC 3987
19 | #
20 | # The characters we need to enocde and escape are defined in the spec:
21 | #
22 | # iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
23 | # ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
24 | # / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
25 | # / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
26 | # / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
27 | # / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
28 | # / %xD0000-DFFFD / %xE1000-EFFFD
29 |
# %-escape ranges taken from RFC 3987 ('ucschar' and 'iprivate'), kept in
# ascending order -- encode() relies on that ordering to stop scanning early.
escape_range = [
    (0xA0, 0xD7FF),
    (0xE000, 0xF8FF),
    (0xF900, 0xFDCF),
    (0xFDF0, 0xFFEF),
    (0x10000, 0x1FFFD),
    (0x20000, 0x2FFFD),
    (0x30000, 0x3FFFD),
    (0x40000, 0x4FFFD),
    (0x50000, 0x5FFFD),
    (0x60000, 0x6FFFD),
    (0x70000, 0x7FFFD),
    (0x80000, 0x8FFFD),
    (0x90000, 0x9FFFD),
    (0xA0000, 0xAFFFD),
    (0xB0000, 0xBFFFD),
    (0xC0000, 0xCFFFD),
    (0xD0000, 0xDFFFD),
    (0xE1000, 0xEFFFD),
    (0xF0000, 0xFFFFD),
    (0x100000, 0x10FFFD),
]

def encode(c):
    """Return c %-escaped as its UTF-8 octets if it falls inside one of the
    RFC 3987 escape ranges, otherwise return it unchanged."""
    codepoint = ord(c)
    for low, high in escape_range:
        if codepoint < low:
            # Ranges are sorted ascending, so no later range can match.
            break
        if codepoint <= high:
            return "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
    return c
63 |
64 |
def iri2uri(uri):
    """Convert an IRI to a URI (RFC 3987).

    The IRI must be passed as a unicode string -- do not utf-8 encode it
    before calling.  Non-unicode values are returned unchanged.
    """
    if not isinstance(uri, unicode):
        return uri
    scheme, authority, path, query, fragment = urlparse.urlsplit(uri)
    # Hostnames are IDNA-encoded rather than %-escaped.
    authority = authority.encode('idna')
    recombined = urlparse.urlunsplit((scheme, authority, path, query, fragment))
    # For each character in 'ucschar' or 'iprivate': encode as utf-8,
    # then %-encode each octet of that utf-8.
    return "".join([encode(c) for c in recombined])
78 |
79 | if __name__ == "__main__":
80 | import unittest
81 |
82 | class Test(unittest.TestCase):
83 |
84 | def test_uris(self):
85 | """Test that URIs are invariant under the transformation."""
86 | invariant = [
87 | u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
88 | u"http://www.ietf.org/rfc/rfc2396.txt",
89 | u"ldap://[2001:db8::7]/c=GB?objectClass?one",
90 | u"mailto:John.Doe@example.com",
91 | u"news:comp.infosystems.www.servers.unix",
92 | u"tel:+1-816-555-1212",
93 | u"telnet://192.0.2.16:80/",
94 | u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
95 | for uri in invariant:
96 | self.assertEqual(uri, iri2uri(uri))
97 |
98 | def test_iri(self):
99 | """ Test that the right type of escaping is done for each part of the URI."""
100 | self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
101 | self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
102 | self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
103 | self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
104 | self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
105 | self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
106 | self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
107 |
108 | unittest.main()
109 |
110 |
111 |
--------------------------------------------------------------------------------
/src/lib/oauth2/_compat.py:
--------------------------------------------------------------------------------
# Python 2 / Python 3 compatibility shims.  Probe for the `unicode`
# builtin: it exists on Python 2 and is absent (NameError) on Py3k.
try:
    TEXT = unicode
except NameError: #pragma NO COVER Py3k
    # Python 3: native str is already text.
    PY3 = True
    TEXT = str
    STRING_TYPES = (str, bytes)
    def b(x, encoding='ascii'):
        # Encode text to bytes using the given encoding (default ascii).
        return bytes(x, encoding)
else: #pragma NO COVER Python2
    # Python 2: text is unicode; str doubles as bytes.
    PY3 = False
    STRING_TYPES = (unicode, bytes)
    def b(x, encoding='ascii'):
        # Encode unicode to a byte string; other values pass through as-is.
        if isinstance(x, unicode):
            x = x.encode(encoding)
        return x

def u(x, encoding='ascii'):
    # Decode byte strings to text; text values pass through unchanged.
    if isinstance(x, TEXT): #pragma NO COVER
        return x
    try:
        return x.decode(encoding)
    except AttributeError: #pragma NO COVER
        raise ValueError('WTF: %s' % x)
24 |
# URL-handling imports: Python 3 consolidated these into urllib.parse,
# while Python 2 splits them across urlparse and urllib.
try:
    import urlparse
except ImportError: #pragma NO COVER Py3k
    from urllib.parse import parse_qs
    from urllib.parse import parse_qsl
    from urllib.parse import quote
    from urllib.parse import unquote
    from urllib.parse import unquote_to_bytes
    from urllib.parse import urlencode
    from urllib.parse import urlsplit
    from urllib.parse import urlunsplit
    from urllib.parse import urlparse
    from urllib.parse import urlunparse
else: #pragma NO COVER Python2
    from urlparse import parse_qs
    from urlparse import parse_qsl
    from urllib import quote
    from urllib import unquote
    from urllib import urlencode
    from urlparse import urlsplit
    from urlparse import urlunsplit
    from urlparse import urlparse
    from urlparse import urlunparse
    # Python 2's unquote already operates on byte strings, so it can
    # stand in for Python 3's unquote_to_bytes.
    unquote_to_bytes = unquote
49 |
--------------------------------------------------------------------------------
/src/lib/oauth2/_version.py:
--------------------------------------------------------------------------------
# This is the version of this source code.

# Hand-maintained major.minor version string.
manual_verstr = "1.9"



# Build suffix appended by release automation.
auto_build_num = "0.post1"



verstr = manual_verstr + "." + auto_build_num
# Prefer pyutil's richer Version class when available; otherwise fall
# back to distutils' LooseVersion.
try:
    from pyutil.version_class import Version as pyutil_Version
except (ImportError, ValueError): #pragma NO COVER
    # Maybe there is no pyutil installed.
    from distutils.version import LooseVersion as distutils_Version
    __version__ = distutils_Version(verstr)
else: #pragma NO COVER
    __version__ = pyutil_Version(verstr)
20 |
--------------------------------------------------------------------------------
/src/lib/oauth2/clients/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ilhan-mstf/tt-history/1bb60cb81e97ef1abecf657cfa078798bb29cace/src/lib/oauth2/clients/__init__.py
--------------------------------------------------------------------------------
/src/lib/oauth2/clients/imap.py:
--------------------------------------------------------------------------------
1 | """
2 | The MIT License
3 |
4 | Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in
14 | all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 | THE SOFTWARE.
23 | """
24 |
25 | import oauth2
26 | import imaplib
27 |
28 |
class IMAP4_SSL(imaplib.IMAP4_SSL):
    """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        """Authenticate with the server using the XOAUTH mechanism.

        Validates the oauth2 consumer and token (either may be None) and
        delegates to imaplib's AUTHENTICATE command.

        Raises:
            ValueError: if consumer or token is of the wrong type.
        """
        if consumer is not None and not isinstance(consumer, oauth2.Consumer):
            raise ValueError("Invalid consumer.")

        if token is not None and not isinstance(token, oauth2.Token):
            raise ValueError("Invalid token.")

        def xoauth_cb(challenge):
            # imaplib passes the server challenge, which XOAUTH ignores.
            return oauth2.build_xoauth_string(url, consumer, token)

        imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', xoauth_cb)
41 |
--------------------------------------------------------------------------------
/src/lib/oauth2/clients/smtp.py:
--------------------------------------------------------------------------------
1 | """
2 | The MIT License
3 |
4 | Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in
14 | all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 | THE SOFTWARE.
23 | """
24 |
25 | import oauth2
26 | import smtplib
27 | import base64
28 |
29 |
class SMTP(smtplib.SMTP):
    """SMTP wrapper for smtplib.SMTP that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        # Validate argument types up front; None values are permitted and
        # passed straight through to build_xoauth_string.
        if not (consumer is None or isinstance(consumer, oauth2.Consumer)):
            raise ValueError("Invalid consumer.")
        if not (token is None or isinstance(token, oauth2.Token)):
            raise ValueError("Invalid token.")

        xoauth = oauth2.build_xoauth_string(url, consumer, token)
        self.docmd('AUTH', 'XOAUTH %s' % base64.b64encode(xoauth))
42 |
--------------------------------------------------------------------------------
/src/migrate.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | from google.appengine.ext import ndb
27 | from google.appengine.ext import webapp
28 | from google.appengine.ext.webapp.util import run_wsgi_app
29 | from globals import Globals
30 | from model import Trend, TrendWindow
31 |
32 | import logging
33 | import math
34 | import time
35 |
36 |
class Migrate(webapp.RequestHandler):
    """ One-off handler for /migrate: copies recent Trend rows into the
    temporary TrendWindow entity as part of the v3 storage migration. """

    def get(self):
        # /migrate?v=3&region=<woeid> runs the v3 migration for one region;
        # any other 'v' value makes the request a no-op.
        logging.info("Migration starting...")

        region = self.request.get('region')
        if self.request.get('v') == '3':
            self.toV3(region)

        logging.info("Migration finished.")

    def toV3(self, region):
        """ In this migration, only daily summary of trends are stored.
        Therefore, there is no need to store trends fetched in 10 minutes.
        To switch this version, this code moves last days trends to temp trend
        entity. """

        # Block until every asynchronous datastore put has finished.
        ndb.Future.wait_all(self.getAndPutTrends(region))

    def putTrends(self, trends):
        # Copies each old Trend entity into a new TrendWindow entity and
        # writes them asynchronously; returns the list of put futures.
        logging.info("will put %s", len(trends))
        entityList = []
        for trend in trends:
            entityList.append(
                TrendWindow(
                    name=trend.name,
                    woeid=trend.woeid,
                    timestamp=trend.timestamp,
                    time=trend.time,
                    volume=trend.volume))
        return ndb.put_multi_async(entityList)

    def getAndPutTrends(self, region):
        # Fetches the region's trends from the last 24 hours and re-puts
        # them as TrendWindow entities; returns the pending put futures.
        get_futures = []
        endTimestamp = int(math.floor(time.time()))
        startTimestamp = endTimestamp - Globals._1_DAY
        get_futures.extend(
            self.requestTrendsFromDatastore({
                'name': '',
                'history': 'ld',
                'woeid': str(region),
                'startTimestamp': startTimestamp,
                'endTimestamp': endTimestamp,
                'limit': ''
            }))

        put_futures = []
        for f in get_futures:
            # get_result() blocks on each query future in turn before the
            # corresponding async puts are started.
            put_futures.extend(self.putTrends(f.get_result()))

        logging.info("get and put completed, waiting database operations.")
        return put_futures

    def requestTrendsFromDatastore(self, prms):
        """ Issues async Trend queries to the datastore and returns the
        query futures (one per timestamp sub-interval). """

        prms['endTimestamp'] = int(prms['endTimestamp'])
        prms['startTimestamp'] = int(prms['startTimestamp'])

        # An endTimestamp of 0 means "a single 10-minute window".
        if prms['endTimestamp'] == 0:
            prms['endTimestamp'] = prms['startTimestamp'] + Globals._10_MINUTES

        # split up timestamp space into {ts_intervals} equal parts and async
        # query each of them
        ts_intervals = 24
        ts_delta = (
            prms['endTimestamp'] - prms['startTimestamp']) / ts_intervals
        cur_start_time = prms['startTimestamp']
        q_futures = []

        for x in range(ts_intervals):
            cur_end_time = (cur_start_time + ts_delta)
            if x == (ts_intervals - 1):  # Last one has to cover full range
                cur_end_time = prms['endTimestamp']

            q_futures.append(
                Trend.query(Trend.timestamp >= cur_start_time,
                            Trend.timestamp < cur_end_time,
                            Trend.woeid == int(prms['woeid']))
                .order(-Trend.timestamp)
                .fetch_async(limit=None))
            cur_start_time = cur_end_time

        return q_futures
122 |
123 |
# Single route: /migrate is served by the Migrate handler above.
application = webapp.WSGIApplication([('/migrate', Migrate)], debug=True)


def main():
    # CGI-style entry point used by the legacy App Engine Python runtime.
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
133 |
--------------------------------------------------------------------------------
/src/model.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | from google.appengine.ext import ndb
27 |
28 |
29 | # Old version's trend storage table. It is very huge. To reduce the costs,
30 | # project switches to window and summary based approaches.
class Trend(ndb.Model):
    # Legacy 10-minute-resolution trend record (superseded by the v3 scheme).
    name = ndb.StringProperty(indexed=False)  # trending topic text
    woeid = ndb.IntegerProperty(indexed=True)  # region id (WOEID); queried by equality
    timestamp = ndb.IntegerProperty(indexed=True)  # unix seconds; queried by range
    time = ndb.IntegerProperty(indexed=False)  # NOTE(review): semantics unclear from here — confirm against writer
    volume = ndb.IntegerProperty(indexed=False)  # presumably tweet volume — TODO confirm
37 |
38 |
39 | # To migrate v3 version of tthistory old (10 minutes resolution) trends
40 | # will be deleted. Therefore, temporarily trends will be saved to this entity.
41 | # Trends will be stored in this table for a specified window (e.g last 24 hours).
class TrendWindow(ndb.Model):
    # Same schema as Trend; holds the temporary migration window of rows.
    name = ndb.StringProperty(indexed=False)  # trending topic text
    woeid = ndb.IntegerProperty(indexed=True)  # region id (WOEID)
    timestamp = ndb.IntegerProperty(indexed=True)  # unix seconds; indexed for range queries
    time = ndb.IntegerProperty(indexed=False)  # NOTE(review): semantics unclear from here — confirm
    volume = ndb.IntegerProperty(indexed=False)  # presumably tweet volume — TODO confirm
48 |
49 |
50 | # Daily summary of trends.
class TrendSummary(ndb.Model):
    # Daily roll-up of a trend; only 'name' is indexed here.
    name = ndb.StringProperty(indexed=True)  # trending topic text
    woeid = ndb.IntegerProperty(indexed=False)  # region id (WOEID)
    date = ndb.StringProperty(indexed=False)  # stored as a string — format not visible here; TODO confirm
    duration = ndb.IntegerProperty(indexed=False)  # presumably how long the topic trended — confirm units
    volume = ndb.IntegerProperty(indexed=False)  # presumably tweet volume — TODO confirm
57 |
58 |
class Error(ndb.Model):
    # Minimal error-log entity; nothing is indexed, so rows are write-cheap.
    msg = ndb.StringProperty(indexed=False)  # error message text
    timestamp = ndb.IntegerProperty(indexed=False)  # presumably unix seconds — confirm against writer
62 |
--------------------------------------------------------------------------------
/src/page_handler.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | import json
27 | import os
28 | import traceback
29 | import logging
30 |
31 | from google.appengine.ext import webapp
32 | from google.appengine.ext.webapp import template
33 | from google.appengine.ext.webapp.util import run_wsgi_app
34 | from trend_manager import TrendManager
35 | from rate_limit_manager import RateLimitManager
36 |
37 |
class MainPage(webapp.RequestHandler):
    """ Serves the landing page by rendering index.html. """

    def get(self):
        # Resolve index.html relative to this module and render it with an
        # empty template context.
        template_path = os.path.join(os.path.dirname(__file__), 'index.html')
        rendered = template.render(template_path, {})
        self.response.out.write(rendered)
46 |
47 |
class RPCHandler(webapp.RequestHandler):
    """ Handles the RemoteProcedureCall requests.

    Serves /rpc: enforces the per-IP rate limit, reads the query
    parameters and returns the matching trends as a JSON object
    ({"trends": ...} on success, {"error": ...} on failure). """

    def get(self):

        try:
            # Reject callers that exceeded the per-IP rate limit; requests
            # from localhost are exempt.
            if "localhost" not in self.request.url:
                if not RateLimitManager().checkRateLimit(
                        self.request.remote_addr):
                    logging.warning(
                        "Remote user has exceeded limits; rejecting. %s",
                        self.request.remote_addr)
                    self.error(503)
                    return

            # Read and set parameters
            prms = {
                'name': self.request.get('name'),
                'history':
                self.request.get('history'),  # history = ['ld'] last day
                'woeid': self.request.get('woeid'),
                'startTimestamp': self.request.get('timestamp'),
                'endTimestamp': self.request.get('end_timestamp', '0'),
                'limit': self.request.get('limit')
            }

            # Get trends. NOTE: compare with !=, not `is not` — identity
            # comparison against a string literal only worked by CPython
            # interning accident.
            if prms['name'] != "":
                trends = TrendManager().getResultsTrendByName(prms)
            else:
                trends = TrendManager().getResultTrends(prms)

            # Set response in json format
            self.response.out.write(json.dumps({"trends": trends}))

        except Exception as e:
            # Surface the error to the client as JSON instead of a bare 500;
            # the full traceback still goes to the server log.
            traceback.print_exc()
            self.response.out.write(json.dumps({"error": str(e)}))
85 |
86 |
# Route /rpc to the JSON API; every other path falls through to MainPage.
application = webapp.WSGIApplication(
    [('/rpc', RPCHandler), ('/.*', MainPage)], debug=False)


def main():
    # CGI-style entry point used by the legacy App Engine Python runtime.
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
97 |
--------------------------------------------------------------------------------
/src/queue.yaml:
--------------------------------------------------------------------------------
1 | queue:
2 | - name: default
3 | rate: 1/s
4 | retry_parameters:
5 | task_retry_limit: 7
6 | min_backoff_seconds: 1
7 |
--------------------------------------------------------------------------------
/src/rate_limit_manager.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | import layer_cache
27 | from globals import Globals
28 |
29 |
class RateLimitManager(object):
    """ Per-IP request counter backed by the layered cache. """

    def __init__(self):
        # Load the shared table (dict of ip -> request count) from the cache.
        self.rateLimits = self.getRateLimits(key="rate-limits")

    @layer_cache.cache(
        layer=Globals.DUAL_LAYER_MEMCACHE_AND_IN_APP_MEMORY_CACHE,
        expiration=Globals._1_DAY)
    def getRateLimits(self, key=""):
        # A cache miss yields a fresh, empty table.
        return {}

    @layer_cache.cache(
        layer=Globals.DUAL_LAYER_MEMCACHE_AND_IN_APP_MEMORY_CACHE,
        expiration=Globals._1_DAY,
        bust_cache=True)
    def setRateLimits(self, rateLimits, key=""):
        # bust_cache presumably forces the cached entry to be replaced with
        # the returned value — see layer_cache for the exact semantics.
        return rateLimits

    def getRateValue(self, key):
        # Returns the stored count, or None when the key is unknown.
        return self.rateLimits.get(key)

    def setRateValue(self, rate, key):
        self.rateLimits[key] = rate
        self.setRateLimits(self.rateLimits, key="rate-limits")

    def checkRateLimit(self, ip):
        # True while the caller may proceed; False once the cap is exceeded.
        current = self.getRateValue(ip)
        if current is None:
            self.setRateValue(1, ip)
            return True
        if current > Globals.MAX_REQUESTS:
            return False
        self.setRateValue(current + 1, ip)
        return True
68 |
--------------------------------------------------------------------------------
/src/send_email.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | """
3 | The MIT License
4 |
5 | Copyright (c) 2013 Mustafa İlhan
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in
15 | all copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | THE SOFTWARE.
24 | """
25 |
26 | from google.appengine.api import app_identity
27 | from google.appengine.api import mail
28 |
29 |
class SendEmail(object):
    """ Sends notification mail through the App Engine mail API. """

    def __init__(self):
        # Mail is sent from the app's own service-account address.
        app_id = app_identity.get_application_id()
        self.sender_address = '%s@appspot.gserviceaccount.com' % app_id

    def send(self, subject, body):
        # [START send_mail]
        # NOTE(review): the 'to' value looks like a truncated display name
        # with no address part — verify it is a deliverable recipient.
        mail.send_mail(
            sender=self.sender_address,
            to="Mustafa İlhan ",
            subject=subject,
            body=body)
        # [END send_mail]
42 | # [END send_mail]
43 |
--------------------------------------------------------------------------------
/src/static/bootstrap/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "vars": {
3 | "@gray-base": "#000",
4 | "@gray-darker": "lighten(@gray-base, 13.5%)",
5 | "@gray-dark": "lighten(@gray-base, 20%)",
6 | "@gray": "lighten(@gray-base, 33.5%)",
7 | "@gray-light": "lighten(@gray-base, 46.7%)",
8 | "@gray-lighter": "lighten(@gray-base, 93.5%)",
9 | "@brand-primary": "darken(#428bca, 6.5%)",
10 | "@brand-success": "#5cb85c",
11 | "@brand-info": "#5bc0de",
12 | "@brand-warning": "#f0ad4e",
13 | "@brand-danger": "#d9534f",
14 | "@body-bg": "#fff",
15 | "@text-color": "@gray-dark",
16 | "@link-color": "@brand-primary",
17 | "@link-hover-color": "darken(@link-color, 15%)",
18 | "@link-hover-decoration": "underline",
19 | "@font-family-sans-serif": "\"Helvetica Neue\", Helvetica, Arial, sans-serif",
20 | "@font-family-serif": "Georgia, \"Times New Roman\", Times, serif",
21 | "@font-family-monospace": "Menlo, Monaco, Consolas, \"Courier New\", monospace",
22 | "@font-family-base": "@font-family-sans-serif",
23 | "@font-size-base": "14px",
24 | "@font-size-large": "ceil((@font-size-base * 1.25))",
25 | "@font-size-small": "ceil((@font-size-base * 0.85))",
26 | "@font-size-h1": "floor((@font-size-base * 2.6))",
27 | "@font-size-h2": "floor((@font-size-base * 2.15))",
28 | "@font-size-h3": "ceil((@font-size-base * 1.7))",
29 | "@font-size-h4": "ceil((@font-size-base * 1.25))",
30 | "@font-size-h5": "@font-size-base",
31 | "@font-size-h6": "ceil((@font-size-base * 0.85))",
32 | "@line-height-base": "1.428571429",
33 | "@line-height-computed": "floor((@font-size-base * @line-height-base))",
34 | "@headings-font-family": "inherit",
35 | "@headings-font-weight": "500",
36 | "@headings-line-height": "1.1",
37 | "@headings-color": "inherit",
38 | "@icon-font-path": "\"../fonts/\"",
39 | "@icon-font-name": "\"glyphicons-halflings-regular\"",
40 | "@icon-font-svg-id": "\"glyphicons_halflingsregular\"",
41 | "@padding-base-vertical": "6px",
42 | "@padding-base-horizontal": "12px",
43 | "@padding-large-vertical": "10px",
44 | "@padding-large-horizontal": "16px",
45 | "@padding-small-vertical": "5px",
46 | "@padding-small-horizontal": "10px",
47 | "@padding-xs-vertical": "1px",
48 | "@padding-xs-horizontal": "5px",
49 | "@line-height-large": "1.3333333",
50 | "@line-height-small": "1.5",
51 | "@border-radius-base": "4px",
52 | "@border-radius-large": "6px",
53 | "@border-radius-small": "3px",
54 | "@component-active-color": "#fff",
55 | "@component-active-bg": "@brand-primary",
56 | "@caret-width-base": "4px",
57 | "@caret-width-large": "5px",
58 | "@table-cell-padding": "8px",
59 | "@table-condensed-cell-padding": "5px",
60 | "@table-bg": "transparent",
61 | "@table-bg-accent": "#f9f9f9",
62 | "@table-bg-hover": "#f5f5f5",
63 | "@table-bg-active": "@table-bg-hover",
64 | "@table-border-color": "#ddd",
65 | "@btn-font-weight": "normal",
66 | "@btn-default-color": "#333",
67 | "@btn-default-bg": "#fff",
68 | "@btn-default-border": "#ccc",
69 | "@btn-primary-color": "#fff",
70 | "@btn-primary-bg": "@brand-primary",
71 | "@btn-primary-border": "darken(@btn-primary-bg, 5%)",
72 | "@btn-success-color": "#fff",
73 | "@btn-success-bg": "@brand-success",
74 | "@btn-success-border": "darken(@btn-success-bg, 5%)",
75 | "@btn-info-color": "#fff",
76 | "@btn-info-bg": "@brand-info",
77 | "@btn-info-border": "darken(@btn-info-bg, 5%)",
78 | "@btn-warning-color": "#fff",
79 | "@btn-warning-bg": "@brand-warning",
80 | "@btn-warning-border": "darken(@btn-warning-bg, 5%)",
81 | "@btn-danger-color": "#fff",
82 | "@btn-danger-bg": "@brand-danger",
83 | "@btn-danger-border": "darken(@btn-danger-bg, 5%)",
84 | "@btn-link-disabled-color": "@gray-light",
85 | "@btn-border-radius-base": "@border-radius-base",
86 | "@btn-border-radius-large": "@border-radius-large",
87 | "@btn-border-radius-small": "@border-radius-small",
88 | "@input-bg": "#fff",
89 | "@input-bg-disabled": "@gray-lighter",
90 | "@input-color": "@gray",
91 | "@input-border": "#ccc",
92 | "@input-border-radius": "@border-radius-base",
93 | "@input-border-radius-large": "@border-radius-large",
94 | "@input-border-radius-small": "@border-radius-small",
95 | "@input-border-focus": "#66afe9",
96 | "@input-color-placeholder": "#999",
97 | "@input-height-base": "(@line-height-computed + (@padding-base-vertical * 2) + 2)",
98 | "@input-height-large": "(ceil(@font-size-large * @line-height-large) + (@padding-large-vertical * 2) + 2)",
99 | "@input-height-small": "(floor(@font-size-small * @line-height-small) + (@padding-small-vertical * 2) + 2)",
100 | "@form-group-margin-bottom": "15px",
101 | "@legend-color": "@gray-dark",
102 | "@legend-border-color": "#e5e5e5",
103 | "@input-group-addon-bg": "@gray-lighter",
104 | "@input-group-addon-border-color": "@input-border",
105 | "@cursor-disabled": "not-allowed",
106 | "@dropdown-bg": "#fff",
107 | "@dropdown-border": "rgba(0,0,0,.15)",
108 | "@dropdown-fallback-border": "#ccc",
109 | "@dropdown-divider-bg": "#e5e5e5",
110 | "@dropdown-link-color": "@gray-dark",
111 | "@dropdown-link-hover-color": "darken(@gray-dark, 5%)",
112 | "@dropdown-link-hover-bg": "#f5f5f5",
113 | "@dropdown-link-active-color": "@component-active-color",
114 | "@dropdown-link-active-bg": "@component-active-bg",
115 | "@dropdown-link-disabled-color": "@gray-light",
116 | "@dropdown-header-color": "@gray-light",
117 | "@dropdown-caret-color": "#000",
118 | "@screen-xs": "480px",
119 | "@screen-xs-min": "@screen-xs",
120 | "@screen-phone": "@screen-xs-min",
121 | "@screen-sm": "768px",
122 | "@screen-sm-min": "@screen-sm",
123 | "@screen-tablet": "@screen-sm-min",
124 | "@screen-md": "992px",
125 | "@screen-md-min": "@screen-md",
126 | "@screen-desktop": "@screen-md-min",
127 | "@screen-lg": "1200px",
128 | "@screen-lg-min": "@screen-lg",
129 | "@screen-lg-desktop": "@screen-lg-min",
130 | "@screen-xs-max": "(@screen-sm-min - 1)",
131 | "@screen-sm-max": "(@screen-md-min - 1)",
132 | "@screen-md-max": "(@screen-lg-min - 1)",
133 | "@grid-columns": "12",
134 | "@grid-gutter-width": "30px",
135 | "@grid-float-breakpoint": "@screen-sm-min",
136 | "@grid-float-breakpoint-max": "(@grid-float-breakpoint - 1)",
137 | "@container-tablet": "(720px + @grid-gutter-width)",
138 | "@container-sm": "@container-tablet",
139 | "@container-desktop": "(940px + @grid-gutter-width)",
140 | "@container-md": "@container-desktop",
141 | "@container-large-desktop": "(1140px + @grid-gutter-width)",
142 | "@container-lg": "@container-large-desktop",
143 | "@navbar-height": "50px",
144 | "@navbar-margin-bottom": "@line-height-computed",
145 | "@navbar-border-radius": "@border-radius-base",
146 | "@navbar-padding-horizontal": "floor((@grid-gutter-width / 2))",
147 | "@navbar-padding-vertical": "((@navbar-height - @line-height-computed) / 2)",
148 | "@navbar-collapse-max-height": "340px",
149 | "@navbar-default-color": "#777",
150 | "@navbar-default-bg": "#f8f8f8",
151 | "@navbar-default-border": "darken(@navbar-default-bg, 6.5%)",
152 | "@navbar-default-link-color": "#777",
153 | "@navbar-default-link-hover-color": "#333",
154 | "@navbar-default-link-hover-bg": "transparent",
155 | "@navbar-default-link-active-color": "#555",
156 | "@navbar-default-link-active-bg": "darken(@navbar-default-bg, 6.5%)",
157 | "@navbar-default-link-disabled-color": "#ccc",
158 | "@navbar-default-link-disabled-bg": "transparent",
159 | "@navbar-default-brand-color": "@navbar-default-link-color",
160 | "@navbar-default-brand-hover-color": "darken(@navbar-default-brand-color, 10%)",
161 | "@navbar-default-brand-hover-bg": "transparent",
162 | "@navbar-default-toggle-hover-bg": "#ddd",
163 | "@navbar-default-toggle-icon-bar-bg": "#888",
164 | "@navbar-default-toggle-border-color": "#ddd",
165 | "@navbar-inverse-color": "lighten(@gray-light, 15%)",
166 | "@navbar-inverse-bg": "#222",
167 | "@navbar-inverse-border": "darken(@navbar-inverse-bg, 10%)",
168 | "@navbar-inverse-link-color": "lighten(@gray-light, 15%)",
169 | "@navbar-inverse-link-hover-color": "#fff",
170 | "@navbar-inverse-link-hover-bg": "transparent",
171 | "@navbar-inverse-link-active-color": "@navbar-inverse-link-hover-color",
172 | "@navbar-inverse-link-active-bg": "darken(@navbar-inverse-bg, 10%)",
173 | "@navbar-inverse-link-disabled-color": "#444",
174 | "@navbar-inverse-link-disabled-bg": "transparent",
175 | "@navbar-inverse-brand-color": "@navbar-inverse-link-color",
176 | "@navbar-inverse-brand-hover-color": "#fff",
177 | "@navbar-inverse-brand-hover-bg": "transparent",
178 | "@navbar-inverse-toggle-hover-bg": "#333",
179 | "@navbar-inverse-toggle-icon-bar-bg": "#fff",
180 | "@navbar-inverse-toggle-border-color": "#333",
181 | "@nav-link-padding": "10px 15px",
182 | "@nav-link-hover-bg": "@gray-lighter",
183 | "@nav-disabled-link-color": "@gray-light",
184 | "@nav-disabled-link-hover-color": "@gray-light",
185 | "@nav-tabs-border-color": "#ddd",
186 | "@nav-tabs-link-hover-border-color": "@gray-lighter",
187 | "@nav-tabs-active-link-hover-bg": "@body-bg",
188 | "@nav-tabs-active-link-hover-color": "@gray",
189 | "@nav-tabs-active-link-hover-border-color": "#ddd",
190 | "@nav-tabs-justified-link-border-color": "#ddd",
191 | "@nav-tabs-justified-active-link-border-color": "@body-bg",
192 | "@nav-pills-border-radius": "@border-radius-base",
193 | "@nav-pills-active-link-hover-bg": "@component-active-bg",
194 | "@nav-pills-active-link-hover-color": "@component-active-color",
195 | "@pagination-color": "@link-color",
196 | "@pagination-bg": "#fff",
197 | "@pagination-border": "#ddd",
198 | "@pagination-hover-color": "@link-hover-color",
199 | "@pagination-hover-bg": "@gray-lighter",
200 | "@pagination-hover-border": "#ddd",
201 | "@pagination-active-color": "#fff",
202 | "@pagination-active-bg": "@brand-primary",
203 | "@pagination-active-border": "@brand-primary",
204 | "@pagination-disabled-color": "@gray-light",
205 | "@pagination-disabled-bg": "#fff",
206 | "@pagination-disabled-border": "#ddd",
207 | "@pager-bg": "@pagination-bg",
208 | "@pager-border": "@pagination-border",
209 | "@pager-border-radius": "15px",
210 | "@pager-hover-bg": "@pagination-hover-bg",
211 | "@pager-active-bg": "@pagination-active-bg",
212 | "@pager-active-color": "@pagination-active-color",
213 | "@pager-disabled-color": "@pagination-disabled-color",
214 | "@jumbotron-padding": "30px",
215 | "@jumbotron-color": "inherit",
216 | "@jumbotron-bg": "@gray-lighter",
217 | "@jumbotron-heading-color": "inherit",
218 | "@jumbotron-font-size": "ceil((@font-size-base * 1.5))",
219 | "@jumbotron-heading-font-size": "ceil((@font-size-base * 4.5))",
220 | "@state-success-text": "#3c763d",
221 | "@state-success-bg": "#dff0d8",
222 | "@state-success-border": "darken(spin(@state-success-bg, -10), 5%)",
223 | "@state-info-text": "#31708f",
224 | "@state-info-bg": "#d9edf7",
225 | "@state-info-border": "darken(spin(@state-info-bg, -10), 7%)",
226 | "@state-warning-text": "#8a6d3b",
227 | "@state-warning-bg": "#fcf8e3",
228 | "@state-warning-border": "darken(spin(@state-warning-bg, -10), 5%)",
229 | "@state-danger-text": "#a94442",
230 | "@state-danger-bg": "#f2dede",
231 | "@state-danger-border": "darken(spin(@state-danger-bg, -10), 5%)",
232 | "@tooltip-max-width": "200px",
233 | "@tooltip-color": "#fff",
234 | "@tooltip-bg": "#000",
235 | "@tooltip-opacity": ".9",
236 | "@tooltip-arrow-width": "5px",
237 | "@tooltip-arrow-color": "@tooltip-bg",
238 | "@popover-bg": "#fff",
239 | "@popover-max-width": "276px",
240 | "@popover-border-color": "rgba(0,0,0,.2)",
241 | "@popover-fallback-border-color": "#ccc",
242 | "@popover-title-bg": "darken(@popover-bg, 3%)",
243 | "@popover-arrow-width": "10px",
244 | "@popover-arrow-color": "@popover-bg",
245 | "@popover-arrow-outer-width": "(@popover-arrow-width + 1)",
246 | "@popover-arrow-outer-color": "fadein(@popover-border-color, 5%)",
247 | "@popover-arrow-outer-fallback-color": "darken(@popover-fallback-border-color, 20%)",
248 | "@label-default-bg": "@gray-light",
249 | "@label-primary-bg": "@brand-primary",
250 | "@label-success-bg": "@brand-success",
251 | "@label-info-bg": "@brand-info",
252 | "@label-warning-bg": "@brand-warning",
253 | "@label-danger-bg": "@brand-danger",
254 | "@label-color": "#fff",
255 | "@label-link-hover-color": "#fff",
256 | "@modal-inner-padding": "15px",
257 | "@modal-title-padding": "15px",
258 | "@modal-title-line-height": "@line-height-base",
259 | "@modal-content-bg": "#fff",
260 | "@modal-content-border-color": "rgba(0,0,0,.2)",
261 | "@modal-content-fallback-border-color": "#999",
262 | "@modal-backdrop-bg": "#000",
263 | "@modal-backdrop-opacity": ".5",
264 | "@modal-header-border-color": "#e5e5e5",
265 | "@modal-footer-border-color": "@modal-header-border-color",
266 | "@modal-lg": "900px",
267 | "@modal-md": "600px",
268 | "@modal-sm": "300px",
269 | "@alert-padding": "15px",
270 | "@alert-border-radius": "@border-radius-base",
271 | "@alert-link-font-weight": "bold",
272 | "@alert-success-bg": "@state-success-bg",
273 | "@alert-success-text": "@state-success-text",
274 | "@alert-success-border": "@state-success-border",
275 | "@alert-info-bg": "@state-info-bg",
276 | "@alert-info-text": "@state-info-text",
277 | "@alert-info-border": "@state-info-border",
278 | "@alert-warning-bg": "@state-warning-bg",
279 | "@alert-warning-text": "@state-warning-text",
280 | "@alert-warning-border": "@state-warning-border",
281 | "@alert-danger-bg": "@state-danger-bg",
282 | "@alert-danger-text": "@state-danger-text",
283 | "@alert-danger-border": "@state-danger-border",
284 | "@progress-bg": "#f5f5f5",
285 | "@progress-bar-color": "#fff",
286 | "@progress-border-radius": "@border-radius-base",
287 | "@progress-bar-bg": "@brand-primary",
288 | "@progress-bar-success-bg": "@brand-success",
289 | "@progress-bar-warning-bg": "@brand-warning",
290 | "@progress-bar-danger-bg": "@brand-danger",
291 | "@progress-bar-info-bg": "@brand-info",
292 | "@list-group-bg": "#fff",
293 | "@list-group-border": "#ddd",
294 | "@list-group-border-radius": "@border-radius-base",
295 | "@list-group-hover-bg": "#f5f5f5",
296 | "@list-group-active-color": "@component-active-color",
297 | "@list-group-active-bg": "@component-active-bg",
298 | "@list-group-active-border": "@list-group-active-bg",
299 | "@list-group-active-text-color": "lighten(@list-group-active-bg, 40%)",
300 | "@list-group-disabled-color": "@gray-light",
301 | "@list-group-disabled-bg": "@gray-lighter",
302 | "@list-group-disabled-text-color": "@list-group-disabled-color",
303 | "@list-group-link-color": "#555",
304 | "@list-group-link-hover-color": "@list-group-link-color",
305 | "@list-group-link-heading-color": "#333",
306 | "@panel-bg": "#fff",
307 | "@panel-body-padding": "15px",
308 | "@panel-heading-padding": "10px 15px",
309 | "@panel-footer-padding": "@panel-heading-padding",
310 | "@panel-border-radius": "@border-radius-base",
311 | "@panel-inner-border": "#ddd",
312 | "@panel-footer-bg": "#f5f5f5",
313 | "@panel-default-text": "@gray-dark",
314 | "@panel-default-border": "#ddd",
315 | "@panel-default-heading-bg": "#f5f5f5",
316 | "@panel-primary-text": "#fff",
317 | "@panel-primary-border": "@brand-primary",
318 | "@panel-primary-heading-bg": "@brand-primary",
319 | "@panel-success-text": "@state-success-text",
320 | "@panel-success-border": "@state-success-border",
321 | "@panel-success-heading-bg": "@state-success-bg",
322 | "@panel-info-text": "@state-info-text",
323 | "@panel-info-border": "@state-info-border",
324 | "@panel-info-heading-bg": "@state-info-bg",
325 | "@panel-warning-text": "@state-warning-text",
326 | "@panel-warning-border": "@state-warning-border",
327 | "@panel-warning-heading-bg": "@state-warning-bg",
328 | "@panel-danger-text": "@state-danger-text",
329 | "@panel-danger-border": "@state-danger-border",
330 | "@panel-danger-heading-bg": "@state-danger-bg",
331 | "@thumbnail-padding": "4px",
332 | "@thumbnail-bg": "@body-bg",
333 | "@thumbnail-border": "#ddd",
334 | "@thumbnail-border-radius": "@border-radius-base",
335 | "@thumbnail-caption-color": "@text-color",
336 | "@thumbnail-caption-padding": "9px",
337 | "@well-bg": "#f5f5f5",
338 | "@well-border": "darken(@well-bg, 7%)",
339 | "@badge-color": "#fff",
340 | "@badge-link-hover-color": "#fff",
341 | "@badge-bg": "@gray-light",
342 | "@badge-active-color": "@link-color",
343 | "@badge-active-bg": "#fff",
344 | "@badge-font-weight": "bold",
345 | "@badge-line-height": "1",
346 | "@badge-border-radius": "10px",
347 | "@breadcrumb-padding-vertical": "8px",
348 | "@breadcrumb-padding-horizontal": "15px",
349 | "@breadcrumb-bg": "#f5f5f5",
350 | "@breadcrumb-color": "#ccc",
351 | "@breadcrumb-active-color": "@gray-light",
352 | "@breadcrumb-separator": "\"/\"",
353 | "@carousel-text-shadow": "0 1px 2px rgba(0,0,0,.6)",
354 | "@carousel-control-color": "#fff",
355 | "@carousel-control-width": "15%",
356 | "@carousel-control-opacity": ".5",
357 | "@carousel-control-font-size": "20px",
358 | "@carousel-indicator-active-bg": "#fff",
359 | "@carousel-indicator-border-color": "#fff",
360 | "@carousel-caption-color": "#fff",
361 | "@close-font-weight": "bold",
362 | "@close-color": "#000",
363 | "@close-text-shadow": "0 1px 0 #fff",
364 | "@code-color": "#c7254e",
365 | "@code-bg": "#f9f2f4",
366 | "@kbd-color": "#fff",
367 | "@kbd-bg": "#333",
368 | "@pre-bg": "#f5f5f5",
369 | "@pre-color": "@gray-dark",
370 | "@pre-border-color": "#ccc",
371 | "@pre-scrollable-max-height": "340px",
372 | "@component-offset-horizontal": "180px",
373 | "@text-muted": "@gray-light",
374 | "@abbr-border-color": "@gray-light",
375 | "@headings-small-color": "@gray-light",
376 | "@blockquote-small-color": "@gray-light",
377 | "@blockquote-font-size": "(@font-size-base * 1.25)",
378 | "@blockquote-border-color": "@gray-lighter",
379 | "@page-header-border-color": "@gray-lighter",
380 | "@dl-horizontal-offset": "@component-offset-horizontal",
381 | "@dl-horizontal-breakpoint": "@grid-float-breakpoint",
382 | "@hr-border": "@gray-lighter"
383 | },
384 | "css": [
385 | "type.less",
386 | "grid.less",
387 | "forms.less",
388 | "responsive-utilities.less",
389 | "input-groups.less",
390 | "tooltip.less",
391 | "popovers.less"
392 | ],
393 | "js": [
394 | "tooltip.js",
395 | "popover.js"
396 | ],
397 | "customizerUrl": "http://getbootstrap.com/customize/?id=8dcb476a93d73c3de3564069682bb493"
398 | }
--------------------------------------------------------------------------------
/src/static/bootstrap/js/bootstrap.min.js:
--------------------------------------------------------------------------------
1 | /*!
2 | * Bootstrap v3.3.7 (http://getbootstrap.com)
3 | * Copyright 2011-2016 Twitter, Inc.
4 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
5 | */
6 |
7 | /*!
8 | * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=8dcb476a93d73c3de3564069682bb493)
9 | * Config saved to config.json and https://gist.github.com/8dcb476a93d73c3de3564069682bb493
10 | */
11 | if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(t){"use strict";var e=t.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||e[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var i=t(this),n=i.data("bs.tooltip"),s="object"==typeof e&&e;!n&&/destroy|hide/.test(e)||(n||i.data("bs.tooltip",n=new o(this,s)),"string"==typeof e&&n[e]())})}var o=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};o.VERSION="3.3.7",o.TRANSITION_DURATION=150,o.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'
',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},o.prototype.init=function(e,o,i){if(this.enabled=!0,this.type=e,this.$element=t(o),this.options=this.getOptions(i),this.$viewport=this.options.viewport&&t(t.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var n=this.options.trigger.split(" "),s=n.length;s--;){var r=n[s];if("click"==r)this.$element.on("click."+this.type,this.options.selector,t.proxy(this.toggle,this));else if("manual"!=r){var p="hover"==r?"mouseenter":"focusin",l="hover"==r?"mouseleave":"focusout";this.$element.on(p+"."+this.type,this.options.selector,t.proxy(this.enter,this)),this.$element.on(l+"."+this.type,this.options.selector,t.proxy(this.leave,this))}}this.options.selector?this._options=t.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},o.prototype.getDefaults=function(){return o.DEFAULTS},o.prototype.getOptions=function(e){return e=t.extend({},this.getDefaults(),this.$element.data(),e),e.delay&&"number"==typeof e.delay&&(e.delay={show:e.delay,hide:e.delay}),e},o.prototype.getDelegateOptions=function(){var e={},o=this.getDefaults();return this._options&&t.each(this._options,function(t,i){o[t]!=i&&(e[t]=i)}),e},o.prototype.enter=function(e){var o=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return o||(o=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,o)),e instanceof 
t.Event&&(o.inState["focusin"==e.type?"focus":"hover"]=!0),o.tip().hasClass("in")||"in"==o.hoverState?void(o.hoverState="in"):(clearTimeout(o.timeout),o.hoverState="in",o.options.delay&&o.options.delay.show?void(o.timeout=setTimeout(function(){"in"==o.hoverState&&o.show()},o.options.delay.show)):o.show())},o.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},o.prototype.leave=function(e){var o=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return o||(o=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,o)),e instanceof t.Event&&(o.inState["focusout"==e.type?"focus":"hover"]=!1),o.isInStateTrue()?void 0:(clearTimeout(o.timeout),o.hoverState="out",o.options.delay&&o.options.delay.hide?void(o.timeout=setTimeout(function(){"out"==o.hoverState&&o.hide()},o.options.delay.hide)):o.hide())},o.prototype.show=function(){var e=t.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(e);var i=t.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(e.isDefaultPrevented()||!i)return;var n=this,s=this.tip(),r=this.getUID(this.type);this.setContent(),s.attr("id",r),this.$element.attr("aria-describedby",r),this.options.animation&&s.addClass("fade");var p="function"==typeof this.options.placement?this.options.placement.call(this,s[0],this.$element[0]):this.options.placement,l=/\s?auto?\s?/i,a=l.test(p);a&&(p=p.replace(l,"")||"top"),s.detach().css({top:0,left:0,display:"block"}).addClass(p).data("bs."+this.type,this),this.options.container?s.appendTo(this.options.container):s.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var h=this.getPosition(),f=s[0].offsetWidth,c=s[0].offsetHeight;if(a){var u=p,d=this.getPosition(this.$viewport);p="bottom"==p&&h.bottom+c>d.bottom?"top":"top"==p&&h.top-cd.width?"left":"left"==p&&h.left-fr.top+r.height&&(n.top=r.top+r.height-l)}else{var 
a=e.left-s,h=e.left+s+o;ar.right&&(n.left=r.left+r.width-h)}return n},o.prototype.getTitle=function(){var t,e=this.$element,o=this.options;return t=e.attr("data-original-title")||("function"==typeof o.title?o.title.call(e[0]):o.title)},o.prototype.getUID=function(t){do t+=~~(1e6*Math.random());while(document.getElementById(t));return t},o.prototype.tip=function(){if(!this.$tip&&(this.$tip=t(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},o.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},o.prototype.enable=function(){this.enabled=!0},o.prototype.disable=function(){this.enabled=!1},o.prototype.toggleEnabled=function(){this.enabled=!this.enabled},o.prototype.toggle=function(e){var o=this;e&&(o=t(e.currentTarget).data("bs."+this.type),o||(o=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,o))),e?(o.inState.click=!o.inState.click,o.isInStateTrue()?o.enter(o):o.leave(o)):o.tip().hasClass("in")?o.leave(o):o.enter(o)},o.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})};var i=t.fn.tooltip;t.fn.tooltip=e,t.fn.tooltip.Constructor=o,t.fn.tooltip.noConflict=function(){return t.fn.tooltip=i,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var i=t(this),n=i.data("bs.popover"),s="object"==typeof e&&e;!n&&/destroy|hide/.test(e)||(n||i.data("bs.popover",n=new o(this,s)),"string"==typeof e&&n[e]())})}var o=function(t,e){this.init("popover",t,e)};if(!t.fn.tooltip)throw new Error("Popover requires tooltip.js");o.VERSION="3.3.7",o.DEFAULTS=t.extend({},t.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'