├── AnkiServer ├── apps │ ├── __init__.py │ ├── sync_app.py │ └── rest_app.py ├── utils.py ├── find.py ├── __init__.py ├── logpatch.py ├── importer.py ├── collection.py └── threading.py ├── CHANGES.txt ├── .gitmodules ├── .travis.yml ├── .gitignore ├── MANIFEST.in ├── .coveragerc ├── example.ini ├── logging.conf ├── supervisor-anki-server.conf ├── generate-test-locale.sh ├── tests ├── CollectionTestBase.py ├── test_importer.py ├── test_collection.py ├── test_sync_app.py └── test_rest_app.py ├── setup.py ├── ankiserverctl.py ├── README.rst └── LICENSE.txt /AnkiServer/apps/__init__.py: -------------------------------------------------------------------------------- 1 | # package 2 | 3 | -------------------------------------------------------------------------------- /CHANGES.txt: -------------------------------------------------------------------------------- 1 | 2 | V2.0.0, 2013-XX-XX -- Initial release for Anki 2.0. 3 | 4 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "anki-bundled"] 2 | path = anki-bundled 3 | url = https://github.com/dae/anki.git 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | before_script: 5 | - ./generate-test-locale.sh anki-bundled/anki 6 | script: 7 | - python setup.py test 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.pyc 3 | /.coverage 4 | /AnkiServer.egg-info 5 | /development.ini 6 | /server.log 7 | /collections 8 | /session.db 9 | /auth.db 10 | /dist 11 | /build 12 | -------------------------------------------------------------------------------- /MANIFEST.in: 
--------------------------------------------------------------------------------
include *.txt *.rst
recursive-include anki-bundled *
recursive-exclude anki-bundled *.pyc
prune anki-bundled/.git
include example.ini logging.conf supervisor-anki-server.conf
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
[run]
branch = True
include =
    AnkiServer/*

[report]
exclude_lines =
    if __name__ == .__main__.:
    def server_runner
    def make_app

--------------------------------------------------------------------------------
/example.ini:
--------------------------------------------------------------------------------

[server:main]
use = egg:AnkiServer#server
host = 127.0.0.1
port = 27701

[filter-app:main]
use = egg:Paste#translogger
next = real

[app:real]
use = egg:Paste#urlmap
/ = rest_app
/msync = sync_app
/sync = sync_app

[app:rest_app]
use = egg:AnkiServer#rest_app
data_root = ./collections
allowed_hosts = 127.0.0.1
;logging.config_file = logging.conf

[app:sync_app]
use = egg:AnkiServer#sync_app
data_root = ./collections
base_url = /sync/
base_media_url = /msync/
session_db_path = ./session.db
auth_db_path = ./auth.db

--------------------------------------------------------------------------------
/logging.conf:
--------------------------------------------------------------------------------

[loggers]
keys=root

[handlers]
keys=screen,file,email

[formatters]
keys=normal,email

[logger_root]
level=INFO
handlers=screen
#handlers=file
#handlers=file,email

[handler_file]
class=FileHandler
formatter=normal
args=('server.log','a')

[handler_screen]
class=StreamHandler
level=NOTSET
formatter=normal
args=(sys.stdout,)

[handler_email]
class=handlers.SMTPHandler
level=ERROR
formatter=email
args=('smtp.example.com', 'support@example.com', ['support_guy1@example.com', 'support_guy2@example.com'], 'AnkiServer error', ('smtp_user', 'smtp_password'))

[formatter_normal]
format=%(asctime)s:%(name)s:%(levelname)s:%(message)s
datefmt=

[formatter_email]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
datefmt=

--------------------------------------------------------------------------------
/supervisor-anki-server.conf:
--------------------------------------------------------------------------------
[program:anki-server]

; The command used to execute the Anki Server. If you setup a virtualenv like described
; in the README.rst, then be sure to point to the "paster" command inside of it! All files
; are relative to the "directory" variable given below
command=/usr/local/bin/paster serve production.ini

; This is the directory to execute the Anki Server from. All files will be relative to this
; directory. This includes arguments to the "command" above and in the configuration files.
directory=/var/lib/anki

; This is the user the Anki Server will run as. It should have permission to read and write
; the Anki collections referred to in the configuration file, but, for security reasons it
; shouldn't be "root"!
user=anki

autostart=true
autorestart=true
redirect_stderr=true

; Sometimes necessary if Anki is complaining about a UTF-8 locale. Make sure
; that the locale you pick is actually installed on your system.
;environment=LANG=en_US.UTF-8,LC_ALL=en_US.UTF-8,LC_LANG=en_US.UTF-8

--------------------------------------------------------------------------------
/generate-test-locale.sh:
--------------------------------------------------------------------------------
#!/bin/bash

DEST=$1

if [ x$DEST = x ]; then
    echo "Destination directory where to create local directory must be given as the first argument"
    exit 1
fi

if ! [ -d $DEST ]; then
    echo "Destination $DEST does not exist"
    exit 1
fi

if [ -e $DEST/locale ]; then
    echo "$DEST/locale already exists"
    exit 1
fi

PL_DEST=$DEST/locale/pl/LC_MESSAGES
mkdir -p $PL_DEST
cat > $PL_DEST/anki.po <<EOL
msgid ""
msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=3; plural=n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 "
"|| n%100>=20) ? 1 : 2;\n"
"Language: pl\n"

msgid "Again"
msgstr "Znowu"

msgid "Good"
msgstr "Dobra"

msgid "Easy"
msgstr "Łatwa"

msgid "%s minute"
msgid_plural "%s minutes"
msgstr[0] "%s minuta"
msgstr[1] "%s minuty"
msgstr[2] "%s minut"

msgid "%s day"
msgid_plural "%s days"
msgstr[0] "%s dzień"
msgstr[1] "%s dni"
msgstr[2] "%s dni"
EOL

(cd $PL_DEST && msgfmt anki.po --output-file anki.mo)

--------------------------------------------------------------------------------
/AnkiServer/utils.py:
--------------------------------------------------------------------------------

# AnkiServer - A personal Anki sync server
# Copyright (C) 2013 David Snopek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

def setup_logging(config_file=None):
    """Configure logging for the server.

    config_file -- optional path to a fileConfig-style configuration;
    when omitted, the root logger is simply set to the INFO level.
    """

    import logging

    if config_file is None:
        # No configuration file given: just make INFO and above visible.
        logging.getLogger().setLevel(logging.INFO)
        return

    # Python 2.5's SMTPHandler doesn't take credentials, so monkey patch
    # in the backported handler before parsing the config file.
    import sys
    if sys.version_info[:2] == (2, 5):
        import AnkiServer.logpatch

    # load the config file
    import logging.config
    logging.config.fileConfig(config_file)

# --------------------------------------------------------------------------------
# /AnkiServer/find.py:
# --------------------------------------------------------------------------------

# AnkiServer - A personal Anki sync server
# Copyright (C) 2013 David Snopek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import anki.find

# TODO: Make a patch against Anki and get a non-hack solution in anki.find.Finder
class Finder(anki.find.Finder):
    """A sub-class of anki.find.Finder that hacks in support for limit/offset in findCards()."""

    # Both default to 0, which means "no LIMIT/OFFSET clause is added".
    limit = 0
    offset = 0

    def _query(self, preds, order):
        # Let the stock Finder build the SQL, then append our paging.
        sql = super(Finder, self)._query(preds, order)
        if self.limit:
            sql += ' LIMIT ' + str(self.limit)
        if self.offset:
            sql += ' OFFSET ' + str(self.offset)
        return sql

# --------------------------------------------------------------------------------
# /tests/CollectionTestBase.py:
# --------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import unittest
import tempfile
import os
from mock import MagicMock
import shutil

import anki
import anki.storage


class CollectionTestBase(unittest.TestCase):
    """Parent class for tests that need a collection set up and torn down."""

    def setUp(self):
        # Each test gets a private temp dir holding a fresh collection.
        self.temp_dir = tempfile.mkdtemp()
        self.collection_path = os.path.join(self.temp_dir, 'collection.anki2')
        self.collection = anki.storage.Collection(self.collection_path)
        self.mock_app = MagicMock()

    def tearDown(self):
        self.collection.close()
        self.collection = None
        shutil.rmtree(self.temp_dir)
        self.mock_app.reset_mock()

    # TODO: refactor into some kind of utility
    def add_note(self, data):
        """Add one note described by a dict with 'model', 'fields' and optional 'tags'."""
        from anki.notes import Note

        model = self.collection.models.byName(data['model'])

        note = Note(self.collection, model)
        for name, value in data['fields'].items():
            note[name] = value

        # 'in' instead of the deprecated dict.has_key()
        if 'tags' in data:
            note.setTagsFromStr(data['tags'])

        self.collection.addNote(note)

    # TODO: refactor into a parent class
    def add_default_note(self, count=1):
        """Add `count` copies of a simple two-field, two-tag note."""
        data = {
            'model': 'Basic',
            'fields': {
                'Front': 'The front',
                'Back': 'The back',
            },
            'tags': "Tag1 Tag2",
        }
        for idx in range(0, count):
            self.add_note(data)

# --------------------------------------------------------------------------------
# /AnkiServer/__init__.py:
# --------------------------------------------------------------------------------

# AnkiServer - A personal Anki sync server
# Copyright (C) 2013 David Snopek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import sys, os.path
# We put the system installed Anki first!
sys.path.insert(0, "/usr/share/anki")
# We'll put our bundled Anki after it
sys.path.insert(1, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'anki-bundled'))

__author__ = "David Snopek <dsnopek@gmail.com>"
__copyright__ = "Copyright (C) 2013 David Snopek"
__license__ = "GNU Affero General Public License v3 or later (AGPLv3+)"
__version__ = "2.0.6"

__all__ = []

def server_runner(app, global_conf, **kw):
    """ Special version of paste.httpserver.server_runner which calls
    AnkiServer.threading.shutdown() on server exit."""

    from paste.httpserver import server_runner as paste_server_runner
    from AnkiServer.threading import shutdown
    try:
        paste_server_runner(app, global_conf, **kw)
    finally:
        # Always stop the collection worker threads, even if the
        # server exits with an error.
        shutdown()

# --------------------------------------------------------------------------------
# /setup.py:
# --------------------------------------------------------------------------------

from setuptools import setup

def get_anki_bundled_files():
    """Collect every file in the bundled Anki tree as data_files entries."""
    import os
    data_files = []
    for root, dirs, files in os.walk('anki-bundled'):
        data_files.append((root, [os.path.join(root, f) for f in files]))
    return data_files

setup(
    name="AnkiServer",
    version="2.0.6",
    description="A personal Anki sync server (so you can sync against your own server rather than AnkiWeb)",
    long_description=open('README.rst').read(),
    license='LICENSE.txt',
    author="David Snopek",
    author_email="dsnopek@gmail.com",
    url="https://github.com/dsnopek/anki-sync-server",
    install_requires=[
        "PasteDeploy>=1.3.2",
        "PasteScript>=1.7.3",
        "WebOb>=0.9.7",
        "SQLAlchemy>=0.6.3",
    ],
    tests_require=[
        'nose>=1.3.0',
        'mock>=1.0.0,<2.0.0a',
    ],
    data_files=get_anki_bundled_files()+[
        ('examples', [
            'example.ini',
            'logging.conf',
            'supervisor-anki-server.conf',
        ]),
    ],
    zip_safe=False,
    test_suite='nose.collector',
    scripts=['ankiserverctl.py'],
    packages=['AnkiServer','AnkiServer.apps'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: End Users/Desktop',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Topic :: Education',
        'Topic :: Education :: Computer Aided Instruction (CAI)',
        'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
        'Topic :: Utilities',
    ],
    entry_points="""
    [paste.app_factory]
    sync_app = AnkiServer.apps.sync_app:make_app
    rest_app = AnkiServer.apps.rest_app:make_app

    [paste.server_runner]
    server = AnkiServer:server_runner
    """,
)

# --------------------------------------------------------------------------------
# /tests/test_importer.py:
# --------------------------------------------------------------------------------

import os
import shutil
import tempfile
import unittest

import mock
from mock import MagicMock, sentinel

import AnkiServer
from AnkiServer.importer import get_importer_class, import_file

import anki.storage

# TODO: refactor into some kind of utility
def add_note(col, data):
    """Add one note to `col` from a dict with 'model', 'fields' and optional 'tags'."""
    from anki.notes import Note

    model = col.models.byName(data['model'])

    note = Note(col, model)
    for name, value in data['fields'].items():
        note[name] = value

    # 'in' instead of the deprecated dict.has_key()
    if 'tags' in data:
        note.setTagsFromStr(data['tags'])

    col.addNote(note)

class ImporterTest(unittest.TestCase):
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.collection_path = os.path.join(self.temp_dir, 'collection.anki2')
        self.collection = anki.storage.Collection(self.collection_path)

    def tearDown(self):
        self.collection.close()
        self.collection = None
        shutil.rmtree(self.temp_dir)


    # TODO: refactor into a parent class
    def add_default_note(self, count=1):
        """Add `count` copies of a simple two-field note."""
        data = {
            'model': 'Basic',
            'fields': {
                'Front': 'The front',
                'Back': 'The back',
            },
            'tags': "Tag1 Tag2",
        }
        for idx in range(0, count):
            # BUG FIX: the old code also called self.add_note(data) here, but
            # this class has no add_note method (it's the module-level function
            # above), so each iteration raised AttributeError after adding.
            add_note(self.collection, data)

    def test_resync(self):
        from anki.exporting import AnkiPackageExporter
        from anki.utils import intTime

        # create a new collection with a single note
        src_collection = anki.storage.Collection(os.path.join(self.temp_dir, 'src_collection.anki2'))
        add_note(src_collection, {
            'model': 'Basic',
            'fields': {
                'Front': 'The front',
                'Back': 'The back',
            },
            'tags': 'Tag1 Tag2',
        })
        note_id = src_collection.findNotes('')[0]
        note = src_collection.getNote(note_id)
        self.assertEqual(note.id, note_id)
        self.assertEqual(note['Front'], 'The front')
        self.assertEqual(note['Back'], 'The back')

        # export to an .apkg file
        dst1_path = os.path.join(self.temp_dir, 'export1.apkg')
        exporter = AnkiPackageExporter(src_collection)
        exporter.exportInto(dst1_path)

        # import it into the main collection
        import_file(get_importer_class('apkg'), self.collection, dst1_path)

        # make sure the note exists
        note = self.collection.getNote(note_id)
        self.assertEqual(note.id, note_id)
        self.assertEqual(note['Front'], 'The front')
        self.assertEqual(note['Back'], 'The back')

        # now we change the source collection and re-export it
        note = src_collection.getNote(note_id)
        note['Front'] = 'The new front'
        note.tags.append('Tag3')
        note.flush(intTime()+1)
        dst2_path = os.path.join(self.temp_dir, 'export2.apkg')
        exporter = AnkiPackageExporter(src_collection)
        exporter.exportInto(dst2_path)

        # first, import it without allow_update - no change should happen
        import_file(get_importer_class('apkg'), self.collection, dst2_path, allow_update=False)
        note = self.collection.getNote(note_id)
        self.assertEqual(note['Front'], 'The front')
        self.assertEqual(note.tags, ['Tag1', 'Tag2'])

        # now, import it with allow_update=True, so the note should change
        import_file(get_importer_class('apkg'), self.collection, dst2_path, allow_update=True)
        note = self.collection.getNote(note_id)
        self.assertEqual(note['Front'], 'The new front')
        self.assertEqual(note.tags, ['Tag1', 'Tag2', 'Tag3'])

if __name__ == '__main__':
    unittest.main()

# --------------------------------------------------------------------------------
# /AnkiServer/logpatch.py:
# --------------------------------------------------------------------------------

# AnkiServer - A personal Anki sync server
# Copyright (C) 2013 David Snopek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import logging
import logging.handlers
import string   # BUG FIX: used by emit() below (string.join) but was never imported
import time     # BUG FIX: used by date_time() below but was never imported
import types

# The SMTPHandler taken from python 2.6
class SMTPHandler(logging.Handler):
    """
    A handler class which sends an SMTP email for each logging event.
    """
    def __init__(self, mailhost, fromaddr, toaddrs, subject, credentials=None):
        """
        Initialize the handler.

        Initialize the instance with the from and to addresses and subject
        line of the email. To specify a non-standard SMTP port, use the
        (host, port) tuple format for the mailhost argument. To specify
        authentication credentials, supply a (username, password) tuple
        for the credentials argument.
        """
        logging.Handler.__init__(self)
        if type(mailhost) == types.TupleType:
            self.mailhost, self.mailport = mailhost
        else:
            self.mailhost, self.mailport = mailhost, None
        if type(credentials) == types.TupleType:
            self.username, self.password = credentials
        else:
            self.username = None
        self.fromaddr = fromaddr
        if type(toaddrs) == types.StringType:
            toaddrs = [toaddrs]
        self.toaddrs = toaddrs
        self.subject = subject

    def getSubject(self, record):
        """
        Determine the subject for the email.

        If you want to specify a subject line which is record-dependent,
        override this method.
        """
        return self.subject

    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']

    monthname = [None,
                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

    def date_time(self):
        """
        Return the current date and time formatted for a MIME header.
        Needed for Python 1.5.2 (no email package available)
        """
        year, month, day, hh, mm, ss, wd, y, z = time.gmtime(time.time())
        s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
                self.weekdayname[wd],
                day, self.monthname[month], year,
                hh, mm, ss)
        return s

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified addressees.
        """
        try:
            import smtplib
            try:
                from email.utils import formatdate
            except ImportError:
                # fall back to our own formatter on very old Pythons
                formatdate = self.date_time
            port = self.mailport
            if not port:
                port = smtplib.SMTP_PORT
            smtp = smtplib.SMTP(self.mailhost, port)
            msg = self.format(record)
            msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
                            self.fromaddr,
                            string.join(self.toaddrs, ","),
                            self.getSubject(record),
                            formatdate(), msg)
            if self.username:
                smtp.login(self.username, self.password)
            smtp.sendmail(self.fromaddr, self.toaddrs, msg)
            smtp.quit()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

# Monkey patch logging.handlers
logging.handlers.SMTPHandler = SMTPHandler

# --------------------------------------------------------------------------------
# /AnkiServer/importer.py:
# --------------------------------------------------------------------------------

# AnkiServer - A personal Anki sync server
# Copyright (C) 2013 David Snopek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from anki.importing.csvfile import TextImporter
from anki.importing.apkg import AnkiPackageImporter
from anki.importing.anki1 import Anki1Importer
from anki.importing.supermemo_xml import SupermemoXmlImporter
from anki.importing.mnemo import MnemosyneImporter
from anki.importing.pauker import PaukerImporter

from anki import version as anki_version
from distutils.version import StrictVersion

__all__ = ['get_importer_class', 'import_file']

# Maps the importer 'type' strings used by the REST API to importer classes.
importers = {
    'text': TextImporter,
    'apkg': AnkiPackageImporter,
    'anki1': Anki1Importer,
    'supermemo_xml': SupermemoXmlImporter,
    'mnemosyne': MnemosyneImporter,
    'pauker': PaukerImporter,
}

def get_importer_class(type):
    """Return the importer class registered for `type`, or None."""
    global importers
    return importers.get(type)

def import_file(importer_class, col, path, allow_update = True):
    """Import the file at `path` into collection `col` using `importer_class`.

    allow_update -- when True, existing notes with a matching guid may be
    updated by newer incoming notes.
    """
    importer = importer_class(col, path)
    importer.allowUpdate = allow_update

    if importer.needMapper:
        importer.open()

    importer.run()

# Monkey patch anki.importing.anki2 to support updating existing notes on Anki
# versions 2.0.12 and earlier. This was added upstream with version 2.0.13.
if StrictVersion(anki_version) < StrictVersion('2.0.13'):
    def _importNotes(self):
        # build guid -> (id,mod,mid) hash & map of existing note ids
        self._notes = {}
        existing = {}
        for id, guid, mod, mid in self.dst.db.execute(
            "select id, guid, mod, mid from notes"):
            self._notes[guid] = (id, mod, mid)
            existing[id] = True
        # we may need to rewrite the guid if the model schemas don't match,
        # so we need to keep track of the changes for the card import stage
        self._changedGuids = {}
        # iterate over source collection
        add = []
        dirty = []
        usn = self.dst.usn()
        dupes = 0
        for note in self.src.db.execute(
            "select * from notes"):
            # turn the db result into a mutable list
            note = list(note)
            shouldAdd = self._uniquifyNote(note)
            if shouldAdd:
                # ensure id is unique
                while note[0] in existing:
                    note[0] += 999
                existing[note[0]] = True
                # bump usn
                note[4] = usn
                # update media references in case of dupes
                note[6] = self._mungeMedia(note[MID], note[6])
                add.append(note)
                dirty.append(note[0])
                # note we have the added the guid
                self._notes[note[GUID]] = (note[0], note[3], note[MID])
            else:
                dupes += 1

                # BUG FIX: look up the existing note that shares this guid.
                # The old code compared against the stale `mod`/`mid` loop
                # variables left over from the destination-db scan above,
                # which only happened to be correct when the destination
                # contained a single note.
                localNid, localMod, localMid = self._notes[note[GUID]]
                # update existing note only when the incoming copy is newer
                # (note[3] is its mod time) and the model mapping is unchanged
                newer = note[3] > localMod
                if self.allowUpdate and self._mid(note[MID]) == localMid and newer:
                    note[0] = localNid
                    note[4] = usn
                    add.append(note)
                    dirty.append(note[0])

        if dupes:
            self.log.append(_("Already in collection: %s.") % (ngettext(
                "%d note", "%d notes", dupes) % dupes))
        # add to col
        self.dst.db.executemany(
            "insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
            add)
        self.dst.updateFieldCache(dirty)
        self.dst.tags.registerNotes(dirty)

    from anki.importing.anki2 import Anki2Importer, MID, GUID
    from anki.lang import _, ngettext
Anki2Importer._importNotes = _importNotes 114 | Anki2Importer.allowUpdate = True 115 | 116 | -------------------------------------------------------------------------------- /AnkiServer/collection.py: -------------------------------------------------------------------------------- 1 | 2 | # AnkiServer - A personal Anki sync server 3 | # Copyright (C) 2013 David Snopek 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Affero General Public License as 7 | # published by the Free Software Foundation, either version 3 of the 8 | # License, or (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU Affero General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Affero General Public License 16 | # along with this program. If not, see . 17 | 18 | import anki 19 | import anki.storage 20 | 21 | import os, errno 22 | 23 | __all__ = ['CollectionWrapper', 'CollectionManager'] 24 | 25 | class CollectionWrapper(object): 26 | """A simple wrapper around an anki.storage.Collection object. 27 | 28 | This allows us to manage and refer to the collection, whether it's open or not. It 29 | also provides a special "continuation passing" interface for executing functions 30 | on the collection, which makes it easy to switch to a threading mode. 31 | 32 | See ThreadingCollectionWrapper for a version that maintains a seperate thread for 33 | interacting with the collection. 
34 | """ 35 | 36 | def __init__(self, path, setup_new_collection=None): 37 | self.path = os.path.realpath(path) 38 | self.setup_new_collection = setup_new_collection 39 | self.__col = None 40 | 41 | def __del__(self): 42 | """Close the collection if the user forgot to do so.""" 43 | self.close() 44 | 45 | def execute(self, func, args=[], kw={}, waitForReturn=True): 46 | """ Executes the given function with the underlying anki.storage.Collection 47 | object as the first argument and any additional arguments specified by *args 48 | and **kw. 49 | 50 | If 'waitForReturn' is True, then it will block until the function has 51 | executed and return its return value. If False, the function MAY be 52 | executed some time later and None will be returned. 53 | """ 54 | 55 | # Open the collection and execute the function 56 | self.open() 57 | args = [self.__col] + args 58 | ret = func(*args, **kw) 59 | 60 | # Only return the value if they requested it, so the interface remains 61 | # identical between this class and ThreadingCollectionWrapper 62 | if waitForReturn: 63 | return ret 64 | 65 | def __create_collection(self): 66 | """Creates a new collection and runs any special setup.""" 67 | 68 | # mkdir -p the path, because it might not exist 69 | dirname = os.path.dirname(self.path) 70 | try: 71 | os.makedirs(dirname) 72 | except OSError, exc: 73 | if exc.errno == errno.EEXIST: 74 | pass 75 | else: 76 | raise 77 | 78 | col = anki.storage.Collection(self.path) 79 | 80 | # Do any special setup 81 | if self.setup_new_collection is not None: 82 | self.setup_new_collection(col) 83 | 84 | return col 85 | 86 | def open(self): 87 | """Open the collection, or create it if it doesn't exist.""" 88 | if self.__col is None: 89 | if os.path.exists(self.path): 90 | self.__col = anki.storage.Collection(self.path) 91 | else: 92 | self.__col = self.__create_collection() 93 | 94 | # If for some reason the underlying Collection is closed, then 95 | # we attempt to re-open it! 
        # (this probably shouldn't happen, but
        # I'm seeing it in production...)
        if not self.__col.db:
            self.__col.reopen()

    def close(self):
        """Close the collection if opened."""
        if not self.opened():
            return

        self.__col.close()
        self.__col = None

    def opened(self):
        """Returns True if the collection is open, False otherwise."""
        return self.__col is not None

class CollectionManager(object):
    """Manages a set of CollectionWrapper objects."""

    # subclasses may override this to use e.g. a threading wrapper class
    collection_wrapper = CollectionWrapper

    def __init__(self):
        # maps the real (os.path.realpath) collection path -> wrapper
        self.collections = {}

    def get_collection(self, path, setup_new_collection=None):
        """Gets a CollectionWrapper for the given path."""

        # normalize so two spellings of the same path share one wrapper
        path = os.path.realpath(path)

        try:
            col = self.collections[path]
        except KeyError:
            col = self.collections[path] = self.collection_wrapper(path, setup_new_collection)

        return col

    def shutdown(self):
        """Close all CollectionWrappers managed by this object."""
        # NOTE(review): deleting while iterating is safe here only because
        # Python 2's items() returns a list copy; this would break on Python 3.
        for path, col in self.collections.items():
            del self.collections[path]
            col.close()

# --------------------------------------------------------------------------------
# /ankiserverctl.py:
# --------------------------------------------------------------------------------
#!/usr/bin/env python

# Command-line control script for the Anki Server: start/stop the paster
# process and manage users in the sqlite auth database.
# NOTE: Python 2 syntax (print statements) throughout this script.

import os
import sys
import signal
import subprocess
import binascii
import getpass
import hashlib
import sqlite3

SERVERCONFIG = "production.ini"
AUTHDBPATH = "auth.db"
PIDPATH = "/tmp/ankiserver.pid"
COLLECTIONPATH = "collections/"

def usage():
    # Print a short help text for all supported sub-commands.
    print "usage: "+sys.argv[0]+" <command> [<args>]"
    print
    print "Commands:"
    print "  start [configfile] - start the server"
    print "  debug [configfile] - start the server in debug mode"
    print "  stop - stop the server"
    print "  adduser <username> - add a new user"
    print "  deluser <username> - delete a user"
    print "  lsuser - list users"
    print "  passwd <username> - change password of a user"

def startsrv(configpath, debug):
    # Launch "paster serve" on the config file; in debug mode run it in the
    # foreground, otherwise detach it and record the child pid in PIDPATH.
    if not configpath:
        configpath = SERVERCONFIG

    # We change to the directory containing the config file
    # so that all the paths will be relative to it.
    configdir = os.path.dirname(configpath)
    if configdir != '':
        os.chdir(configdir)
    configpath = os.path.basename(configpath)

    if debug:
        # Start it in the foreground and wait for it to complete.
        subprocess.call( ["paster", "serve", configpath], shell=False)
        return

    # Background mode: silence the child's output and remember its pid.
    devnull = open(os.devnull, "w")
    pid = subprocess.Popen( ["paster", "serve", configpath],
                            stdout=devnull,
                            stderr=devnull).pid

    with open(PIDPATH, "w") as pidfile:
        pidfile.write(str(pid))

def stopsrv():
    # Kill the previously started server using the pid recorded by startsrv().
    if os.path.isfile(PIDPATH):
        try:
            with open(PIDPATH) as pidfile:
                pid = int(pidfile.read())

            os.kill(pid, signal.SIGKILL)
            os.remove(PIDPATH)
        except Exception, error:
            print >>sys.stderr, sys.argv[0]+": Failed to stop server: "+error.message
    else:
        print >>sys.stderr, sys.argv[0]+": The server is not running"

def adduser(username):
    # Create a user in the auth database and a collection directory for them.
    if username:
        print "Enter password for "+username+": "

        password = getpass.getpass()
        # NOTE(review): hash layout is sha256(user+pass+salt) hex digest with
        # the hex salt appended; must match the check in the sync app —
        # confirm before changing.
        salt = binascii.b2a_hex(os.urandom(8))
        hash = hashlib.sha256(username+password+salt).hexdigest()+salt

        conn = sqlite3.connect(AUTHDBPATH)
        cursor = conn.cursor()

        cursor.execute( "CREATE TABLE IF NOT EXISTS auth "
                        "(user VARCHAR PRIMARY KEY, hash VARCHAR)")

        cursor.execute("INSERT INTO auth VALUES (?, ?)", (username, hash))

        if not os.path.isdir(COLLECTIONPATH+username):
            os.makedirs(COLLECTIONPATH+username)

        conn.commit()
        conn.close()
    else:
        usage()

def deluser(username):
    # Remove the user's row from the auth database (their collection
    # directory is left in place).
    if username and os.path.isfile(AUTHDBPATH):
        conn = sqlite3.connect(AUTHDBPATH)
        cursor = conn.cursor()

        cursor.execute("DELETE FROM auth WHERE user=?", (username,))

        conn.commit()
        conn.close()
    elif not username:
        usage()
    else:
        print >>sys.stderr, sys.argv[0]+": Database file does not exist"

def lsuser():
    # Print one username per line.
    # NOTE(review): assumes auth.db and the auth table already exist;
    # sqlite3.connect would otherwise create an empty db and the SELECT
    # below would fail.
    conn = sqlite3.connect(AUTHDBPATH)
    cursor = conn.cursor()

    cursor.execute("SELECT user FROM auth")

    row = cursor.fetchone()

    while row is not None:
        print row[0]

        row = cursor.fetchone()

    conn.close()

def passwd(username):
    # Re-salt and re-hash a new password for an existing user.
    if os.path.isfile(AUTHDBPATH):
        print "Enter password for "+username+": "

        password = getpass.getpass()
        salt = binascii.b2a_hex(os.urandom(8))
        hash = hashlib.sha256(username+password+salt).hexdigest()+salt

        conn = sqlite3.connect(AUTHDBPATH)
        cursor = conn.cursor()

        cursor.execute("UPDATE auth SET hash=?
WHERE user=?", (hash, username)) 131 | 132 | conn.commit() 133 | conn.close() 134 | else: 135 | print >>sys.stderr, sys.argv[0]+": Database file does not exist" 136 | 137 | def main(): 138 | argc = len(sys.argv) 139 | exitcode = 0 140 | 141 | if argc < 2: 142 | usage() 143 | exitcode = 1 144 | else: 145 | if argc < 3: 146 | sys.argv.append(None) 147 | 148 | if sys.argv[1] == "start": 149 | startsrv(sys.argv[2], False) 150 | elif sys.argv[1] == "debug": 151 | startsrv(sys.argv[2], True) 152 | elif sys.argv[1] == "stop": 153 | stopsrv() 154 | elif sys.argv[1] == "adduser": 155 | adduser(sys.argv[2]) 156 | elif sys.argv[1] == "deluser": 157 | deluser(sys.argv[2]) 158 | elif sys.argv[1] == "lsuser": 159 | lsuser() 160 | elif sys.argv[1] == "passwd": 161 | passwd(sys.argv[2]) 162 | else: 163 | usage() 164 | exitcode = 1 165 | 166 | sys.exit(exitcode) 167 | 168 | if __name__ == "__main__": 169 | main() 170 | -------------------------------------------------------------------------------- /tests/test_collection.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | import shutil 4 | import tempfile 5 | import unittest 6 | 7 | import mock 8 | from mock import MagicMock, sentinel 9 | 10 | import AnkiServer 11 | from AnkiServer.collection import CollectionWrapper, CollectionManager 12 | 13 | class CollectionWrapperTest(unittest.TestCase): 14 | def setUp(self): 15 | self.temp_dir = tempfile.mkdtemp() 16 | self.collection_path = os.path.join(self.temp_dir, 'collection.anki2'); 17 | 18 | def tearDown(self): 19 | shutil.rmtree(self.temp_dir) 20 | 21 | def test_lifecycle_real(self): 22 | """Testing common life-cycle with existing and non-existant collections. 
This 23 | test uses the real Anki objects and actually creates a new collection on disk.""" 24 | 25 | wrapper = CollectionWrapper(self.collection_path) 26 | self.assertFalse(os.path.exists(self.collection_path)) 27 | self.assertFalse(wrapper.opened()) 28 | 29 | wrapper.open() 30 | self.assertTrue(os.path.exists(self.collection_path)) 31 | self.assertTrue(wrapper.opened()) 32 | 33 | # calling open twice shouldn't break anything 34 | wrapper.open() 35 | 36 | wrapper.close() 37 | self.assertTrue(os.path.exists(self.collection_path)) 38 | self.assertFalse(wrapper.opened()) 39 | 40 | # open the same collection again (not a creation) 41 | wrapper = CollectionWrapper(self.collection_path) 42 | self.assertFalse(wrapper.opened()) 43 | wrapper.open() 44 | self.assertTrue(wrapper.opened()) 45 | wrapper.close() 46 | self.assertFalse(wrapper.opened()) 47 | self.assertTrue(os.path.exists(self.collection_path)) 48 | 49 | def test_del(self): 50 | with mock.patch('anki.storage.Collection') as anki_storage_Collection: 51 | col = anki_storage_Collection.return_value 52 | wrapper = CollectionWrapper(self.collection_path) 53 | wrapper.open() 54 | wrapper = None 55 | col.close.assert_called_with() 56 | 57 | def test_setup_func(self): 58 | # Run it when the collection doesn't exist 59 | with mock.patch('anki.storage.Collection') as anki_storage_Collection: 60 | col = anki_storage_Collection.return_value 61 | setup_new_collection = MagicMock() 62 | self.assertFalse(os.path.exists(self.collection_path)) 63 | wrapper = CollectionWrapper(self.collection_path, setup_new_collection) 64 | wrapper.open() 65 | anki_storage_Collection.assert_called_with(self.collection_path) 66 | setup_new_collection.assert_called_with(col) 67 | wrapper = None 68 | 69 | # Make sure that no collection was actually created 70 | self.assertFalse(os.path.exists(self.collection_path)) 71 | 72 | # Create a faux collection file 73 | with file(self.collection_path, 'wt') as fd: 74 | fd.write('Collection!') 75 | 76 | # Run 
it when the collection does exist 77 | with mock.patch('anki.storage.Collection'): 78 | setup_new_collection = lambda col: self.fail("Setup function called when collection already exists!") 79 | self.assertTrue(os.path.exists(self.collection_path)) 80 | wrapper = CollectionWrapper(self.collection_path, setup_new_collection) 81 | wrapper.open() 82 | anki_storage_Collection.assert_called_with(self.collection_path) 83 | wrapper = None 84 | 85 | def test_execute(self): 86 | with mock.patch('anki.storage.Collection') as anki_storage_Collection: 87 | col = anki_storage_Collection.return_value 88 | func = MagicMock() 89 | func.return_value = sentinel.some_object 90 | 91 | # check that execute works and auto-creates the collection 92 | wrapper = CollectionWrapper(self.collection_path) 93 | ret = wrapper.execute(func, [1, 2, 3], {'key': 'aoeu'}) 94 | self.assertEqual(ret, sentinel.some_object) 95 | anki_storage_Collection.assert_called_with(self.collection_path) 96 | func.assert_called_with(col, 1, 2, 3, key='aoeu') 97 | 98 | # check that execute always returns False if waitForReturn=False 99 | func.reset_mock() 100 | ret = wrapper.execute(func, [1, 2, 3], {'key': 'aoeu'}, waitForReturn=False) 101 | self.assertEqual(ret, None) 102 | func.assert_called_with(col, 1, 2, 3, key='aoeu') 103 | 104 | class CollectionManagerTest(unittest.TestCase): 105 | def test_lifecycle(self): 106 | with mock.patch('AnkiServer.collection.CollectionManager.collection_wrapper') as CollectionWrapper: 107 | wrapper = MagicMock() 108 | CollectionWrapper.return_value = wrapper 109 | 110 | manager = CollectionManager() 111 | 112 | # check getting a new collection 113 | ret = manager.get_collection('path1') 114 | CollectionWrapper.assert_called_with(os.path.realpath('path1'), None) 115 | self.assertEqual(ret, wrapper) 116 | 117 | # change the return value, so that it would return a new object 118 | new_wrapper = MagicMock() 119 | CollectionWrapper.return_value = new_wrapper 120 | 
CollectionWrapper.reset_mock() 121 | 122 | # get the new wrapper 123 | ret = manager.get_collection('path2') 124 | CollectionWrapper.assert_called_with(os.path.realpath('path2'), None) 125 | self.assertEqual(ret, new_wrapper) 126 | 127 | # make sure the wrapper and new_wrapper are different 128 | self.assertNotEqual(wrapper, new_wrapper) 129 | 130 | # assert that calling with the first path again, returns the first wrapper 131 | ret = manager.get_collection('path1') 132 | self.assertEqual(ret, wrapper) 133 | 134 | manager.shutdown() 135 | wrapper.close.assert_called_with() 136 | new_wrapper.close.assert_called_with() 137 | 138 | if __name__ == '__main__': 139 | unittest.main() 140 | 141 | -------------------------------------------------------------------------------- /AnkiServer/threading.py: -------------------------------------------------------------------------------- 1 | 2 | # AnkiServer - A personal Anki sync server 3 | # Copyright (C) 2013 David Snopek 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Affero General Public License as 7 | # published by the Free Software Foundation, either version 3 of the 8 | # License, or (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU Affero General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Affero General Public License 16 | # along with this program. If not, see . 
17 | 18 | from __future__ import absolute_import 19 | 20 | import anki 21 | import anki.storage 22 | 23 | from AnkiServer.collection import CollectionWrapper, CollectionManager 24 | 25 | from threading import Thread 26 | from Queue import Queue 27 | 28 | import time, logging 29 | 30 | __all__ = ['ThreadingCollectionWrapper', 'ThreadingCollectionManager'] 31 | 32 | class ThreadingCollectionWrapper(object): 33 | """Provides the same interface as CollectionWrapper, but it creates a new Thread to 34 | interact with the collection.""" 35 | 36 | def __init__(self, path, setup_new_collection=None): 37 | self.path = path 38 | self.wrapper = CollectionWrapper(path, setup_new_collection) 39 | 40 | self._queue = Queue() 41 | self._thread = None 42 | self._running = False 43 | self.last_timestamp = time.time() 44 | 45 | self.start() 46 | 47 | @property 48 | def running(self): 49 | return self._running 50 | 51 | def qempty(self): 52 | return self._queue.empty() 53 | 54 | def current(self): 55 | from threading import current_thread 56 | return current_thread() == self._thread 57 | 58 | def execute(self, func, args=[], kw={}, waitForReturn=True): 59 | """ Executes a given function on this thread with the *args and **kw. 60 | 61 | If 'waitForReturn' is True, then it will block until the function has 62 | executed and return its return value. If False, it will return None 63 | immediately and the function will be executed sometime later. 
64 | """ 65 | 66 | if waitForReturn: 67 | return_queue = Queue() 68 | else: 69 | return_queue = None 70 | 71 | self._queue.put((func, args, kw, return_queue)) 72 | 73 | if return_queue is not None: 74 | ret = return_queue.get(True) 75 | if isinstance(ret, Exception): 76 | raise ret 77 | return ret 78 | 79 | def _run(self): 80 | logging.info('CollectionThread[%s]: Starting...', self.path) 81 | 82 | try: 83 | while self._running: 84 | func, args, kw, return_queue = self._queue.get(True) 85 | 86 | if hasattr(func, 'func_name'): 87 | func_name = func.func_name 88 | else: 89 | func_name = func.__class__.__name__ 90 | 91 | logging.info('CollectionThread[%s]: Running %s(*%s, **%s)', self.path, func_name, repr(args), repr(kw)) 92 | self.last_timestamp = time.time() 93 | 94 | try: 95 | ret = self.wrapper.execute(func, args, kw, return_queue) 96 | except Exception, e: 97 | logging.error('CollectionThread[%s]: Unable to %s(*%s, **%s): %s', 98 | self.path, func_name, repr(args), repr(kw), e, exc_info=True) 99 | # we return the Exception which will be raise'd on the other end 100 | ret = e 101 | 102 | if return_queue is not None: 103 | return_queue.put(ret) 104 | except Exception, e: 105 | logging.error('CollectionThread[%s]: Thread crashed! Exception: %s', self.path, e, exc_info=True) 106 | finally: 107 | self.wrapper.close() 108 | # clean out old thread object 109 | self._thread = None 110 | # in case we got here via an exception 111 | self._running = False 112 | 113 | logging.info('CollectionThread[%s]: Stopped!', self.path) 114 | 115 | def start(self): 116 | if not self._running: 117 | self._running = True 118 | assert self._thread is None 119 | self._thread = Thread(target=self._run) 120 | self._thread.start() 121 | 122 | def stop(self): 123 | def _stop(col): 124 | self._running = False 125 | self.execute(_stop, waitForReturn=False) 126 | 127 | def stop_and_wait(self): 128 | """ Tell the thread to stop and wait for it to happen. 
""" 129 | self.stop() 130 | if self._thread is not None: 131 | self._thread.join() 132 | 133 | # 134 | # Mimic the CollectionWrapper interface 135 | # 136 | 137 | def open(self): 138 | """Non-op. The collection will be opened on demand.""" 139 | pass 140 | 141 | def close(self): 142 | """Closes the underlying collection without stopping the thread.""" 143 | 144 | def _close(col): 145 | self.wrapper.close() 146 | self.execute(_close, waitForReturn=False) 147 | 148 | def opened(self): 149 | return self.wrapper.opened() 150 | 151 | class ThreadingCollectionManager(CollectionManager): 152 | """Manages a set of ThreadingCollectionWrapper objects.""" 153 | 154 | collection_wrapper = ThreadingCollectionWrapper 155 | 156 | def __init__(self): 157 | super(ThreadingCollectionManager, self).__init__() 158 | 159 | self.monitor_frequency = 15 160 | self.monitor_inactivity = 90 161 | 162 | monitor = Thread(target=self._monitor_run) 163 | monitor.daemon = True 164 | monitor.start() 165 | self._monitor_thread = monitor 166 | 167 | # TODO: we should raise some error if a collection is started on a manager that has already been shutdown! 168 | # or maybe we could support being restarted? 169 | 170 | # TODO: it would be awesome to have a safe way to stop inactive threads completely! 171 | # TODO: we need a way to inform other code that the collection has been closed 172 | def _monitor_run(self): 173 | """ Monitors threads for inactivity and closes the collection on them 174 | (leaves the thread itself running -- hopefully waiting peacefully with only a 175 | small memory footprint!) 
""" 176 | while True: 177 | cur = time.time() 178 | for path, thread in self.collections.items(): 179 | if thread.running and thread.wrapper.opened() and thread.qempty() and cur - thread.last_timestamp >= self.monitor_inactivity: 180 | logging.info('Monitor is closing collection on inactive CollectionThread[%s]', thread.path) 181 | thread.close() 182 | time.sleep(self.monitor_frequency) 183 | 184 | def shutdown(self): 185 | # TODO: stop the monitor thread! 186 | 187 | # stop all the threads 188 | for path, col in self.collections.items(): 189 | del self.collections[path] 190 | col.stop() 191 | 192 | # let the parent do whatever else it might want to do... 193 | super(ThreadingCollectionManager, self).shutdown() 194 | 195 | # 196 | # For working with the global ThreadingCollectionManager: 197 | # 198 | 199 | collection_manager = None 200 | 201 | def getCollectionManager(): 202 | """Return the global ThreadingCollectionManager for this process.""" 203 | global collection_manager 204 | if collection_manager is None: 205 | collection_manager = ThreadingCollectionManager() 206 | return collection_manager 207 | 208 | def shutdown(): 209 | """If the global ThreadingCollectionManager exists, shut it down.""" 210 | global collection_manager 211 | if collection_manager is not None: 212 | collection_manager.shutdown() 213 | collection_manager = None 214 | 215 | -------------------------------------------------------------------------------- /tests/test_sync_app.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import binascii 4 | import hashlib 5 | import os 6 | import sqlite3 7 | import tempfile 8 | import unittest 9 | 10 | from AnkiServer.apps.sync_app import SyncCollectionHandler 11 | from AnkiServer.apps.sync_app import SimpleUserManager 12 | from AnkiServer.apps.sync_app import SqliteUserManager 13 | from AnkiServer.apps.sync_app import SyncUserSession 14 | from AnkiServer.apps.sync_app import 
SimpleSessionManager 15 | from AnkiServer.apps.sync_app import SqliteSessionManager 16 | from AnkiServer.apps.sync_app import SyncApp 17 | 18 | from CollectionTestBase import CollectionTestBase 19 | 20 | 21 | class SyncCollectionHandlerTest(CollectionTestBase): 22 | 23 | def setUp(self): 24 | CollectionTestBase.setUp(self) 25 | self.syncCollectionHandler = SyncCollectionHandler(self.collection) 26 | 27 | def tearDown(self): 28 | CollectionTestBase.tearDown(self) 29 | self.syncCollectionHandler = None 30 | 31 | def test_meta(self): 32 | version_info = (None, 33 | ','.join(('ankidesktop', '2.0.12', 'lin::')), 34 | ','.join(('ankidesktop', '2.0.32', 'lin::'))) 35 | 36 | meta = self.syncCollectionHandler.meta(version_info[0]) 37 | self.assertEqual(meta[0], self.collection.mod) 38 | self.assertEqual(meta[1], self.collection.scm) 39 | self.assertEqual(meta[2], self.collection._usn) 40 | self.assertTrue((type(meta[3]) == int) and meta[3] > 0) 41 | self.assertEqual(meta[4], self.collection.media.lastUsn()) 42 | 43 | meta = self.syncCollectionHandler.meta(version_info[1]) 44 | self.assertEqual(meta[0], self.collection.mod) 45 | self.assertEqual(meta[1], self.collection.scm) 46 | self.assertEqual(meta[2], self.collection._usn) 47 | self.assertTrue((type(meta[3]) == int) and meta[3] > 0) 48 | self.assertEqual(meta[4], self.collection.media.lastUsn()) 49 | 50 | meta = self.syncCollectionHandler.meta(version_info[2]) 51 | self.assertEqual(meta['scm'], self.collection.scm) 52 | self.assertTrue((type(meta['ts']) == int) and meta['ts'] > 0) 53 | self.assertEqual(meta['mod'], self.collection.mod) 54 | self.assertEqual(meta['usn'], self.collection._usn) 55 | self.assertEqual(meta['musn'], self.collection.media.lastUsn()) 56 | self.assertEqual(meta['msg'], '') 57 | self.assertEqual(meta['cont'], True) 58 | 59 | 60 | class SimpleUserManagerTest(unittest.TestCase): 61 | _good_test_un = 'username' 62 | _good_test_pw = 'password' 63 | 64 | _bad_test_un = 'notAUsername' 65 | _bad_test_pw 
= 'notAPassword' 66 | 67 | def setUp(self): 68 | self._user_manager = SimpleUserManager() 69 | 70 | def tearDown(self): 71 | self._user_manager = None 72 | 73 | def test_authenticate(self): 74 | self.assertTrue(self._user_manager.authenticate(self._good_test_un, 75 | self._good_test_pw)) 76 | 77 | self.assertTrue(self._user_manager.authenticate(self._bad_test_un, 78 | self._bad_test_pw)) 79 | 80 | self.assertTrue(self._user_manager.authenticate(self._good_test_un, 81 | self._bad_test_pw)) 82 | 83 | self.assertTrue(self._user_manager.authenticate(self._bad_test_un, 84 | self._good_test_pw)) 85 | 86 | def test_username2dirname(self): 87 | dirname = self._user_manager.username2dirname(self._good_test_un) 88 | self.assertEqual(dirname, self._good_test_un) 89 | 90 | 91 | class SqliteUserManagerTest(SimpleUserManagerTest): 92 | file_descriptor, _test_auth_db_path = tempfile.mkstemp(suffix=".db") 93 | os.close(file_descriptor) 94 | os.unlink(_test_auth_db_path) 95 | 96 | def _create_test_auth_db(self, db_path, username, password): 97 | if os.path.exists(db_path): 98 | os.remove(db_path) 99 | 100 | salt = binascii.b2a_hex(os.urandom(8)) 101 | crypto_hash = hashlib.sha256(username+password+salt).hexdigest()+salt 102 | 103 | conn = sqlite3.connect(db_path) 104 | cursor = conn.cursor() 105 | 106 | cursor.execute("""CREATE TABLE IF NOT EXISTS auth 107 | (user VARCHAR PRIMARY KEY, hash VARCHAR)""") 108 | 109 | cursor.execute("INSERT INTO auth VALUES (?, ?)", (username, crypto_hash)) 110 | 111 | conn.commit() 112 | conn.close() 113 | 114 | def setUp(self): 115 | self._create_test_auth_db(self._test_auth_db_path, 116 | self._good_test_un, 117 | self._good_test_pw) 118 | self._user_manager = SqliteUserManager(self._test_auth_db_path) 119 | 120 | def tearDown(self): 121 | if os.path.exists(self._test_auth_db_path): 122 | os.remove(self._test_auth_db_path) 123 | 124 | def test_authenticate(self): 125 | self.assertTrue(self._user_manager.authenticate(self._good_test_un, 126 | 
self._good_test_pw)) 127 | 128 | self.assertFalse(self._user_manager.authenticate(self._bad_test_un, 129 | self._bad_test_pw)) 130 | 131 | self.assertFalse(self._user_manager.authenticate(self._good_test_un, 132 | self._bad_test_pw)) 133 | 134 | self.assertFalse(self._user_manager.authenticate(self._bad_test_un, 135 | self._good_test_pw)) 136 | 137 | 138 | class SimpleSessionManagerTest(unittest.TestCase): 139 | test_hkey = '1234567890' 140 | test_session = SyncUserSession('testName', 'testPath', None, None) 141 | 142 | def setUp(self): 143 | self.sessionManager = SimpleSessionManager() 144 | 145 | def tearDown(self): 146 | self.sessionManager = None 147 | 148 | def test_save(self): 149 | self.sessionManager.save(self.test_hkey, self.test_session) 150 | self.assertEqual(self.sessionManager.sessions[self.test_hkey].name, 151 | self.test_session.name) 152 | self.assertEqual(self.sessionManager.sessions[self.test_hkey].path, 153 | self.test_session.path) 154 | 155 | def test_delete(self): 156 | self.sessionManager.save(self.test_hkey, self.test_session) 157 | self.assertTrue(self.test_hkey in self.sessionManager.sessions) 158 | 159 | self.sessionManager.delete(self.test_hkey) 160 | 161 | self.assertTrue(self.test_hkey not in self.sessionManager.sessions) 162 | 163 | def test_load(self): 164 | self.sessionManager.save(self.test_hkey, self.test_session) 165 | self.assertTrue(self.test_hkey in self.sessionManager.sessions) 166 | 167 | loaded_session = self.sessionManager.load(self.test_hkey) 168 | self.assertEqual(loaded_session.name, self.test_session.name) 169 | self.assertEqual(loaded_session.path, self.test_session.path) 170 | 171 | 172 | class SqliteSessionManagerTest(SimpleSessionManagerTest): 173 | file_descriptor, _test_sess_db_path = tempfile.mkstemp(suffix=".db") 174 | os.close(file_descriptor) 175 | os.unlink(_test_sess_db_path) 176 | 177 | def setUp(self): 178 | self.sessionManager = SqliteSessionManager(self._test_sess_db_path) 179 | 180 | def 
tearDown(self): 181 | if os.path.exists(self._test_sess_db_path): 182 | os.remove(self._test_sess_db_path) 183 | 184 | def test_save(self): 185 | SimpleSessionManagerTest.test_save(self) 186 | self.assertTrue(os.path.exists(self._test_sess_db_path)) 187 | 188 | conn = sqlite3.connect(self._test_sess_db_path) 189 | cursor = conn.cursor() 190 | cursor.execute("SELECT user, path FROM session WHERE hkey=?", 191 | (self.test_hkey,)) 192 | res = cursor.fetchone() 193 | conn.close() 194 | 195 | self.assertEqual(res[0], self.test_session.name) 196 | self.assertEqual(res[1], self.test_session.path) 197 | 198 | 199 | class SyncAppTest(unittest.TestCase): 200 | pass 201 | 202 | 203 | if __name__ == '__main__': 204 | unittest.main() 205 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Anki Server 2 | =========== 3 | 4 | **NOTE: Maintenance of this project has moved to a** `new GitHub project `_. 5 | 6 | `Anki `_ is a powerful Open Source flashcard 7 | application, which helps you quickly and easily memorize facts over 8 | the long term utilizing a spaced repetition algorithm. 9 | 10 | Anki's main form is a desktop application (for Windows, Linux and 11 | MacOS) which can sync to a web version (AnkiWeb) and mobile versions 12 | for Android and iOS. 13 | 14 | This is a personal Anki Server, which you can sync against instead of 15 | AnkiWeb. 16 | 17 | It also includes a RESTful API, so that you could implement your 18 | own AnkiWeb-like site if you wanted. 19 | 20 | It was originally developed to support the flashcard functionality on 21 | `Bibliobird `_, a web application for 22 | language learning. 23 | 24 | Installing the easy way! 
25 | ------------------------ 26 | 27 | If you have ``easy_install`` or ``pip`` on your system, you can 28 | simply run:: 29 | 30 | $ easy_install AnkiServer 31 | 32 | Or using ``pip``:: 33 | 34 | $ pip install AnkiServer 35 | 36 | This will give you the latest released version! 37 | 38 | However, if you want to try the latest bleeding edge version OR you 39 | want to help with development, you'll need to install from source. 40 | In that case, follow the instructions in the next two sections. 41 | 42 | Setting up a virtualenv 43 | ----------------------- 44 | 45 | If you want to install your Anki Server in an isolated Python 46 | environment using 47 | `virtualenv `_, please 48 | follow these instructions before going on to the next section. If 49 | not, just skip to the "Installing" section below. 50 | 51 | There are many reasons for installing into a virtualenv, rather 52 | than globally on your system: 53 | 54 | 55 | - You can keep the Anki Server's dependencies seperate from other 56 | Python applications. 57 | 58 | - You don't have permission to install globally on your system 59 | (like on a shared host). 60 | 61 | Here are step-by-step instruction for setting up your virtualenv: 62 | 63 | 1. First, you need to install "virtualenv". If your system has 64 | ``easy_install`` or ``pip``, this is just a matter of:: 65 | 66 | $ easy_install virtualenv 67 | 68 | Or using pip:: 69 | 70 | $ pip install virtualenv 71 | 72 | Or you can use your the package manager provided by your OS. 73 | 74 | 2. Next, create your a Python environment for running AnkiServer:: 75 | 76 | $ virtualenv AnkiServer.env 77 | 78 | 3. (Optional) Enter the virtualenv to save you on typing:: 79 | 80 | $ . AnkiServer.env/bin/activate 81 | 82 | 83 | If you skip step 3, you'll have to type 84 | ``AnkiServer.env/bin/python`` instead of ``python`` and 85 | ``AnkiServer.env/bin/paster`` instead of ``paster`` in the following 86 | sections. 
87 | 88 | Also, remember that the environment change in step 3 only lasts as 89 | long as your current terminal session. You'll have to re-enter the 90 | environment if you enter that terminal and come back later. 91 | 92 | Installing your Anki Server from source 93 | --------------------------------------- 94 | 95 | 1. Install all the dependencies we need using ``easy_install`` or 96 | ``pip``:: 97 | 98 | $ easy_install webob PasteDeploy PasteScript sqlalchemy simplejson 99 | 100 | Or using pip:: 101 | 102 | $ pip install webob PasteDeploy PasteScript sqlalchemy simplejson 103 | 104 | Or you can use your the package manager provided by your OS. 105 | 106 | 2. Download and install libanki. You can find the latest release of 107 | Anki here: 108 | 109 | http://code.google.com/p/anki/downloads/list 110 | 111 | Look for a \*.tgz file with a Summary of "Anki Source". At the time 112 | of this writing that is anki-2.0.11.tgz. 113 | 114 | Download this file and extract. 115 | 116 | Then either: 117 | 118 | a. Run the 'make install', or 119 | 120 | b. Copy the entire directory to /usr/share/anki 121 | 122 | 3. Make the egg info files (so paster can see our app):: 123 | 124 | $ python setup.py egg_info 125 | 126 | Configuring and running your Anki Server 127 | ---------------------------------------- 128 | 129 | 1. Copy the example.ini to production.ini in your current directory 130 | and edit for your needs. 131 | 132 | a. If you installed from source, it'll be at the top-level. 133 | 134 | b. If you installed via 'easy_install' or 'pip', you'll find all 135 | the example configuration at 136 | ``python_prefix/lib/python2.X/site-packages/AnkiServer-2.X.X-py2.X.egg/examples`` 137 | (replacing ``python_prefix`` with the root of your Python and 138 | all the ``X`` with the correct versions). For example, it could 139 | be:: 140 | 141 | /usr/lib/python2.7/site-packages/AnkiServer-2.0.0a6-py2.7.egg/examples/example.ini 142 | 143 | 3. 
Create user:: 144 | 145 | $ ./ankiserverctl.py adduser 146 | 147 | 4. Test the server by starting it debug mode:: 148 | 149 | $ ./ankiserverctl.py debug 150 | 151 | If the output looks good, you can stop the server by pressing Ctrl-C and start it again in normal mode:: 152 | 153 | $ ./ankiserverctl.py start 154 | 155 | To stop AnkiServer, run:: 156 | 157 | $ ./ankiserverctl.py stop 158 | 159 | Point the Anki desktop program at it 160 | ------------------------------------ 161 | 162 | Unfortunately, there isn't currently any user interface in the Anki 163 | destop program to point it at your personal sync server instead of 164 | AnkiWeb, so you'll have to write a short "addon". 165 | 166 | Create a file like this in your Anki/addons folder called 167 | "mysyncserver.py":: 168 | 169 | import anki.sync 170 | anki.sync.SYNC_BASE = 'http://127.0.0.1:27701/' 171 | anki.sync.SYNC_MEDIA_BASE = 'http://127.0.0.1:27701/msync/' 172 | 173 | Be sure to change the SYNC_URL to point at your sync server. The 174 | address ``127.0.0.1`` refers to the local computer. 175 | 176 | If you are using TLS, add these lines to the configuration to verify 177 | the certificate against a custom certificate chain:: 178 | 179 | # Path to the certificate chain file, relative to the Anki/addons directory 180 | CERTPATH = 'server.pem' 181 | 182 | # Override TLS certificate path 183 | httpCon_anki = anki.sync.httpCon 184 | def httpCon_patch(): 185 | import os.path 186 | conn = httpCon_anki() 187 | conn.ca_certs = os.path.join(os.path.dirname(__file__), CERTPATH) 188 | return conn 189 | anki.sync.httpCon = httpCon_patch 190 | 191 | The certificate chain must include all intermediate certificates and the 192 | root certificate. For the popular free 193 | `Let's encrypt `_ CA, a sample certificate chain 194 | can be found 195 | `here `_. 
Unfortunately ``python-httplib2`` (used by Anki's sync client for issuing HTTP
requests) does not support `SNI `_
for telling the web server during the TLS handshake which certificate to use.
This will result in certificate validation errors if your Anki Server instance
runs behind a web server that serves multiple domains using different
certificates. This has `been fixed `_
in the ``python-httplib2`` source code and will be part of the upcoming
``0.9.3`` release. In the likely event that you are not using the latest version
yet, you will have to install the latest release from source using::

    sudo pip install -e git+https://github.com/httplib2/httplib2.git#egg=httplib2

Alternatively you can try adding these lines, to disable certificate validation
entirely::

    # Override TLS certificate path
    httpCon_anki = anki.sync.httpCon
    def httpCon_patch():
        conn = httpCon_anki()
        conn.disable_ssl_certificate_validation = True
        return conn
    anki.sync.httpCon = httpCon_patch

Restart Anki for your plugin to take effect. Now, every time you sync,
it will be to your personal sync server rather than AnkiWeb.

However, if you just want to switch temporarily, rather than creating
an addon, you can set the ``SYNC_URL`` environment variable when
running from the command-line (on Linux)::

    export SYNC_URL=http://127.0.0.1:27701/sync/
    ./runanki &

Point the mobile apps at it
---------------------------

As of AnkiDroid 2.6 the sync server can be changed in the settings:

1. Open the *Settings* screen from the menu
2. In the *Advanced* section, tap on *Custom sync server*
3. Check the *Use custom sync server* box
4. Change the *Sync URL* and *Media sync URL* to the values described above
5. The next sync should use the new sync server (if your previous username
   or password does not match, AnkiDroid will ask you to log in again)

At the moment, there isn't any way to get the Anki iOS app to point at
your personal sync server. 😕

Running with Supervisor
-----------------------

If you want to run your Anki server persistently on a Linux (or
other UNIX-y) server, `Supervisor `_ is a
great tool to monitor and manage it. It will allow you to start it
when your server boots, restart it if it crashes and easily access
its logs.

1. Install Supervisor on your system. If it's Debian or Ubuntu this
   will work::

    $ sudo apt-get install supervisor

   If you're using a different OS, please try
   `these instructions `_.

2. Copy ``supervisor-anki-server.conf`` to ``/etc/supervisor/conf.d/anki-server.conf``::

    $ sudo cp supervisor-anki-server.conf /etc/supervisor/conf.d/anki-server.conf

3. Modify ``/etc/supervisor/conf.d/anki-server.conf`` to match your
   system and how you set up your Anki Server in the section above.

4. Reload Supervisor's configuration::

    $ sudo supervisorctl reload

5. Check the logs from the Anki Server to make sure everything is
   fine::

    $ sudo supervisorctl tail anki-server

   If it's empty - then everything's fine! Otherwise, you'll see an
   error message.

Later if you manually want to stop, start or restart it, you can
use::

    $ sudo supervisorctl stop anki-server

    $ sudo supervisorctl start anki-server

    $ sudo supervisorctl restart anki-server

See the `Supervisor documentation `_ for
more info!
Using with Apache
-----------------

If you're already serving your website via Apache (on port 80) and
want to also allow users to sync against a URL on port 80, you can
forward requests from Apache to the Anki server.

On Bibliobird.com, I have a special anki.bibliobird.com virtual host
which users can sync against. Here is an excerpt from my Apache
conf::

    <VirtualHost *:80>
        ServerAdmin support@lingwo.org
        ServerName anki.bibliobird.com

        # The Anki server handles gzip itself!
        SetEnv no-gzip 1

        <Location />
            ProxyPass http://localhost:27701/
            ProxyPassReverse http://localhost:27701/
        </Location>
    </VirtualHost>

It may also be possible to use `mod_wsgi
`_, however, I have no experience
with that.

Using with nginx
----------------

If you happen to use nginx, you can use the following configuration to
proxy requests from nginx to your Anki Server::

    server {
        # Allow access via HTTP
        listen 80;
        listen [::]:80;

        # Allow access via HTTPS
        listen 443 ssl spdy;
        listen [::]:443 ssl spdy;

        # Set server names for access
        server_name anki.server.name;

        # Set TLS certificates to use for HTTPS access
        ssl_certificate /path/to/fullchain.pem;
        ssl_certificate_key /path/to/privkey.pem;

        location / {
            # Prevent nginx from rejecting larger media files
            client_max_body_size 0;

            proxy_pass http://anki:27701;
            include proxy_params;
        }
    }

AnkiDroid will not verify the TLS certificate, Anki Desktop will by
default reject all but AnkiWeb's certificate, see the
`Anki addon section <#point-the-anki-desktop-program-at-it>`_ for
how to change this.
356 | 357 | How to get help 358 | --------------- 359 | 360 | If you're having any problems installing or using Anki Server, please 361 | create an issue on GitHub (or find an existing issue about your problem): 362 | 363 | https://github.com/dsnopek/anki-sync-server/issues 364 | 365 | Be sure to let us know which operating system and version you're using 366 | and how you intend to use the Anki Server! 367 | 368 | -------------------------------------------------------------------------------- /tests/test_rest_app.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import shutil 5 | import tempfile 6 | import unittest 7 | import logging 8 | import time 9 | from pprint import pprint 10 | 11 | import mock 12 | from mock import MagicMock 13 | 14 | import AnkiServer 15 | from AnkiServer.collection import CollectionManager 16 | from AnkiServer.apps.rest_app import RestApp, RestHandlerRequest, CollectionHandler, ImportExportHandler, NoteHandler, ModelHandler, DeckHandler, CardHandler 17 | 18 | from CollectionTestBase import CollectionTestBase 19 | 20 | from webob.exc import * 21 | 22 | import anki 23 | import anki.storage 24 | 25 | class RestAppTest(unittest.TestCase): 26 | def setUp(self): 27 | self.temp_dir = tempfile.mkdtemp() 28 | self.collection_manager = CollectionManager() 29 | self.rest_app = RestApp(self.temp_dir, collection_manager=self.collection_manager) 30 | 31 | # disable all but critical errors! 
        logging.disable(logging.CRITICAL)

    def tearDown(self):
        self.collection_manager.shutdown()
        self.collection_manager = None
        self.rest_app = None
        shutil.rmtree(self.temp_dir)

    def test_list_collections(self):
        os.mkdir(os.path.join(self.temp_dir, 'test1'))
        os.mkdir(os.path.join(self.temp_dir, 'test2'))

        # only 'test1' gets a collection file, so only it should be listed
        with open(os.path.join(self.temp_dir, 'test1', 'collection.anki2'), 'wt') as fd:
            fd.write('Testing!')

        self.assertEqual(self.rest_app.list_collections(), ['test1'])

    def test_parsePath(self):
        # (path, (type, handler name, ids)) pairs
        tests = [
            ('collection/user', ('collection', 'index', ['user'])),
            ('collection/user/handler', ('collection', 'handler', ['user'])),
            ('collection/user/note/123', ('note', 'index', ['user', '123'])),
            ('collection/user/note/123/handler', ('note', 'handler', ['user', '123'])),
            ('collection/user/deck/name', ('deck', 'index', ['user', 'name'])),
            ('collection/user/deck/name/handler', ('deck', 'handler', ['user', 'name'])),
            #('collection/user/deck/name/card/123', ('card', 'index', ['user', 'name', '123'])),
            #('collection/user/deck/name/card/123/handler', ('card', 'handler', ['user', 'name', '123'])),
            ('collection/user/card/123', ('card', 'index', ['user', '123'])),
            ('collection/user/card/123/handler', ('card', 'handler', ['user', '123'])),
            # the leading slash should make no difference!
            ('/collection/user', ('collection', 'index', ['user'])),
        ]

        for path, result in tests:
            self.assertEqual(self.rest_app._parsePath(path), result)

    def test_parsePath_not_found(self):
        tests = [
            'bad',
            'bad/oaeu',
            'collection',
            'collection/user/handler/bad',
            '',
            '/',
        ]

        for path in tests:
            self.assertRaises(HTTPNotFound, self.rest_app._parsePath, path)

    def test_getCollectionPath(self):
        def fullpath(collection_id):
            return os.path.normpath(os.path.join(self.temp_dir, collection_id, 'collection.anki2'))

        # This is simple and straight forward!
        self.assertEqual(self.rest_app._getCollectionPath('user'), fullpath('user'))

        # These are dangerous - the user is trying to hack us!
        dangerous = ['../user', '/etc/passwd', '/tmp/aBaBaB', '/root/.ssh/id_rsa']
        for collection_id in dangerous:
            self.assertRaises(HTTPBadRequest, self.rest_app._getCollectionPath, collection_id)

    def test_getHandler(self):
        def handlerOne():
            pass

        def handlerTwo():
            pass
        handlerTwo.hasReturnValue = False

        self.rest_app.add_handler('collection', 'handlerOne', handlerOne)
        self.rest_app.add_handler('deck', 'handlerTwo', handlerTwo)

        (handler, hasReturnValue) = self.rest_app._getHandler('collection', 'handlerOne')
        self.assertEqual(handler, handlerOne)
        self.assertEqual(hasReturnValue, True)

        (handler, hasReturnValue) = self.rest_app._getHandler('deck', 'handlerTwo')
        self.assertEqual(handler, handlerTwo)
        self.assertEqual(hasReturnValue, False)

        # try some bad handler names and types
        self.assertRaises(HTTPNotFound, self.rest_app._getHandler, 'collection', 'nonExistantHandler')
        self.assertRaises(HTTPNotFound, self.rest_app._getHandler, 'nonExistantType', 'handlerOne')

    def test_parseRequestBody(self):
        req = MagicMock()
        req.body = '{"key":"value"}'

        data = self.rest_app._parseRequestBody(req)
        self.assertEqual(data, {'key': 'value'})
        # NOTE(review): the next two asserts rely on Python 2 semantics —
        # dict.keys() returning a list, and the app coercing JSON keys to str.
        self.assertEqual(data.keys(), ['key'])
        self.assertEqual(type(data.keys()[0]), str)

        # test some bad data
        req.body = '{aaaaaaa}'
        self.assertRaises(HTTPBadRequest, self.rest_app._parseRequestBody, req)

class CollectionHandlerTest(CollectionTestBase):
    """Exercises CollectionHandler operations against a real (temp) collection."""

    def setUp(self):
        super(CollectionHandlerTest, self).setUp()
        self.handler = CollectionHandler()

    def execute(self, name, data):
        # Dispatch handler method `name` the same way RestApp would.
        ids = ['collection_name']
        func = getattr(self.handler, name)
        req = RestHandlerRequest(self.mock_app, data, ids, {})
        return func(self.collection, req)

    def test_list_decks(self):
        data = {}
        ret = self.execute('list_decks', data)

        # It contains only the 'Default' deck
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['name'], 'Default')

    def test_select_deck(self):
        data = {'deck': 1}
        ret = self.execute('select_deck', data)
        self.assertEqual(ret, None);

    def test_create_dynamic_deck_simple(self):
        self.add_default_note(5)

        data = {
            'name': 'Dyn deck',
            'mode': 'random',
            'count': 2,
            'query': "deck:\"Default\" (tag:'Tag1' or tag:'Tag2') (-tag:'Tag3')",
        }
        ret = self.execute('create_dynamic_deck', data)
        self.assertEqual(ret['name'], 'Dyn deck')
        self.assertEqual(ret['dyn'], True)

        cards = self.collection.findCards('deck:"Dyn deck"')
        self.assertEqual(len(cards), 2)

    def test_list_models(self):
        data = {}
        ret = self.execute('list_models', data)

        # get a sorted name list that we can actually check
        names = [model['name'] for model in ret]
        names.sort()

        # These are the default models created by Anki in a new collection
        default_models = [
            'Basic',
            'Basic (and reversed card)',
            'Basic (optional reversed card)',
            'Cloze'
        ]

        self.assertEqual(names, default_models)

    def test_find_model_by_name(self):
        data = {'model': 'Basic'}
        ret = self.execute('find_model_by_name', data)
        self.assertEqual(ret['name'], 'Basic')

    def test_find_notes(self):
        ret = self.execute('find_notes', {})
        self.assertEqual(ret, [])

        # add a note programmatically
        self.add_default_note()

        # get the id for the one note on this collection
        note_id = self.collection.findNotes('')[0]

        ret = self.execute('find_notes', {})
        self.assertEqual(ret, [{'id': note_id}])

        ret = self.execute('find_notes', {'query': 'tag:Tag1'})
        self.assertEqual(ret, [{'id': note_id}])

        ret = self.execute('find_notes', {'query': 'tag:TagX'})
        self.assertEqual(ret, [])

        ret = self.execute('find_notes', {'preload': True})
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['id'], note_id)
        self.assertEqual(ret[0]['model']['name'], 'Basic')

    def test_add_note(self):
        # make sure there are no notes (yet)
        self.assertEqual(self.collection.findNotes(''), [])

        # add a note programmatically
        note = {
            'model': 'Basic',
            'fields': {
                'Front': 'The front',
                'Back': 'The back',
            },
            'tags': "Tag1 Tag2",
        }
        self.execute('add_note', note)

        notes = self.collection.findNotes('')
        self.assertEqual(len(notes), 1)

        note_id = notes[0]
        note = self.collection.getNote(note_id)

        self.assertEqual(note.model()['name'], 'Basic')
        self.assertEqual(note['Front'], 'The front')
        self.assertEqual(note['Back'], 'The back')
        self.assertEqual(note.tags, ['Tag1', 'Tag2'])

    def test_list_tags(self):
        ret = self.execute('list_tags', {})
        self.assertEqual(ret, [])

        self.add_default_note()

        ret = self.execute('list_tags', {})
        ret.sort()
        self.assertEqual(ret, ['Tag1', 'Tag2'])

    def test_set_language(self):
        import anki.lang

        self.assertEqual(anki.lang._('Again'), 'Again')

        try:
            data = {'code': 'pl'}
            self.execute('set_language', data)
            self.assertEqual(anki.lang._('Again'), u'Znowu')
        finally:
            # return everything to normal!
            anki.lang.setLang('en')

    def test_reset_scheduler(self):
        self.add_default_note(3)

        ret = self.execute('reset_scheduler', {'deck': 'Default'})
        self.assertEqual(ret, {
            'new_cards': 3,
            'learning_cards': 0,
            'review_cards': 0,
        })

    def test_next_card(self):
        ret = self.execute('next_card', {})
        self.assertEqual(ret, None)

        # add a note programmatically
        self.add_default_note()

        # get the id for the one card and note on this collection
        note_id = self.collection.findNotes('')[0]
        card_id = self.collection.findCards('')[0]

        self.collection.sched.reset()
        ret = self.execute('next_card', {})
        self.assertEqual(ret['id'], card_id)
        self.assertEqual(ret['nid'], note_id)
        self.assertEqual(ret['css'], '')
        self.assertEqual(ret['question'], 'The front')
        self.assertEqual(ret['answer'], 'The front\n\n<hr id=answer>\n\nThe back')
        self.assertEqual(ret['answer_buttons'], [
            {'ease': 1,
             'label': 'Again',
             'string_label': 'Again',
             'interval': 60,
             'string_interval': '<1 minute'},
            {'ease': 2,
             'label': 'Good',
             'string_label': 'Good',
             'interval': 600,
             'string_interval': '<10 minutes'},
            {'ease': 3,
             'label': 'Easy',
             'string_label': 'Easy',
             'interval': 345600,
             'string_interval': '4 days'}])

    def test_next_card_translation(self):
        # add a note programmatically
        self.add_default_note()

        # get the card in Polish so we can test translation too
        anki.lang.setLang('pl')
        try:
            ret = self.execute('next_card', {})
        finally:
            anki.lang.setLang('en')

        self.assertEqual(ret['answer_buttons'], [
            {'ease': 1,
             'label': 'Again',
             'string_label': u'Znowu',
             'interval': 60,
             'string_interval': '<1 minuta'},
            {'ease': 2,
             'label': 'Good',
             'string_label': u'Dobra',
             'interval': 600,
             'string_interval': '<10 minut'},
            {'ease': 3,
             'label': 'Easy',
             'string_label': u'Łatwa',
             'interval': 345600,
             'string_interval': '4 dni'}])

    def test_next_card_five_times(self):
        self.add_default_note(5)
        for idx in range(0, 5):
            ret = self.execute('next_card', {})
            self.assertTrue(ret is not None)

    def test_answer_card(self):
        import time

        self.add_default_note()

        # instantiate a deck handler to get the card
        card = self.execute('next_card', {})
        self.assertEqual(card['reps'], 0)

        self.execute('answer_card', {'id': card['id'], 'ease': 2, 'timerStarted': time.time()})

        # reset the scheduler and try to get the next card again - there should be none!
        self.collection.sched.reset()
        card = self.execute('next_card', {})
        self.assertEqual(card['reps'], 1)

    def test_suspend_cards(self):
        # add a note programmatically
        self.add_default_note()

        # get the id for the one card on this collection
        card_id = self.collection.findCards('')[0]

        # suspend it
        self.execute('suspend_cards', {'ids': [card_id]})

        # test that getting the next card will be None
        card = self.collection.sched.getCard()
        self.assertEqual(card, None)

        # unsuspend it
        self.execute('unsuspend_cards', {'ids': [card_id]})

        # test that now we're getting the next card!
        self.collection.sched.reset()
        card = self.collection.sched.getCard()
        self.assertEqual(card.id, card_id)

    def test_cards_recent_ease(self):
        self.add_default_note()
        card_id = self.collection.findCards('')[0]

        # answer the card
        self.collection.reset()
        card = self.collection.sched.getCard()
        card.startTimer()
        # answer multiple times to see that we only get the latest!
        self.collection.sched.answerCard(card, 1)
        self.collection.sched.answerCard(card, 3)
        self.collection.sched.answerCard(card, 2)

        # pull the latest revision
        ret = self.execute('cards_recent_ease', {})
        self.assertEqual(ret[0]['id'], card_id)
        self.assertEqual(ret[0]['ease'], 2)

class ImportExportHandlerTest(CollectionTestBase):
    """Exercises ImportExportHandler with a tab-separated text export file."""

    # each row: [Front, Back, space-separated tags]
    export_rows = [
        ['Card front 1', 'Card back 1', 'Tag1 Tag2'],
        ['Card front 2', 'Card back 2', 'Tag1 Tag3'],
    ]

    def setUp(self):
        super(ImportExportHandlerTest, self).setUp()
        self.handler = ImportExportHandler()

    def execute(self, name, data):
        ids = ['collection_name']
        func = getattr(self.handler, name)
        req = RestHandlerRequest(self.mock_app, data, ids, {})
        return func(self.collection, req)

    def generate_text_export(self):
        # Create a simple export file
        export_data = ''
        for row in self.export_rows:
            export_data += '\t'.join(row) + '\n'
        export_path = os.path.join(self.temp_dir, 'export.txt')
        # NOTE(review): file() is a Python 2 builtin only.
        with file(export_path, 'wt') as fd:
            fd.write(export_data)

        return (export_data, export_path)

    def check_import(self):
        # Verify the imported notes match export_rows, in order.
        note_ids = self.collection.findNotes('')
        notes = [self.collection.getNote(note_id) for note_id in note_ids]
        self.assertEqual(len(notes), len(self.export_rows))

        for index, test_data in enumerate(self.export_rows):
            self.assertEqual(notes[index]['Front'], test_data[0])
            self.assertEqual(notes[index]['Back'], test_data[1])
            self.assertEqual(' '.join(notes[index].tags), test_data[2])

    def test_import_text_data(self):
        (export_data, export_path) = self.generate_text_export()

        data = {
            'filetype': 'text',
            'data': export_data,
        }
        ret = self.execute('import_file', data)
        self.check_import()

    def test_import_text_url(self):
        (export_data, export_path) = self.generate_text_export()

        data = {
            'filetype': 'text',
            'url': 'file://' + os.path.realpath(export_path),
        }
        ret = self.execute('import_file', data)
        self.check_import()

class NoteHandlerTest(CollectionTestBase):
    """Exercises NoteHandler operations on a single default note."""

    def setUp(self):
        super(NoteHandlerTest, self).setUp()
        self.handler = NoteHandler()

    def execute(self, name, data, note_id):
        ids = ['collection_name', note_id]
        func = getattr(self.handler, name)
        req = RestHandlerRequest(self.mock_app, data, ids, {})
        return func(self.collection, req)

    def test_index(self):
        self.add_default_note()

        note_id = self.collection.findNotes('')[0]

        ret = self.execute('index', {}, note_id)
        self.assertEqual(ret['id'], note_id)
        self.assertEqual(len(ret['fields']), 2)
        self.assertEqual(ret['flags'], 0)
        self.assertEqual(ret['model']['name'], 'Basic')
        self.assertEqual(ret['tags'], ['Tag1', 'Tag2'])
        self.assertEqual(ret['string_tags'], 'Tag1 Tag2')
        self.assertEqual(ret['usn'], -1)

    def test_update(self):
        self.add_default_note()

        note_id = self.collection.findNotes('')[0]

        data = self.execute('index', {}, note_id)
        data['fields']['Front'] = 'The new front'
        data['fields']['Back'] = 'The new back'
        data['tags'] = ['new1', 'new2']
        self.execute('update', data, note_id)

        note = self.collection.getNote(note_id)
        self.assertEqual(note['Front'], data['fields']['Front'])
        self.assertEqual(note['Back'], data['fields']['Back'])
        self.assertEqual(note.tags, data['tags'])

    def test_delete(self):
        self.add_default_note()

        note_id = self.collection.findNotes('')[0]
        res = self.collection.findNotes('nid:%s' % note_id)
        self.assertNotEqual(res, [])

        self.execute('delete', {}, note_id)

        res = self.collection.findNotes('nid:%s' % note_id)
        self.assertEqual(res, [])

    def test_add_tags(self):
        self.add_default_note()
        note_id = self.collection.findNotes('')[0]
        note = self.collection.getNote(note_id)
        old_mod = note.mod
        self.assertFalse('NT1' in note.tags)
        self.assertFalse('NT2' in note.tags)

        # note.mod has one-second resolution, so sleep to see it change
        time.sleep(1)
        self.execute('add_tags', {'tags': ['NT1', 'NT2']}, note_id)
        note = self.collection.getNote(note_id)
        self.assertTrue('NT1' in note.tags)
        self.assertTrue('NT2' in note.tags)
        self.assertTrue(note.mod > old_mod)

    def test_add_tags_no_mod_update(self):
        self.add_default_note()
        note_id = self.collection.findNotes('')[0]
        note = self.collection.getNote(note_id)
        old_mod = note.mod
        self.assertFalse('NT1' in note.tags)
        self.assertFalse('NT2' in note.tags)

        time.sleep(1)
        self.execute('add_tags', {'tags': ['NT1', 'NT2'], 'update_mod': False}, note_id)
        note = self.collection.getNote(note_id)
        self.assertTrue('NT1' in note.tags)
        self.assertTrue('NT2' in note.tags)
        self.assertEqual(note.mod, old_mod)

    def test_remove_tags(self):
        self.add_default_note()
        note_id = self.collection.findNotes('')[0]
        note = self.collection.getNote(note_id)
        old_mod = note.mod
        self.assertTrue('Tag1' in note.tags)
        self.assertTrue('Tag2' in note.tags)

        time.sleep(1)
        self.execute('remove_tags', {'tags': ['Tag1', 'Tag2']}, note_id)
        note = self.collection.getNote(note_id)
        self.assertFalse('Tag1' in note.tags)
        self.assertFalse('Tag2' in note.tags)
        self.assertTrue(note.mod > old_mod)

    def test_remove_tags_no_mod_update(self):
        self.add_default_note()
        note_id = self.collection.findNotes('')[0]
        note = self.collection.getNote(note_id)
        old_mod = note.mod
        self.assertTrue('Tag1' in note.tags)
        self.assertTrue('Tag2' in note.tags)

        time.sleep(1)
        self.execute('remove_tags', {'tags': ['Tag1', 'Tag2'], 'update_mod': False}, note_id)
        note = self.collection.getNote(note_id)
        self.assertFalse('Tag1' in note.tags)
        self.assertFalse('Tag2' in note.tags)
        self.assertEqual(note.mod, old_mod)

class DeckHandlerTest(CollectionTestBase):
    """Exercises DeckHandler against the default deck (id 1)."""

    def setUp(self):
        super(DeckHandlerTest, self).setUp()
        self.handler = DeckHandler()

    def execute(self, name, data):
        ids = ['collection_name', '1']
        func = getattr(self.handler, name)
        req = RestHandlerRequest(self.mock_app, data, ids, {})
        return func(self.collection, req)

    def test_index(self):
        ret = self.execute('index', {})
        #pprint(ret)
        self.assertEqual(ret['name'], 'Default')
        self.assertEqual(ret['id'], 1)
        self.assertEqual(ret['dyn'], False)

    def test_next_card(self):
        self.mock_app.execute_handler.return_value = None

        ret = self.execute('next_card', {})
        self.assertEqual(ret, None)
        # the deck handler must delegate to the collection-level handler
        self.mock_app.execute_handler.assert_called_with('collection', 'next_card', self.collection, RestHandlerRequest(self.mock_app, {'deck': '1'}, ['collection_name'], {}))

    def test_get_conf(self):
        ret = self.execute('get_conf', {})
        #pprint(ret)
        self.assertEqual(ret['name'], 'Default')
        self.assertEqual(ret['id'], 1)
        self.assertEqual(ret['dyn'], False)

class CardHandlerTest(CollectionTestBase):
    """Exercises CardHandler index with and without preloading note/deck."""

    def setUp(self):
        super(CardHandlerTest, self).setUp()
        self.handler = CardHandler()

    def execute(self, name, data, card_id):
        ids = ['collection_name', card_id]
        func = getattr(self.handler, name)
        req = RestHandlerRequest(self.mock_app, data, ids, {})
        return func(self.collection, req)

    def test_index_simple(self):
        self.add_default_note()

        note_id = self.collection.findNotes('')[0]
        card_id = self.collection.findCards('')[0]

        ret = self.execute('index', {}, card_id)
        self.assertEqual(ret['id'], card_id)
        self.assertEqual(ret['nid'], note_id)
        self.assertEqual(ret['did'], 1)
        # NOTE(review): has_key() is Python 2 only.
        self.assertFalse(ret.has_key('note'))
        self.assertFalse(ret.has_key('deck'))

    def test_index_load(self):
        self.add_default_note()

        note_id = self.collection.findNotes('')[0]
        card_id = self.collection.findCards('')[0]

        ret = self.execute('index', {'load_note': 1, 'load_deck': 1}, card_id)
        self.assertEqual(ret['id'], card_id)
        self.assertEqual(ret['nid'], note_id)
        self.assertEqual(ret['did'], 1)
        self.assertEqual(ret['note']['id'], note_id)
        self.assertEqual(ret['note']['model']['name'], 'Basic')
        self.assertEqual(ret['deck']['name'], 'Default')

if __name__ == '__main__':
    unittest.main()

--------------------------------------------------------------------------------
/AnkiServer/apps/sync_app.py:
--------------------------------------------------------------------------------

# AnkiServer - A personal Anki sync server
# Copyright (C) 2013 David Snopek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from webob.dec import wsgify
from webob.exc import *
from webob import Response

import os
import hashlib
import logging
import random
import string
import unicodedata
import zipfile

import AnkiServer

import anki
from anki.db import DB
from anki.sync import Syncer, MediaSyncer
from anki.utils import intTime, checksum, isMac
from anki.consts import SYNC_ZIP_SIZE, SYNC_ZIP_COUNT

try:
    import simplejson as json
except ImportError:
    import json

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

try:
    from pysqlite2 import dbapi2 as sqlite
except ImportError:
    from sqlite3 import dbapi2 as sqlite

class SyncCollectionHandler(Syncer):
    """Server side of the collection sync protocol.

    Wraps anki's Syncer so the operations listed in `operations` can be
    dispatched by name from the web layer.
    """

    operations = ['meta', 'applyChanges', 'start', 'chunk', 'applyChunk', 'sanityCheck2', 'finish']

    def __init__(self, col):
        # So that 'server' (the 3rd argument) can't get set
        Syncer.__init__(self, col)

    def meta(self, cv=None):
        """Return the collection metadata a client needs to start a sync.

        :param cv: optional 'client,version,platform' string sent by newer
            clients; when absent we assume Anki Desktop 2.0.12.
        :returns: a dict for new-protocol clients (Anki Desktop >= 2.0.13,
            AnkiDroid >= 2.3), otherwise the old-style tuple.
        """
        # Make sure the media database is open!
        if self.col.media.db is None:
            self.col.media.connect()

        if cv is not None:
            client, version, platform = cv.split(',')
        else:
            client = 'ankidesktop'
            version = '2.0.12'
            platform = 'unknown'

        # Strip letters so suffixes like '13beta1' still parse as ints.
        version_int = [ int(str(x).translate(None, string.ascii_letters))
                        for x in version.split('.') ]

        # Some insanity added in Anki 2.0.13: newer clients expect a dict.
        # Compare version lists lexicographically; the previous element-wise
        # check raised IndexError on two-part versions (e.g. '2.1') and
        # wrongly sent the old format to 2.1.x desktop clients.
        if (client == 'ankidroid' and version_int >= [2, 3]) \
           or (client == 'ankidesktop' and version_int >= [2, 0, 13]):
            return {
                'scm': self.col.scm,
                'ts': intTime(),
                'mod': self.col.mod,
                'usn': self.col._usn,
                'musn': self.col.media.lastUsn(),
                'msg': '',
                'cont': True,
            }
        else:
            return (self.col.mod, self.col.scm, self.col._usn, intTime(), self.col.media.lastUsn())

class SyncMediaHandler(MediaSyncer):
    """Server side of the media sync protocol."""

    operations = ['begin', 'mediaChanges', 'mediaSanity', 'mediaList', 'uploadChanges', 'downloadFiles']

    def __init__(self, col):
        MediaSyncer.__init__(self, col)

    def begin(self, skey):
        """Start a media sync session; echo the session key and our usn."""
        return json.dumps({
            'data':{
                'sk':skey,
                'usn':self.col.media.lastUsn()
            },
            'err':''
        })

    def uploadChanges(self, data, skey):
        """
        The zip file contains files the client hasn't synced with the server
        yet ('dirty'), and info on files it has deleted from its own media dir.

        :param data: raw bytes of the uploaded zip file.
        :param skey: session key (unused here, part of the wire protocol).
        :returns: JSON with [processed_count, new lastUsn].
        """

        self._check_zip_data(data)

        processed_count = self._adopt_media_changes_from_zip(data)

        # We increment our lastUsn once for each file we processed.
        # (lastUsn - processed_count) must equal the client's lastUsn.
        our_last_usn = self.col.media.lastUsn()
        self.col.media.setLastUsn(our_last_usn + processed_count)

        return json.dumps(
            {
                'data': [processed_count,
                         self.col.media.lastUsn()],
                'err': ''
            }
        )

    @staticmethod
    def _check_zip_data(zip_data):
        """Reject uploads whose metadata file or total contents are too big.

        :raises ValueError: when either limit is exceeded.
        """
        max_zip_size = 100*1024*1024
        max_meta_file_size = 100000

        file_buffer = StringIO(zip_data)
        zip_file = zipfile.ZipFile(file_buffer, 'r')

        meta_file_size = zip_file.getinfo("_meta").file_size
        sum_file_sizes = sum(info.file_size for info in zip_file.infolist())

        zip_file.close()
        file_buffer.close()

        if meta_file_size > max_meta_file_size:
            raise ValueError("Zip file's metadata file is larger than %s "
                             "Bytes." % max_meta_file_size)
        elif sum_file_sizes > max_zip_size:
            raise ValueError("Zip file contents are larger than %s Bytes." %
                             max_zip_size)

    def _adopt_media_changes_from_zip(self, zip_data):
        """
        Adds and removes files to/from the database and media directory
        according to the data in zip file zipData.

        :returns: number of files added plus removed.
        """

        file_buffer = StringIO(zip_data)
        zip_file = zipfile.ZipFile(file_buffer, 'r')

        # Get meta info first.
        meta = json.loads(zip_file.read("_meta"))

        # Remove media files that were removed on the client.
        media_to_remove = []
        for normname, ordinal in meta:
            if ordinal == '':
                media_to_remove.append(self._normalize_filename(normname))

        # Add media files that were added on the client.
        media_to_add = []
        for i in zip_file.infolist():
            if i.filename == "_meta":  # Ignore previously retrieved metadata.
                continue
            else:
                file_data = zip_file.read(i)
                csum = checksum(file_data)
                filename = self._normalize_filename(meta[int(i.filename)][0])
                file_path = os.path.join(self.col.media.dir(), filename)

                # Save file to media directory; close the handle promptly
                # (the previous code leaked the file object).
                with open(file_path, 'wb') as media_file:
                    media_file.write(file_data)
                mtime = self.col.media._mtime(file_path)

                media_to_add.append((filename, csum, mtime, 0))

        # We count all files we are to remove, even if we don't have them in
        # our media directory and our db doesn't know about them.
        processed_count = len(media_to_remove) + len(media_to_add)

        assert len(meta) == processed_count  # sanity check

        if media_to_remove:
            self._remove_media_files(media_to_remove)

        if media_to_add:
            self.col.media.db.executemany(
                "INSERT OR REPLACE INTO media VALUES (?,?,?,?)", media_to_add)

        return processed_count

    @staticmethod
    def _normalize_filename(filename):
        """
        Performs unicode normalization for file names. Logic taken from Anki's
        MediaManager.addFilesFromZip().
        """

        if not isinstance(filename, unicode):
            filename = unicode(filename, "utf8")

        # Normalize name for platform.
        if isMac:  # global
            filename = unicodedata.normalize("NFD", filename)
        else:
            filename = unicodedata.normalize("NFC", filename)

        return filename

    def _remove_media_files(self, filenames):
        """
        Marks all files in list filenames as deleted and removes them from the
        media directory.
        """

        # Mark the files as deleted in our db.
        self.col.media.db.executemany("UPDATE media " +
                                      "SET csum = NULL " +
                                      " WHERE fname = ?",
                                      [(f, ) for f in filenames])

        # Remove the files from our media directory if it is present.
        logging.debug('Removing %d files from media dir.' % len(filenames))
        for filename in filenames:
            try:
                os.remove(os.path.join(self.col.media.dir(), filename))
            except OSError as err:
                logging.error("Error when removing file '%s' from media dir: "
                              "%s" % (filename, str(err)))

    def downloadFiles(self, files):
        """Bundle the requested media files into a zip for the client.

        Stops adding files once the running size exceeds SYNC_ZIP_SIZE or
        the count exceeds SYNC_ZIP_COUNT; the client requests the rest in a
        later batch.
        """
        flist = {}
        cnt = 0
        sz = 0
        f = StringIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)

        for fname in files:
            z.write(os.path.join(self.col.media.dir(), fname), str(cnt))
            flist[str(cnt)] = fname
            sz += os.path.getsize(os.path.join(self.col.media.dir(), fname))
            if sz > SYNC_ZIP_SIZE or cnt > SYNC_ZIP_COUNT:
                break
            cnt += 1

        z.writestr("_meta", json.dumps(flist))
        z.close()

        return f.getvalue()

    def mediaChanges(self, lastUsn, skey):
        """Report media rows the client is missing.

        NOTE(review): whenever the client is behind (or at usn 0) this
        returns *all* media rows, not just the changed ones.
        """
        result = []
        usn = self.col.media.lastUsn()
        fname = csum = None

        if lastUsn < usn or lastUsn == 0:
            for fname, mtime, csum in self.col.media.db.execute("select fname,mtime,csum from media"):
                result.append([fname, usn, csum])

        return json.dumps({'data':result, 'err':''})

    def mediaSanity(self, local=None):
        """Compare the client's media count against ours."""
        if self.col.media.mediaCount() == local:
            result = "OK"
        else:
            result = "FAILED"

        return json.dumps({'data':result, 'err':''})

class SyncUserSession(object):
    """Per-user sync session state: session key, data path and handlers."""

    def __init__(self, name, path, collection_manager, setup_new_collection=None):
        import time
        self.skey = self._generate_session_key()
        self.name = name
        self.path = path
        self.collection_manager = collection_manager
        self.setup_new_collection = setup_new_collection
        self.version = 0
        self.client_version = ''
        self.created = time.time()

        # make sure the user path exists
        if not os.path.exists(path):
            os.mkdir(path)

        self.collection_handler = None
self.media_handler = None 297 | 298 | def _generate_session_key(self): 299 | return checksum(str(random.random()))[:8] 300 | 301 | def get_collection_path(self): 302 | return os.path.realpath(os.path.join(self.path, 'collection.anki2')) 303 | 304 | def get_thread(self): 305 | return self.collection_manager.get_collection(self.get_collection_path(), self.setup_new_collection) 306 | 307 | def get_handler_for_operation(self, operation, col): 308 | if operation in SyncCollectionHandler.operations: 309 | cache_name, handler_class = 'collection_handler', SyncCollectionHandler 310 | else: 311 | cache_name, handler_class = 'media_handler', SyncMediaHandler 312 | 313 | if getattr(self, cache_name) is None: 314 | setattr(self, cache_name, handler_class(col)) 315 | handler = getattr(self, cache_name) 316 | # The col object may actually be new now! This happens when we close a collection 317 | # for inactivity and then later re-open it (creating a new Collection object). 318 | handler.col = col 319 | return handler 320 | 321 | class SimpleSessionManager(object): 322 | """A simple session manager that keeps the sessions in memory.""" 323 | 324 | def __init__(self): 325 | self.sessions = {} 326 | 327 | def load(self, hkey, session_factory=None): 328 | return self.sessions.get(hkey) 329 | 330 | def load_from_skey(self, skey, session_factory=None): 331 | for i in self.sessions: 332 | if self.sessions[i].skey == skey: 333 | return self.sessions[i] 334 | 335 | def save(self, hkey, session): 336 | self.sessions[hkey] = session 337 | 338 | def delete(self, hkey): 339 | del self.sessions[hkey] 340 | 341 | class SimpleUserManager(object): 342 | """A simple user manager that always allows any user.""" 343 | 344 | def authenticate(self, username, password): 345 | """ 346 | Returns True if this username is allowed to connect with this password. False otherwise. 347 | Override this to change how users are authenticated. 
348 | """ 349 | 350 | return True 351 | 352 | def username2dirname(self, username): 353 | """ 354 | Returns the directory name for the given user. By default, this is just the username. 355 | Override this to adjust the mapping between users and their directory. 356 | """ 357 | 358 | return username 359 | 360 | class SyncApp(object): 361 | valid_urls = SyncCollectionHandler.operations + SyncMediaHandler.operations + ['hostKey', 'upload', 'download', 'getDecks'] 362 | 363 | def __init__(self, **kw): 364 | from AnkiServer.threading import getCollectionManager 365 | 366 | self.data_root = os.path.abspath(kw.get('data_root', '.')) 367 | self.base_url = kw.get('base_url', '/') 368 | self.base_media_url = kw.get('base_media_url', '/') 369 | self.setup_new_collection = kw.get('setup_new_collection') 370 | self.hook_pre_sync = kw.get('hook_pre_sync') 371 | self.hook_post_sync = kw.get('hook_post_sync') 372 | self.hook_download = kw.get('hook_download') 373 | self.hook_upload = kw.get('hook_upload') 374 | 375 | try: 376 | self.session_manager = kw['session_manager'] 377 | except KeyError: 378 | self.session_manager = SimpleSessionManager() 379 | 380 | try: 381 | self.user_manager = kw['user_manager'] 382 | except KeyError: 383 | self.user_manager = SimpleUserManager() 384 | 385 | try: 386 | self.collection_manager = kw['collection_manager'] 387 | except KeyError: 388 | self.collection_manager = getCollectionManager() 389 | 390 | # make sure the base_url has a trailing slash 391 | if not self.base_url.endswith('/'): 392 | self.base_url += '/' 393 | if not self.base_media_url.endswith('/'): 394 | self.base_media_url += '/' 395 | 396 | def generateHostKey(self, username): 397 | """Generates a new host key to be used by the given username to identify their session. 
398 | This values is random.""" 399 | 400 | import hashlib, time, random, string 401 | chars = string.ascii_letters + string.digits 402 | val = ':'.join([username, str(int(time.time())), ''.join(random.choice(chars) for x in range(8))]) 403 | return hashlib.md5(val).hexdigest() 404 | 405 | def create_session(self, username, user_path): 406 | return SyncUserSession(username, 407 | user_path, 408 | self.collection_manager, 409 | self.setup_new_collection) 410 | 411 | def _create_session_for_user(self, username): 412 | """ 413 | Creates a session object for the user and creates a hkey by which we 414 | can retrieve it on later requests by that user during the same sync 415 | session. 416 | Returns the hkey. 417 | """ 418 | 419 | dirname = self.user_manager.username2dirname(username) 420 | if dirname is None: 421 | raise HTTPForbidden() 422 | 423 | hkey = self.generateHostKey(username) 424 | logging.debug("generated session key '%s' for user '%s'" 425 | % (hkey, username)) 426 | 427 | user_path = os.path.join(self.data_root, dirname) 428 | 429 | session = self.create_session(username, user_path) 430 | 431 | self.session_manager.save(hkey, session) 432 | 433 | return hkey 434 | 435 | def _decode_data(self, data, compression=0): 436 | import gzip 437 | 438 | if compression: 439 | buf = gzip.GzipFile(mode="rb", fileobj=StringIO(data)) 440 | data = buf.read() 441 | buf.close() 442 | 443 | # really lame check for JSON 444 | if data[0] == '{' and data[-1] == '}': 445 | data = json.loads(data) 446 | else: 447 | data = {'data': data} 448 | 449 | return data 450 | 451 | def operation_upload(self, col, data, session): 452 | # Verify integrity of the received database file before replacing our 453 | # existing db. 
454 | temp_db_path = session.get_collection_path() + ".tmp" 455 | with open(temp_db_path, 'wb') as f: 456 | f.write(data) 457 | 458 | try: 459 | test_db = DB(temp_db_path) 460 | if test_db.scalar("pragma integrity_check") != "ok": 461 | raise HTTPBadRequest("Integrity check failed for uploaded " 462 | "collection database file.") 463 | test_db.close() 464 | except sqlite.Error as e: 465 | raise HTTPBadRequest("Uploaded collection database file is " 466 | "corrupt.") 467 | 468 | # Overwrite existing db. 469 | col.close() 470 | try: 471 | os.rename(temp_db_path, session.get_collection_path()) 472 | finally: 473 | col.reopen() 474 | 475 | # If everything went fine, run hook_upload if one is defined. 476 | if self.hook_upload is not None: 477 | self.hook_upload(col, session) 478 | 479 | return True 480 | 481 | def operation_download(self, col, session): 482 | # run hook_download if one is defined 483 | if self.hook_download is not None: 484 | self.hook_download(col, session) 485 | 486 | col.close() 487 | try: 488 | data = open(session.get_collection_path(), 'rb').read() 489 | finally: 490 | col.reopen() 491 | return data 492 | 493 | @wsgify 494 | def __call__(self, req): 495 | # Get and verify the session 496 | try: 497 | hkey = req.POST['k'] 498 | except KeyError: 499 | hkey = None 500 | 501 | session = self.session_manager.load(hkey, self.create_session) 502 | 503 | if session is None: 504 | try: 505 | skey = req.POST['sk'] 506 | session = self.session_manager.load_from_skey(skey, self.create_session) 507 | except KeyError: 508 | skey = None 509 | 510 | try: 511 | compression = int(req.POST['c']) 512 | except KeyError: 513 | compression = 0 514 | 515 | try: 516 | data = req.POST['data'].file.read() 517 | data = self._decode_data(data, compression) 518 | except KeyError: 519 | data = {} 520 | except ValueError: 521 | # Bad JSON 522 | raise HTTPBadRequest() 523 | 524 | if req.path.startswith(self.base_url): 525 | url = req.path[len(self.base_url):] 526 | if url not in 
self.valid_urls: 527 | raise HTTPNotFound() 528 | 529 | if url == 'getDecks': 530 | # This is an Anki 1.x client! Tell them to upgrade. 531 | import zlib, logging 532 | u = req.params.getone('u') 533 | if u: 534 | logging.warn("'%s' is attempting to sync with an Anki 1.x client" % u) 535 | return Response( 536 | status='200 OK', 537 | content_type='application/json', 538 | content_encoding='deflate', 539 | body=zlib.compress(json.dumps({'status': 'oldVersion'}))) 540 | 541 | if url == 'hostKey': 542 | try: 543 | u = data['u'] 544 | p = data['p'] 545 | except KeyError: 546 | raise HTTPForbidden('Must pass username and password') 547 | if self.user_manager.authenticate(u, p): 548 | hkey = self._create_session_for_user(u) 549 | 550 | result = {'key': hkey} 551 | return Response( 552 | status='200 OK', 553 | content_type='application/json', 554 | body=json.dumps(result)) 555 | else: 556 | # TODO: do I have to pass 'null' for the client to receive None? 557 | raise HTTPForbidden('null') 558 | 559 | if session is None: 560 | raise HTTPForbidden() 561 | 562 | if url in SyncCollectionHandler.operations + SyncMediaHandler.operations: 563 | # 'meta' passes the SYNC_VER but it isn't used in the handler 564 | if url == 'meta': 565 | if session.skey == None and req.POST.has_key('s'): 566 | session.skey = req.POST['s'] 567 | if data.has_key('v'): 568 | session.version = data['v'] 569 | del data['v'] 570 | if data.has_key('cv'): 571 | session.client_version = data['cv'] 572 | self.session_manager.save(hkey, session) 573 | session = self.session_manager.load(hkey, self.create_session) 574 | 575 | thread = session.get_thread() 576 | 577 | # run hook_pre_sync if one is defined 578 | if url == 'start': 579 | if self.hook_pre_sync is not None: 580 | thread.execute(self.hook_pre_sync, [session]) 581 | 582 | result = self._execute_handler_method_in_thread(url, data, session) 583 | 584 | # If it's a complex data type, we convert it to JSON 585 | if type(result) not in (str, unicode): 586 
| result = json.dumps(result) 587 | 588 | if url == 'finish': 589 | # TODO: Apparently 'finish' isn't when we're done because 'mediaList' comes 590 | # after it... When can we possibly delete the session? 591 | #self.session_manager.delete(hkey) 592 | 593 | # run hook_post_sync if one is defined 594 | if self.hook_post_sync is not None: 595 | thread.execute(self.hook_post_sync, [session]) 596 | 597 | return Response( 598 | status='200 OK', 599 | content_type='application/json', 600 | body=result) 601 | 602 | elif url == 'upload': 603 | thread = session.get_thread() 604 | result = thread.execute(self.operation_upload, [data['data'], session]) 605 | return Response( 606 | status='200 OK', 607 | content_type='text/plain', 608 | body='OK' if result else 'Error') 609 | 610 | elif url == 'download': 611 | thread = session.get_thread() 612 | result = thread.execute(self.operation_download, [session]) 613 | return Response( 614 | status='200 OK', 615 | content_type='text/plain', 616 | body=result) 617 | 618 | # This was one of our operations but it didn't get handled... Oops! 619 | raise HTTPInternalServerError() 620 | 621 | # media sync 622 | elif req.path.startswith(self.base_media_url): 623 | if session is None: 624 | raise HTTPForbidden() 625 | 626 | url = req.path[len(self.base_media_url):] 627 | 628 | if url not in self.valid_urls: 629 | raise HTTPNotFound() 630 | 631 | if url == 'begin' or url == 'mediaChanges' or url == 'uploadChanges': 632 | data['skey'] = session.skey 633 | 634 | return self._execute_handler_method_in_thread(url, data, session) 635 | 636 | return Response(status='200 OK', content_type='text/plain', body='Anki Sync Server') 637 | 638 | @staticmethod 639 | def _execute_handler_method_in_thread(method_name, keyword_args, session): 640 | """ 641 | Gets and runs the handler method specified by method_name inside the 642 | thread for session. The handler method will access the collection as 643 | self.col. 
644 | """ 645 | 646 | def run_func(col): 647 | # Retrieve the correct handler method. 648 | handler = session.get_handler_for_operation(method_name, col) 649 | handler_method = getattr(handler, method_name) 650 | 651 | res = handler_method(**keyword_args) 652 | 653 | col.save() 654 | return res 655 | 656 | run_func.func_name = method_name # More useful debugging messages. 657 | 658 | # Send the closure to the thread for execution. 659 | thread = session.get_thread() 660 | result = thread.execute(run_func) 661 | 662 | return result 663 | 664 | 665 | class SqliteSessionManager(SimpleSessionManager): 666 | """Stores sessions in a SQLite database to prevent the user from being logged out 667 | everytime the SyncApp is restarted.""" 668 | 669 | def __init__(self, session_db_path): 670 | SimpleSessionManager.__init__(self) 671 | 672 | self.session_db_path = os.path.abspath(session_db_path) 673 | 674 | def _conn(self): 675 | new = not os.path.exists(self.session_db_path) 676 | conn = sqlite.connect(self.session_db_path) 677 | if new: 678 | cursor = conn.cursor() 679 | cursor.execute("CREATE TABLE session (hkey VARCHAR PRIMARY KEY, skey VARCHAR, user VARCHAR, path VARCHAR)") 680 | return conn 681 | 682 | def load(self, hkey, session_factory=None): 683 | session = SimpleSessionManager.load(self, hkey) 684 | if session is not None: 685 | return session 686 | 687 | conn = self._conn() 688 | cursor = conn.cursor() 689 | 690 | cursor.execute("SELECT skey, user, path FROM session WHERE hkey=?", (hkey,)) 691 | res = cursor.fetchone() 692 | 693 | if res is not None: 694 | session = self.sessions[hkey] = session_factory(res[1], res[2]) 695 | session.skey = res[0] 696 | return session 697 | 698 | def load_from_skey(self, skey, session_factory=None): 699 | session = SimpleSessionManager.load_from_skey(self, skey) 700 | if session is not None: 701 | return session 702 | 703 | conn = self._conn() 704 | cursor = conn.cursor() 705 | 706 | cursor.execute("SELECT hkey, user, path FROM 
session WHERE skey=?", (skey,)) 707 | res = cursor.fetchone() 708 | 709 | if res is not None: 710 | session = self.sessions[res[0]] = session_factory(res[1], res[2]) 711 | session.skey = skey 712 | return session 713 | 714 | def save(self, hkey, session): 715 | SimpleSessionManager.save(self, hkey, session) 716 | 717 | conn = self._conn() 718 | cursor = conn.cursor() 719 | 720 | cursor.execute("INSERT OR REPLACE INTO session (hkey, skey, user, path) VALUES (?, ?, ?, ?)", 721 | (hkey, session.skey, session.name, session.path)) 722 | conn.commit() 723 | 724 | def delete(self, hkey): 725 | SimpleSessionManager.delete(self, hkey) 726 | 727 | conn = self._conn() 728 | cursor = conn.cursor() 729 | 730 | cursor.execute("DELETE FROM session WHERE hkey=?", (hkey,)) 731 | conn.commit() 732 | 733 | class SqliteUserManager(SimpleUserManager): 734 | """Authenticates users against a SQLite database.""" 735 | 736 | def __init__(self, auth_db_path): 737 | self.auth_db_path = os.path.abspath(auth_db_path) 738 | 739 | def authenticate(self, username, password): 740 | """Returns True if this username is allowed to connect with this password. 
False otherwise.""" 741 | 742 | conn = sqlite.connect(self.auth_db_path) 743 | cursor = conn.cursor() 744 | param = (username,) 745 | 746 | cursor.execute("SELECT hash FROM auth WHERE user=?", param) 747 | 748 | db_ret = cursor.fetchone() 749 | 750 | if db_ret != None: 751 | db_hash = str(db_ret[0]) 752 | salt = db_hash[-16:] 753 | hashobj = hashlib.sha256() 754 | 755 | hashobj.update(username+password+salt) 756 | 757 | conn.close() 758 | 759 | return (db_ret != None and hashobj.hexdigest()+salt == db_hash) 760 | 761 | # Our entry point 762 | def make_app(global_conf, **local_conf): 763 | if local_conf.has_key('session_db_path'): 764 | local_conf['session_manager'] = SqliteSessionManager(local_conf['session_db_path']) 765 | if local_conf.has_key('auth_db_path'): 766 | local_conf['user_manager'] = SqliteUserManager(local_conf['auth_db_path']) 767 | return SyncApp(**local_conf) 768 | 769 | def main(): 770 | from wsgiref.simple_server import make_server 771 | from AnkiServer.threading import shutdown 772 | 773 | ankiserver = SyncApp() 774 | httpd = make_server('', 8001, ankiserver) 775 | try: 776 | print "Starting..." 777 | httpd.serve_forever() 778 | except KeyboardInterrupt: 779 | print "Exiting ..." 780 | finally: 781 | shutdown() 782 | 783 | if __name__ == '__main__': main() 784 | -------------------------------------------------------------------------------- /AnkiServer/apps/rest_app.py: -------------------------------------------------------------------------------- 1 | 2 | # AnkiServer - A personal Anki sync server 3 | # Copyright (C) 2013 David Snopek 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Affero General Public License as 7 | # published by the Free Software Foundation, either version 3 of the 8 | # License, or (at your option) any later version. 
9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU Affero General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Affero General Public License 16 | # along with this program. If not, see . 17 | 18 | from webob.dec import wsgify 19 | from webob.exc import * 20 | from webob import Response 21 | 22 | #from pprint import pprint 23 | 24 | try: 25 | import simplejson as json 26 | from simplejson import JSONDecodeError 27 | except ImportError: 28 | import json 29 | JSONDecodeError = ValueError 30 | 31 | import os, logging 32 | 33 | import anki.consts 34 | import anki.lang 35 | from anki.lang import _ as t 36 | from anki.utils import intTime 37 | 38 | import AnkiServer 39 | 40 | __all__ = ['RestApp', 'RestHandlerBase', 'noReturnValue'] 41 | 42 | def noReturnValue(func): 43 | func.hasReturnValue = False 44 | return func 45 | 46 | class RestHandlerBase(object): 47 | """Parent class for a handler group.""" 48 | hasReturnValue = True 49 | 50 | class _RestHandlerWrapper(RestHandlerBase): 51 | """Wrapper for functions that we can't modify.""" 52 | def __init__(self, func_name, func, hasReturnValue=True): 53 | self.func_name = func_name 54 | self.func = func 55 | self.hasReturnValue = hasReturnValue 56 | def __call__(self, *args, **kw): 57 | return self.func(*args, **kw) 58 | 59 | class RestHandlerRequest(object): 60 | def __init__(self, app, data, ids, session): 61 | self.app = app 62 | self.data = data 63 | self.ids = ids 64 | self.session = session 65 | 66 | def copy(self): 67 | return RestHandlerRequest(self.app, self.data.copy(), self.ids[:], self.session) 68 | 69 | def __eq__(self, other): 70 | return self.app == other.app and self.data == other.data and self.ids == other.ids and self.session == other.session 71 | 72 | class RestApp(object): 73 | """A WSGI app 
that implements RESTful operations on Collections, Decks and Cards.""" 74 | 75 | # Defines not only the valid handler types, but their position in the URL string 76 | handler_types = ['collection', ['model', 'note', 'deck', 'card']] 77 | 78 | def __init__(self, data_root, **kw): 79 | from AnkiServer.threading import getCollectionManager 80 | 81 | self.data_root = os.path.abspath(data_root) 82 | self.allowed_hosts = kw.get('allowed_hosts', '*') 83 | self.setup_new_collection = kw.get('setup_new_collection') 84 | self.hook_pre_execute = kw.get('hook_pre_execute') 85 | self.hook_post_execute = kw.get('hook_post_execute') 86 | 87 | if kw.get('collection_manager') is not None: 88 | self.collection_manager = kw['collection_manager'] 89 | else: 90 | self.collection_manager = getCollectionManager() 91 | 92 | self.handlers = {} 93 | for type_list in self.handler_types: 94 | if type(type_list) is not list: 95 | type_list = [type_list] 96 | for handler_type in type_list: 97 | self.handlers[handler_type] = {} 98 | 99 | if kw.get('use_default_handlers', True): 100 | self.add_handler_group('collection', CollectionHandler()) 101 | self.add_handler_group('note', NoteHandler()) 102 | self.add_handler_group('model', ModelHandler()) 103 | self.add_handler_group('deck', DeckHandler()) 104 | self.add_handler_group('card', CardHandler()) 105 | 106 | # hold per collection session data 107 | self.sessions = {} 108 | 109 | def add_handler(self, type, name, handler): 110 | """Adds a callback handler for a type (collection, deck, card) with a unique name. 111 | 112 | - 'type' is the item that will be worked on, for example: collection, deck, and card. 113 | 114 | - 'name' is a unique name for the handler that gets used in the URL. 115 | 116 | - 'handler' is a callable that takes (collection, data, ids). 117 | """ 118 | 119 | if self.handlers[type].has_key(name): 120 | raise "Handler already for %(type)s/%(name)s exists!" 
121 | self.handlers[type][name] = handler 122 | 123 | def add_handler_group(self, type, group): 124 | """Adds several handlers for every public method on an object descended from RestHandlerBase. 125 | 126 | This allows you to create a single class with several methods, so that you can quickly 127 | create a group of related handlers.""" 128 | 129 | import inspect 130 | for name, method in inspect.getmembers(group, predicate=inspect.ismethod): 131 | if not name.startswith('_'): 132 | if hasattr(group, 'hasReturnValue') and not hasattr(method, 'hasReturnValue'): 133 | method = _RestHandlerWrapper(group.__class__.__name__ + '.' + name, method, group.hasReturnValue) 134 | self.add_handler(type, name, method) 135 | 136 | def execute_handler(self, type, name, col, req): 137 | """Executes the handler with the given type and name, passing in the col and req as arguments.""" 138 | 139 | handler, hasReturnValue = self._getHandler(type, name) 140 | ret = handler(col, req) 141 | if hasReturnValue: 142 | return ret 143 | 144 | def list_collections(self): 145 | """Returns an array of valid collection names in our self.data_path.""" 146 | return [x for x in os.listdir(self.data_root) if os.path.exists(os.path.join(self.data_root, x, 'collection.anki2'))] 147 | 148 | def _checkRequest(self, req): 149 | """Raises an exception if the request isn't allowed or valid for some reason.""" 150 | if self.allowed_hosts != '*': 151 | try: 152 | remote_addr = req.headers['X-Forwarded-For'] 153 | except KeyError: 154 | remote_addr = req.remote_addr 155 | if remote_addr != self.allowed_hosts: 156 | raise HTTPForbidden() 157 | 158 | if req.path == '/': 159 | if req.method != 'GET': 160 | raise HTTPMethodNotAllowed(allow=['GET']) 161 | elif req.method != 'POST': 162 | raise HTTPMethodNotAllowed(allow=['POST']) 163 | 164 | def _parsePath(self, path): 165 | """Takes a request path and returns a tuple containing the handler type, name 166 | and a list of ids. 
167 | 168 | Raises an HTTPNotFound exception if the path is invalid.""" 169 | 170 | if path in ('', '/'): 171 | raise HTTPNotFound() 172 | 173 | # split the URL into a list of parts 174 | if path[0] == '/': 175 | path = path[1:] 176 | parts = path.split('/') 177 | 178 | # pull the type and context from the URL parts 179 | handler_type = None 180 | ids = [] 181 | for type_list in self.handler_types: 182 | if len(parts) == 0: 183 | break 184 | 185 | # some URL positions can have multiple types 186 | if type(type_list) is not list: 187 | type_list = [type_list] 188 | 189 | # get the handler_type 190 | if parts[0] not in type_list: 191 | break 192 | handler_type = parts.pop(0) 193 | 194 | # add the id to the id list 195 | if len(parts) > 0: 196 | ids.append(parts.pop(0)) 197 | # break if we don't have enough parts to make a new type/id pair 198 | if len(parts) < 2: 199 | break 200 | 201 | # sanity check to make sure the URL is valid 202 | if len(parts) > 1 or len(ids) == 0: 203 | raise HTTPNotFound() 204 | 205 | # get the handler name 206 | if len(parts) == 0: 207 | name = 'index' 208 | else: 209 | name = parts[0] 210 | 211 | return (handler_type, name, ids) 212 | 213 | def _getCollectionPath(self, collection_id): 214 | """Returns the path to the collection based on the collection_id from the request. 215 | 216 | Raises HTTPBadRequest if the collection_id is invalid.""" 217 | 218 | path = os.path.normpath(os.path.join(self.data_root, collection_id, 'collection.anki2')) 219 | if path[0:len(self.data_root)] != self.data_root: 220 | # attempting to escape our data jail! 221 | raise HTTPBadRequest('"%s" is not a valid collection' % collection_id) 222 | 223 | return path 224 | 225 | def _getHandler(self, type, name): 226 | """Returns a tuple containing handler function for this type and name, and a boolean flag 227 | if that handler has a return value. 
228 | 229 | Raises an HTTPNotFound exception if the handler doesn't exist.""" 230 | 231 | # get the handler function 232 | try: 233 | handler = self.handlers[type][name] 234 | except KeyError: 235 | raise HTTPNotFound() 236 | 237 | # get if we have a return value 238 | hasReturnValue = True 239 | if hasattr(handler, 'hasReturnValue'): 240 | hasReturnValue = handler.hasReturnValue 241 | 242 | return (handler, hasReturnValue) 243 | 244 | def _parseRequestBody(self, req): 245 | """Parses the request body (JSON) into a Python dict and returns it. 246 | 247 | Raises an HTTPBadRequest exception if the request isn't valid JSON.""" 248 | 249 | try: 250 | data = json.loads(req.body) 251 | except JSONDecodeError, e: 252 | logging.error(req.path+': Unable to parse JSON: '+str(e), exc_info=True) 253 | raise HTTPBadRequest() 254 | 255 | # fix for a JSON encoding 'quirk' in PHP 256 | if type(data) == list and len(data) == 0: 257 | data = {} 258 | 259 | # make the keys into non-unicode strings 260 | data = dict([(str(k), v) for k, v in data.items()]) 261 | 262 | return data 263 | 264 | def _execute_handler(self, col, req, handler): 265 | if self.hook_pre_execute is not None: 266 | self.hook_pre_execute(col, req) 267 | result = handler(col, req) 268 | if self.hook_post_execute is not None: 269 | self.hook_post_execute(col, req, result) 270 | return result 271 | 272 | @wsgify 273 | def __call__(self, req): 274 | # make sure the request is valid 275 | self._checkRequest(req) 276 | 277 | # special non-collection paths 278 | if req.path == '/': 279 | return Response('AnkiServer ' + str(AnkiServer.__version__), content_type='text/plain') 280 | if req.path == '/list_collections': 281 | return Response(json.dumps(self.list_collections()), content_type='application/json') 282 | 283 | # parse the path 284 | type, name, ids = self._parsePath(req.path) 285 | 286 | # get the collection path 287 | collection_path = self._getCollectionPath(ids[0]) 288 | print collection_path 289 | 290 | # get 
the handler function 291 | handler, hasReturnValue = self._getHandler(type, name) 292 | 293 | # parse the request body 294 | data = self._parseRequestBody(req) 295 | 296 | # get the users session 297 | try: 298 | session = self.sessions[ids[0]] 299 | except KeyError: 300 | session = self.sessions[ids[0]] = {} 301 | 302 | # debug 303 | from pprint import pprint 304 | pprint(data) 305 | 306 | # run it! 307 | try: 308 | col = self.collection_manager.get_collection(collection_path, self.setup_new_collection) 309 | handler_request = RestHandlerRequest(self, data, ids, session) 310 | output = col.execute(self._execute_handler, [handler_request, handler], {}, hasReturnValue) 311 | except HTTPError, e: 312 | # we pass these on through! 313 | raise 314 | except Exception, e: 315 | logging.error(e) 316 | return HTTPInternalServerError() 317 | 318 | if output is None: 319 | return Response('', content_type='text/plain') 320 | else: 321 | return Response(json.dumps(output), content_type='application/json') 322 | 323 | class CollectionHandler(RestHandlerBase): 324 | """Default handler group for 'collection' type.""" 325 | 326 | # 327 | # MODELS - Store fields definitions and templates for notes 328 | # 329 | 330 | def list_models(self, col, req): 331 | # This is already a list of dicts, so it doesn't need to be serialized 332 | return col.models.all() 333 | 334 | def find_model_by_name(self, col, req): 335 | # This is already a list of dicts, so it doesn't need to be serialized 336 | return col.models.byName(req.data['model']) 337 | 338 | # 339 | # NOTES - Information (in fields per the model) that can generate a card 340 | # (based on a template from the model). 
341 | # 342 | 343 | def find_notes(self, col, req): 344 | query = req.data.get('query', '') 345 | ids = col.findNotes(query) 346 | 347 | if req.data.get('preload', False): 348 | notes = [NoteHandler._serialize(col.getNote(id)) for id in ids] 349 | else: 350 | notes = [{'id': id} for id in ids] 351 | 352 | return notes 353 | 354 | def latest_notes(self, col, req): 355 | # TODO: use SQLAlchemy objects to do this 356 | sql = "SELECT n.id FROM notes AS n"; 357 | args = [] 358 | if req.data.has_key('updated_since'): 359 | sql += ' WHERE n.mod > ?' 360 | args.append(req.data['updated_since']) 361 | sql += ' ORDER BY n.mod DESC' 362 | sql += ' LIMIT ' + str(req.data.get('limit', 10)) 363 | ids = col.db.list(sql, *args) 364 | 365 | if req.data.get('preload', False): 366 | notes = [NoteHandler._serialize(col.getNote(id)) for id in ids] 367 | else: 368 | notes = [{'id': id} for id in ids] 369 | 370 | return notes 371 | 372 | @noReturnValue 373 | def add_note(self, col, req): 374 | from anki.notes import Note 375 | 376 | # TODO: I think this would be better with 'model' for the name 377 | # and 'mid' for the model id. 
378 | if type(req.data['model']) in (str, unicode): 379 | model = col.models.byName(req.data['model']) 380 | else: 381 | model = col.models.get(req.data['model']) 382 | 383 | note = Note(col, model) 384 | for name, value in req.data['fields'].items(): 385 | note[name] = value 386 | 387 | if req.data.has_key('tags'): 388 | note.setTagsFromStr(req.data['tags']) 389 | 390 | col.addNote(note) 391 | 392 | def list_tags(self, col, req): 393 | return col.tags.all() 394 | 395 | # 396 | # DECKS - Groups of cards 397 | # 398 | 399 | def list_decks(self, col, req): 400 | # This is already a list of dicts, so it doesn't need to be serialized 401 | return col.decks.all() 402 | 403 | @noReturnValue 404 | def select_deck(self, col, req): 405 | deck = DeckHandler._get_deck(col, req.data['deck']) 406 | col.decks.select(deck['id']) 407 | 408 | dyn_modes = { 409 | 'random': anki.consts.DYN_RANDOM, 410 | 'added': anki.consts.DYN_ADDED, 411 | 'due': anki.consts.DYN_DUE, 412 | } 413 | 414 | def create_dynamic_deck(self, col, req): 415 | name = req.data.get('name', t('Custom Study Session')) 416 | deck = col.decks.byName(name) 417 | if deck: 418 | if not deck['dyn']: 419 | raise HTTPBadRequest("There is an existing non-dynamic deck with the name %s" % name) 420 | 421 | # safe to empty it because it's a dynamic deck 422 | # TODO: maybe this should be an option? 
423 | col.sched.emptyDyn(deck['id']) 424 | else: 425 | deck = col.decks.get(col.decks.newDyn(name)) 426 | 427 | query = req.data.get('query', '') 428 | count = int(req.data.get('count', 100)) 429 | mode = req.data.get('mode', 'random') 430 | 431 | try: 432 | mode = self.dyn_modes[mode] 433 | except KeyError: 434 | raise HTTPBadRequest("Unknown mode: %s" % mode) 435 | 436 | deck['terms'][0] = [query, count, mode] 437 | 438 | if mode != anki.consts.DYN_RANDOM: 439 | deck['resched'] = True 440 | else: 441 | deck['resched'] = False 442 | 443 | if not col.sched.rebuildDyn(deck['id']): 444 | raise HTTPBadRequest("No cards matched the criteria you provided") 445 | 446 | col.decks.save(deck) 447 | col.sched.reset() 448 | 449 | return deck 450 | 451 | def empty_dynamic_deck(self, col, req): 452 | name = req.data.get('name', t('Custom Study Session')) 453 | deck = col.decks.byName(name) 454 | 455 | if not deck: 456 | raise HTTPBadRequest("Cannot find a deck with the given name: %s" % name) 457 | 458 | if not deck['dyn']: 459 | raise HTTPBadRequest("The given deck is not dynamic: %s" % name) 460 | 461 | col.sched.emptyDyn(deck['id']) 462 | 463 | # 464 | # CARD - A specific card in a deck with a history of review (generated from 465 | # a note based on the template). 466 | # 467 | 468 | def find_cards(self, col, req): 469 | from AnkiServer.find import Finder 470 | 471 | query = req.data.get('query', '') 472 | order = req.data.get('order', False) 473 | 474 | # TODO: patch Anki to support limit/offset and then remove this crazy hack! 
475 | finder = Finder(col) 476 | finder.limit = int(req.data.get('limit', 0)) 477 | finder.offset = int(req.data.get('offset', 0)) 478 | ids = finder.findCards(query, order) 479 | 480 | if req.data.get('preload', False): 481 | cards = [CardHandler._serialize(col.getCard(id), req.data) for id in ids] 482 | else: 483 | cards = [{'id': id} for id in ids] 484 | 485 | return cards 486 | 487 | def latest_cards(self, col, req): 488 | # TODO: use SQLAlchemy objects to do this 489 | sql = "SELECT c.id FROM notes AS n INNER JOIN cards AS c ON c.nid = n.id"; 490 | args = [] 491 | if req.data.has_key('updated_since'): 492 | sql += ' WHERE n.mod > ?' 493 | args.append(req.data['updated_since']) 494 | sql += ' ORDER BY n.mod DESC' 495 | sql += ' LIMIT ' + str(req.data.get('limit', 10)) 496 | ids = col.db.list(sql, *args) 497 | 498 | if req.data.get('preload', False): 499 | cards = [CardHandler._serialize(col.getCard(id), req.data) for id in ids] 500 | else: 501 | cards = [{'id': id} for id in ids] 502 | 503 | return cards 504 | 505 | # 506 | # SCHEDULER - Controls card review, ie. intervals, what cards are due, answering a card, etc. 
507 | # 508 | 509 | def reset_scheduler(self, col, req): 510 | if req.data.has_key('deck'): 511 | deck = DeckHandler._get_deck(col, req.data['deck']) 512 | col.decks.select(deck['id']) 513 | 514 | col.sched.reset() 515 | counts = col.sched.counts() 516 | return { 517 | 'new_cards': counts[0], 518 | 'learning_cards': counts[1], 519 | 'review_cards': counts[1], 520 | } 521 | 522 | def extend_scheduler_limits(self, col, req): 523 | new_cards = int(req.data.get('new_cards', 0)) 524 | review_cards = int(req.data.get('review_cards', 0)) 525 | col.sched.extendLimits(new_cards, review_cards) 526 | col.sched.reset() 527 | 528 | button_labels = ['Easy', 'Good', 'Hard'] 529 | 530 | def _get_answer_buttons(self, col, card): 531 | l = [] 532 | 533 | # Put the correct number of buttons 534 | cnt = col.sched.answerButtons(card) 535 | for idx in range(0, cnt - 1): 536 | l.append(self.button_labels[idx]) 537 | l.append('Again') 538 | l.reverse() 539 | 540 | # Loop through and add the ease, estimated time (in seconds) and other info 541 | return [{ 542 | 'ease': ease, 543 | 'label': label, 544 | 'string_label': t(label), 545 | 'interval': col.sched.nextIvl(card, ease), 546 | 'string_interval': col.sched.nextIvlStr(card, ease), 547 | } for ease, label in enumerate(l, 1)] 548 | 549 | def next_card(self, col, req): 550 | if req.data.has_key('deck'): 551 | deck = DeckHandler._get_deck(col, req.data['deck']) 552 | col.decks.select(deck['id']) 553 | 554 | card = col.sched.getCard() 555 | if card is None: 556 | return None 557 | 558 | # put it into the card cache to be removed when we answer it 559 | #if not req.session.has_key('cards'): 560 | # req.session['cards'] = {} 561 | #req.session['cards'][long(card.id)] = card 562 | 563 | card.startTimer() 564 | 565 | result = CardHandler._serialize(card, req.data) 566 | result['answer_buttons'] = self._get_answer_buttons(col, card) 567 | 568 | return result 569 | 570 | # TODO: calling answer_card() when the scheduler is not setup can 571 | # be 
an error! This can happen after a collection has been closed 572 | # for inactivity, and opened later. But since we're using 573 | # @noReturnValue, no error will be passed up. :-/ What to do? 574 | @noReturnValue 575 | def answer_card(self, col, req): 576 | import time 577 | 578 | card_id = long(req.data['id']) 579 | ease = int(req.data['ease']) 580 | 581 | card = col.getCard(card_id) 582 | if req.data.has_key('timerStarted'): 583 | card.timerStarted = float(req.data['timerStarted']) 584 | else: 585 | card.timerStarted = time.time() 586 | 587 | col.sched.answerCard(card, ease) 588 | 589 | @noReturnValue 590 | def suspend_cards(self, col, req): 591 | card_ids = req.data['ids'] 592 | col.sched.suspendCards(card_ids) 593 | 594 | @noReturnValue 595 | def unsuspend_cards(self, col, req): 596 | card_ids = req.data['ids'] 597 | col.sched.unsuspendCards(card_ids) 598 | 599 | def cards_recent_ease(self, col, req): 600 | """Returns the most recent ease for each card.""" 601 | 602 | # TODO: Use sqlalchemy to build this query! 603 | sql = "SELECT r.cid, r.ease, r.id FROM revlog AS r INNER JOIN (SELECT cid, MAX(id) AS id FROM revlog GROUP BY cid) AS q ON r.cid = q.cid AND r.id = q.id" 604 | where = [] 605 | if req.data.has_key('ids'): 606 | where.append('ids IN (' + (','.join(["'%s'" % x for x in req.data['ids']])) + ')') 607 | if len(where) > 0: 608 | sql += ' WHERE ' + ' AND '.join(where) 609 | 610 | result = [] 611 | for r in col.db.all(sql): 612 | result.append({'id': r[0], 'ease': r[1], 'timestamp': int(r[2] / 1000)}) 613 | 614 | return result 615 | 616 | def latest_revlog(self, col, req): 617 | """Returns recent entries from the revlog.""" 618 | 619 | # TODO: Use sqlalchemy to build this query! 620 | sql = "SELECT r.id, r.ease, r.cid, r.usn, r.ivl, r.lastIvl, r.factor, r.time, r.type FROM revlog AS r" 621 | args = [] 622 | if req.data.has_key('updated_since'): 623 | sql += ' WHERE r.id > ?' 
624 | args.append(long(req.data['updated_since']) * 1000) 625 | sql += ' ORDER BY r.id DESC' 626 | sql += ' LIMIT ' + str(req.data.get('limit', 100)) 627 | 628 | revlog = col.db.all(sql, *args) 629 | return [{ 630 | 'id': r[0], 631 | 'ease': r[1], 632 | 'timestamp': int(r[0] / 1000), 633 | 'card_id': r[2], 634 | 'usn': r[3], 635 | 'interval': r[4], 636 | 'last_interval': r[5], 637 | 'factor': r[6], 638 | 'time': r[7], 639 | 'type': r[8], 640 | } for r in revlog] 641 | 642 | stats_reports = { 643 | 'today': 'todayStats', 644 | 'due': 'dueGraph', 645 | 'reps': 'repsGraph', 646 | 'interval': 'ivlGraph', 647 | 'hourly': 'hourGraph', 648 | 'ease': 'easeGraph', 649 | 'card': 'cardGraph', 650 | 'footer': 'footer', 651 | } 652 | stats_reports_order = ['today', 'due', 'reps', 'interval', 'hourly', 'ease', 'card', 'footer'] 653 | 654 | def stats_report(self, col, req): 655 | import anki.stats 656 | import re 657 | 658 | stats = anki.stats.CollectionStats(col) 659 | stats.width = int(req.data.get('width', 600)) 660 | stats.height = int(req.data.get('height', 200)) 661 | reports = req.data.get('reports', self.stats_reports_order) 662 | include_css = req.data.get('include_css', False) 663 | include_jquery = req.data.get('include_jquery', False) 664 | include_flot = req.data.get('include_flot', False) 665 | 666 | if include_css: 667 | from anki.statsbg import bg 668 | html = stats.css % bg 669 | else: 670 | html = '' 671 | 672 | for name in reports: 673 | if not self.stats_reports.has_key(name): 674 | raise HTTPBadRequest("Unknown report name: %s" % name) 675 | func = getattr(stats, self.stats_reports[name]) 676 | 677 | html += '
' % name 678 | html += func() 679 | html += '
' 680 | 681 | # fix an error in some inline styles 682 | # TODO: submit a patch to Anki! 683 | html = re.sub(r'style="width:([0-9\.]+); height:([0-9\.]+);"', r'style="width:\1px; height: \2px;"', html) 684 | html = re.sub(r'-webkit-transform: ([^;]+);', r'-webkit-transform: \1; -moz-transform: \1; -ms-transform: \1; -o-transform: \1; transform: \1;', html) 685 | 686 | scripts = [] 687 | if include_jquery or include_flot: 688 | import anki.js 689 | if include_jquery: 690 | scripts.append(anki.js.jquery) 691 | if include_flot: 692 | scripts.append(anki.js.plot) 693 | if len(scripts) > 0: 694 | html = "" % ''.join(scripts) + html 695 | 696 | return html 697 | 698 | # 699 | # GLOBAL / MISC 700 | # 701 | 702 | @noReturnValue 703 | def set_language(self, col, req): 704 | anki.lang.setLang(req.data['code']) 705 | 706 | class ImportExportHandler(RestHandlerBase): 707 | """Handler group for the 'collection' type, but it's not added by default.""" 708 | 709 | def _get_filedata(self, data): 710 | import urllib2 711 | 712 | if data.has_key('data'): 713 | return data['data'] 714 | 715 | fd = None 716 | try: 717 | fd = urllib2.urlopen(data['url']) 718 | filedata = fd.read() 719 | finally: 720 | if fd is not None: 721 | fd.close() 722 | 723 | return filedata 724 | 725 | def _get_importer_class(self, data): 726 | filetype = data['filetype'] 727 | 728 | from AnkiServer.importer import get_importer_class 729 | importer_class = get_importer_class(filetype) 730 | if importer_class is None: 731 | raise HTTPBadRequest("Unknown filetype '%s'" % filetype) 732 | 733 | return importer_class 734 | 735 | def import_file(self, col, req): 736 | import AnkiServer.importer 737 | import tempfile 738 | 739 | # get the importer class 740 | importer_class = self._get_importer_class(req.data) 741 | 742 | # get the file data 743 | filedata = self._get_filedata(req.data) 744 | 745 | # write the file data to a temporary file 746 | try: 747 | path = None 748 | with tempfile.NamedTemporaryFile('wt', 
delete=False) as fd: 749 | path = fd.name 750 | fd.write(filedata) 751 | 752 | AnkiServer.importer.import_file(importer_class, col, path) 753 | finally: 754 | if path is not None: 755 | os.unlink(path) 756 | 757 | class ModelHandler(RestHandlerBase): 758 | """Default handler group for 'model' type.""" 759 | 760 | def field_names(self, col, req): 761 | model = col.models.get(req.ids[1]) 762 | if model is None: 763 | raise HTTPNotFound() 764 | return col.models.fieldNames(model) 765 | 766 | class NoteHandler(RestHandlerBase): 767 | """Default handler group for 'note' type.""" 768 | 769 | @staticmethod 770 | def _serialize(note): 771 | d = { 772 | 'id': note.id, 773 | 'guid': note.guid, 774 | 'model': note.model(), 775 | 'mid': note.mid, 776 | 'mod': note.mod, 777 | 'scm': note.scm, 778 | 'tags': note.tags, 779 | 'string_tags': ' '.join(note.tags), 780 | 'fields': {}, 781 | 'flags': note.flags, 782 | 'usn': note.usn, 783 | } 784 | 785 | # add all the fields 786 | for name, value in note.items(): 787 | d['fields'][name] = value 788 | 789 | return d 790 | 791 | def index(self, col, req): 792 | note = col.getNote(req.ids[1]) 793 | return self._serialize(note) 794 | 795 | def update(self, col, req): 796 | note = col.getNote(req.ids[1]) 797 | if note: 798 | # update fields 799 | for name in note.keys(): 800 | note[name] = req.data['fields'].get(name, '') 801 | 802 | # update tags 803 | note.tags = req.data['tags'] 804 | 805 | # optionally, we can prevent note.mod from getting updated - 806 | # this is useful when adding the 'marked' tag or other changes 807 | # we don't want to really "count" 808 | if req.data.get('update_mod', True): 809 | mod = intTime() 810 | else: 811 | mod = note.mod 812 | 813 | note.flush(mod) 814 | 815 | def delete(self, col, req): 816 | col.remNotes([req.ids[1]]) 817 | 818 | @noReturnValue 819 | def add_tags(self, col, req): 820 | note = col.getNote(req.ids[1]) 821 | 822 | # optionally, we can prevent note.mod from getting updated - 823 | # this is 
useful when adding the 'marked' tag or other changes 824 | # we don't want to really "count" 825 | if req.data.get('update_mod', True): 826 | mod = intTime() 827 | else: 828 | mod = note.mod 829 | 830 | for tag in req.data['tags']: 831 | note.addTag(tag) 832 | 833 | note.flush(mod) 834 | 835 | @noReturnValue 836 | def remove_tags(self, col, req): 837 | note = col.getNote(req.ids[1]) 838 | 839 | # optionally, we can prevent note.mod from getting updated - 840 | # this is useful when adding the 'marked' tag or other changes 841 | # we don't want to really "count" 842 | if req.data.get('update_mod', True): 843 | mod = intTime() 844 | else: 845 | mod = note.mod 846 | 847 | for tag in req.data['tags']: 848 | note.delTag(tag) 849 | 850 | note.flush(mod) 851 | 852 | class DeckHandler(RestHandlerBase): 853 | """Default handler group for 'deck' type.""" 854 | 855 | @staticmethod 856 | def _get_deck(col, val): 857 | try: 858 | did = long(val) 859 | deck = col.decks.get(did, False) 860 | except ValueError: 861 | deck = col.decks.byName(val) 862 | 863 | if deck is None: 864 | raise HTTPNotFound('No deck with id or name: ' + str(val)) 865 | 866 | return deck 867 | 868 | def index(self, col, req): 869 | return self._get_deck(col, req.ids[1]) 870 | 871 | def next_card(self, col, req): 872 | req_copy = req.copy() 873 | req_copy.data['deck'] = req.ids[1] 874 | del req_copy.ids[1] 875 | 876 | # forward this to the CollectionHandler 877 | return req.app.execute_handler('collection', 'next_card', col, req_copy) 878 | 879 | def get_conf(self, col, req): 880 | # TODO: should probably live in a ConfHandler 881 | return col.decks.confForDid(req.ids[1]) 882 | 883 | @noReturnValue 884 | def set_update_conf(self, col, req): 885 | data = req.data.copy() 886 | del data['id'] 887 | 888 | conf = col.decks.confForDid(req.ids[1]) 889 | conf = conf.copy() 890 | conf.update(data) 891 | 892 | col.decks.updateConf(conf) 893 | 894 | class CardHandler(RestHandlerBase): 895 | """Default handler group for 
'card' type.""" 896 | 897 | @staticmethod 898 | def _serialize(card, opts): 899 | d = { 900 | 'id': card.id, 901 | 'isEmpty': card.isEmpty(), 902 | 'css': card.css(), 903 | 'question': card._getQA()['q'], 904 | 'answer': card._getQA()['a'], 905 | 'did': card.did, 906 | 'due': card.due, 907 | 'factor': card.factor, 908 | 'ivl': card.ivl, 909 | 'lapses': card.lapses, 910 | 'left': card.left, 911 | 'mod': card.mod, 912 | 'nid': card.nid, 913 | 'odid': card.odid, 914 | 'odue': card.odue, 915 | 'ord': card.ord, 916 | 'queue': card.queue, 917 | 'reps': card.reps, 918 | 'type': card.type, 919 | 'usn': card.usn, 920 | 'timerStarted': card.timerStarted, 921 | } 922 | 923 | if opts.get('load_note', False): 924 | d['note'] = NoteHandler._serialize(card.col.getNote(card.nid)) 925 | 926 | if opts.get('load_deck', False): 927 | d['deck'] = card.col.decks.get(card.did) 928 | 929 | if opts.get('load_latest_revlog', False): 930 | d['latest_revlog'] = CardHandler._latest_revlog(card.col, card.id) 931 | 932 | return d 933 | 934 | @staticmethod 935 | def _latest_revlog(col, card_id): 936 | r = col.db.first("SELECT r.id, r.ease FROM revlog AS r WHERE r.cid = ? 
ORDER BY id DESC LIMIT 1", card_id) 937 | if r: 938 | return {'id': r[0], 'ease': r[1], 'timestamp': int(r[0] / 1000)} 939 | 940 | def index(self, col, req): 941 | card = col.getCard(req.ids[1]) 942 | return self._serialize(card, req.data) 943 | 944 | def _forward_to_note(self, col, req, name): 945 | card = col.getCard(req.ids[1]) 946 | 947 | req_copy = req.copy() 948 | req_copy.ids[1] = card.nid 949 | 950 | return req.app.execute_handler('note', name, col, req) 951 | 952 | @noReturnValue 953 | def add_tags(self, col, req): 954 | self._forward_to_note(col, req, 'add_tags') 955 | 956 | @noReturnValue 957 | def remove_tags(self, col, req): 958 | self._forward_to_note(col, req, 'remove_tags') 959 | 960 | def stats_report(self, col, req): 961 | card = col.getCard(req.ids[1]) 962 | return col.cardStats(card) 963 | 964 | def latest_revlog(self, col, req): 965 | return self._latest_revlog(col, req.ids[1]) 966 | 967 | # Our entry point 968 | def make_app(global_conf, **local_conf): 969 | # TODO: we should setup the default language from conf! 970 | 971 | # setup the logger 972 | from AnkiServer.utils import setup_logging 973 | setup_logging(local_conf.get('logging.config_file')) 974 | 975 | return RestApp( 976 | data_root=local_conf.get('data_root', '.'), 977 | allowed_hosts=local_conf.get('allowed_hosts', '*') 978 | ) 979 | 980 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | GNU AFFERO GENERAL PUBLIC LICENSE 3 | Version 3, 19 November 2007 4 | 5 | Copyright (C) 2007 Free Software Foundation, Inc. 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 
8 | 9 | Preamble 10 | 11 | The GNU Affero General Public License is a free, copyleft license for 12 | software and other kinds of works, specifically designed to ensure 13 | cooperation with the community in the case of network server software. 14 | 15 | The licenses for most software and other practical works are designed 16 | to take away your freedom to share and change the works. By contrast, 17 | our General Public Licenses are intended to guarantee your freedom to 18 | share and change all versions of a program--to make sure it remains free 19 | software for all its users. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | them if you wish), that you receive source code or can get it if you 25 | want it, that you can change the software or use pieces of it in new 26 | free programs, and that you know you can do these things. 27 | 28 | Developers that use our General Public Licenses protect your rights 29 | with two steps: (1) assert copyright on the software, and (2) offer 30 | you this License which gives you legal permission to copy, distribute 31 | and/or modify the software. 32 | 33 | A secondary benefit of defending all users' freedom is that 34 | improvements made in alternate versions of the program, if they 35 | receive widespread use, become available for other developers to 36 | incorporate. Many developers of free software are heartened and 37 | encouraged by the resulting cooperation. However, in the case of 38 | software used on network servers, this result may fail to come about. 39 | The GNU General Public License permits making a modified version and 40 | letting the public access it on a server without ever releasing its 41 | source code to the public. 
42 | 43 | The GNU Affero General Public License is designed specifically to 44 | ensure that, in such cases, the modified source code becomes available 45 | to the community. It requires the operator of a network server to 46 | provide the source code of the modified version running there to the 47 | users of that server. Therefore, public use of a modified version, on 48 | a publicly accessible server, gives the public access to the source 49 | code of the modified version. 50 | 51 | An older license, called the Affero General Public License and 52 | published by Affero, was designed to accomplish similar goals. This is 53 | a different license, not a version of the Affero GPL, but Affero has 54 | released a new version of the Affero GPL which permits relicensing under 55 | this license. 56 | 57 | The precise terms and conditions for copying, distribution and 58 | modification follow. 59 | 60 | TERMS AND CONDITIONS 61 | 62 | 0. Definitions. 63 | 64 | "This License" refers to version 3 of the GNU Affero General Public License. 65 | 66 | "Copyright" also means copyright-like laws that apply to other kinds of 67 | works, such as semiconductor masks. 68 | 69 | "The Program" refers to any copyrightable work licensed under this 70 | License. Each licensee is addressed as "you". "Licensees" and 71 | "recipients" may be individuals or organizations. 72 | 73 | To "modify" a work means to copy from or adapt all or part of the work 74 | in a fashion requiring copyright permission, other than the making of an 75 | exact copy. The resulting work is called a "modified version" of the 76 | earlier work or a work "based on" the earlier work. 77 | 78 | A "covered work" means either the unmodified Program or a work based 79 | on the Program. 
80 | 81 | To "propagate" a work means to do anything with it that, without 82 | permission, would make you directly or secondarily liable for 83 | infringement under applicable copyright law, except executing it on a 84 | computer or modifying a private copy. Propagation includes copying, 85 | distribution (with or without modification), making available to the 86 | public, and in some countries other activities as well. 87 | 88 | To "convey" a work means any kind of propagation that enables other 89 | parties to make or receive copies. Mere interaction with a user through 90 | a computer network, with no transfer of a copy, is not conveying. 91 | 92 | An interactive user interface displays "Appropriate Legal Notices" 93 | to the extent that it includes a convenient and prominently visible 94 | feature that (1) displays an appropriate copyright notice, and (2) 95 | tells the user that there is no warranty for the work (except to the 96 | extent that warranties are provided), that licensees may convey the 97 | work under this License, and how to view a copy of this License. If 98 | the interface presents a list of user commands or options, such as a 99 | menu, a prominent item in the list meets this criterion. 100 | 101 | 1. Source Code. 102 | 103 | The "source code" for a work means the preferred form of the work 104 | for making modifications to it. "Object code" means any non-source 105 | form of a work. 106 | 107 | A "Standard Interface" means an interface that either is an official 108 | standard defined by a recognized standards body, or, in the case of 109 | interfaces specified for a particular programming language, one that 110 | is widely used among developers working in that language. 
111 | 112 | The "System Libraries" of an executable work include anything, other 113 | than the work as a whole, that (a) is included in the normal form of 114 | packaging a Major Component, but which is not part of that Major 115 | Component, and (b) serves only to enable use of the work with that 116 | Major Component, or to implement a Standard Interface for which an 117 | implementation is available to the public in source code form. A 118 | "Major Component", in this context, means a major essential component 119 | (kernel, window system, and so on) of the specific operating system 120 | (if any) on which the executable work runs, or a compiler used to 121 | produce the work, or an object code interpreter used to run it. 122 | 123 | The "Corresponding Source" for a work in object code form means all 124 | the source code needed to generate, install, and (for an executable 125 | work) run the object code and to modify the work, including scripts to 126 | control those activities. However, it does not include the work's 127 | System Libraries, or general-purpose tools or generally available free 128 | programs which are used unmodified in performing those activities but 129 | which are not part of the work. For example, Corresponding Source 130 | includes interface definition files associated with source files for 131 | the work, and the source code for shared libraries and dynamically 132 | linked subprograms that the work is specifically designed to require, 133 | such as by intimate data communication or control flow between those 134 | subprograms and other parts of the work. 135 | 136 | The Corresponding Source need not include anything that users 137 | can regenerate automatically from other parts of the Corresponding 138 | Source. 139 | 140 | The Corresponding Source for a work in source code form is that 141 | same work. 142 | 143 | 2. Basic Permissions. 
144 | 145 | All rights granted under this License are granted for the term of 146 | copyright on the Program, and are irrevocable provided the stated 147 | conditions are met. This License explicitly affirms your unlimited 148 | permission to run the unmodified Program. The output from running a 149 | covered work is covered by this License only if the output, given its 150 | content, constitutes a covered work. This License acknowledges your 151 | rights of fair use or other equivalent, as provided by copyright law. 152 | 153 | You may make, run and propagate covered works that you do not 154 | convey, without conditions so long as your license otherwise remains 155 | in force. You may convey covered works to others for the sole purpose 156 | of having them make modifications exclusively for you, or provide you 157 | with facilities for running those works, provided that you comply with 158 | the terms of this License in conveying all material for which you do 159 | not control copyright. Those thus making or running the covered works 160 | for you must do so exclusively on your behalf, under your direction 161 | and control, on terms that prohibit them from making any copies of 162 | your copyrighted material outside their relationship with you. 163 | 164 | Conveying under any other circumstances is permitted solely under 165 | the conditions stated below. Sublicensing is not allowed; section 10 166 | makes it unnecessary. 167 | 168 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 169 | 170 | No covered work shall be deemed part of an effective technological 171 | measure under any applicable law fulfilling obligations under article 172 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 173 | similar laws prohibiting or restricting circumvention of such 174 | measures. 
175 | 176 | When you convey a covered work, you waive any legal power to forbid 177 | circumvention of technological measures to the extent such circumvention 178 | is effected by exercising rights under this License with respect to 179 | the covered work, and you disclaim any intention to limit operation or 180 | modification of the work as a means of enforcing, against the work's 181 | users, your or third parties' legal rights to forbid circumvention of 182 | technological measures. 183 | 184 | 4. Conveying Verbatim Copies. 185 | 186 | You may convey verbatim copies of the Program's source code as you 187 | receive it, in any medium, provided that you conspicuously and 188 | appropriately publish on each copy an appropriate copyright notice; 189 | keep intact all notices stating that this License and any 190 | non-permissive terms added in accord with section 7 apply to the code; 191 | keep intact all notices of the absence of any warranty; and give all 192 | recipients a copy of this License along with the Program. 193 | 194 | You may charge any price or no price for each copy that you convey, 195 | and you may offer support or warranty protection for a fee. 196 | 197 | 5. Conveying Modified Source Versions. 198 | 199 | You may convey a work based on the Program, or the modifications to 200 | produce it from the Program, in the form of source code under the 201 | terms of section 4, provided that you also meet all of these conditions: 202 | 203 | a) The work must carry prominent notices stating that you modified 204 | it, and giving a relevant date. 205 | 206 | b) The work must carry prominent notices stating that it is 207 | released under this License and any conditions added under section 208 | 7. This requirement modifies the requirement in section 4 to 209 | "keep intact all notices". 210 | 211 | c) You must license the entire work, as a whole, under this 212 | License to anyone who comes into possession of a copy. 
This 213 | License will therefore apply, along with any applicable section 7 214 | additional terms, to the whole of the work, and all its parts, 215 | regardless of how they are packaged. This License gives no 216 | permission to license the work in any other way, but it does not 217 | invalidate such permission if you have separately received it. 218 | 219 | d) If the work has interactive user interfaces, each must display 220 | Appropriate Legal Notices; however, if the Program has interactive 221 | interfaces that do not display Appropriate Legal Notices, your 222 | work need not make them do so. 223 | 224 | A compilation of a covered work with other separate and independent 225 | works, which are not by their nature extensions of the covered work, 226 | and which are not combined with it such as to form a larger program, 227 | in or on a volume of a storage or distribution medium, is called an 228 | "aggregate" if the compilation and its resulting copyright are not 229 | used to limit the access or legal rights of the compilation's users 230 | beyond what the individual works permit. Inclusion of a covered work 231 | in an aggregate does not cause this License to apply to the other 232 | parts of the aggregate. 233 | 234 | 6. Conveying Non-Source Forms. 235 | 236 | You may convey a covered work in object code form under the terms 237 | of sections 4 and 5, provided that you also convey the 238 | machine-readable Corresponding Source under the terms of this License, 239 | in one of these ways: 240 | 241 | a) Convey the object code in, or embodied in, a physical product 242 | (including a physical distribution medium), accompanied by the 243 | Corresponding Source fixed on a durable physical medium 244 | customarily used for software interchange. 
245 | 246 | b) Convey the object code in, or embodied in, a physical product 247 | (including a physical distribution medium), accompanied by a 248 | written offer, valid for at least three years and valid for as 249 | long as you offer spare parts or customer support for that product 250 | model, to give anyone who possesses the object code either (1) a 251 | copy of the Corresponding Source for all the software in the 252 | product that is covered by this License, on a durable physical 253 | medium customarily used for software interchange, for a price no 254 | more than your reasonable cost of physically performing this 255 | conveying of source, or (2) access to copy the 256 | Corresponding Source from a network server at no charge. 257 | 258 | c) Convey individual copies of the object code with a copy of the 259 | written offer to provide the Corresponding Source. This 260 | alternative is allowed only occasionally and noncommercially, and 261 | only if you received the object code with such an offer, in accord 262 | with subsection 6b. 263 | 264 | d) Convey the object code by offering access from a designated 265 | place (gratis or for a charge), and offer equivalent access to the 266 | Corresponding Source in the same way through the same place at no 267 | further charge. You need not require recipients to copy the 268 | Corresponding Source along with the object code. If the place to 269 | copy the object code is a network server, the Corresponding Source 270 | may be on a different server (operated by you or a third party) 271 | that supports equivalent copying facilities, provided you maintain 272 | clear directions next to the object code saying where to find the 273 | Corresponding Source. Regardless of what server hosts the 274 | Corresponding Source, you remain obligated to ensure that it is 275 | available for as long as needed to satisfy these requirements. 
276 | 277 | e) Convey the object code using peer-to-peer transmission, provided 278 | you inform other peers where the object code and Corresponding 279 | Source of the work are being offered to the general public at no 280 | charge under subsection 6d. 281 | 282 | A separable portion of the object code, whose source code is excluded 283 | from the Corresponding Source as a System Library, need not be 284 | included in conveying the object code work. 285 | 286 | A "User Product" is either (1) a "consumer product", which means any 287 | tangible personal property which is normally used for personal, family, 288 | or household purposes, or (2) anything designed or sold for incorporation 289 | into a dwelling. In determining whether a product is a consumer product, 290 | doubtful cases shall be resolved in favor of coverage. For a particular 291 | product received by a particular user, "normally used" refers to a 292 | typical or common use of that class of product, regardless of the status 293 | of the particular user or of the way in which the particular user 294 | actually uses, or expects or is expected to use, the product. A product 295 | is a consumer product regardless of whether the product has substantial 296 | commercial, industrial or non-consumer uses, unless such uses represent 297 | the only significant mode of use of the product. 298 | 299 | "Installation Information" for a User Product means any methods, 300 | procedures, authorization keys, or other information required to install 301 | and execute modified versions of a covered work in that User Product from 302 | a modified version of its Corresponding Source. The information must 303 | suffice to ensure that the continued functioning of the modified object 304 | code is in no case prevented or interfered with solely because 305 | modification has been made. 
306 | 307 | If you convey an object code work under this section in, or with, or 308 | specifically for use in, a User Product, and the conveying occurs as 309 | part of a transaction in which the right of possession and use of the 310 | User Product is transferred to the recipient in perpetuity or for a 311 | fixed term (regardless of how the transaction is characterized), the 312 | Corresponding Source conveyed under this section must be accompanied 313 | by the Installation Information. But this requirement does not apply 314 | if neither you nor any third party retains the ability to install 315 | modified object code on the User Product (for example, the work has 316 | been installed in ROM). 317 | 318 | The requirement to provide Installation Information does not include a 319 | requirement to continue to provide support service, warranty, or updates 320 | for a work that has been modified or installed by the recipient, or for 321 | the User Product in which it has been modified or installed. Access to a 322 | network may be denied when the modification itself materially and 323 | adversely affects the operation of the network or violates the rules and 324 | protocols for communication across the network. 325 | 326 | Corresponding Source conveyed, and Installation Information provided, 327 | in accord with this section must be in a format that is publicly 328 | documented (and with an implementation available to the public in 329 | source code form), and must require no special password or key for 330 | unpacking, reading or copying. 331 | 332 | 7. Additional Terms. 333 | 334 | "Additional permissions" are terms that supplement the terms of this 335 | License by making exceptions from one or more of its conditions. 336 | Additional permissions that are applicable to the entire Program shall 337 | be treated as though they were included in this License, to the extent 338 | that they are valid under applicable law. 
If additional permissions 339 | apply only to part of the Program, that part may be used separately 340 | under those permissions, but the entire Program remains governed by 341 | this License without regard to the additional permissions. 342 | 343 | When you convey a copy of a covered work, you may at your option 344 | remove any additional permissions from that copy, or from any part of 345 | it. (Additional permissions may be written to require their own 346 | removal in certain cases when you modify the work.) You may place 347 | additional permissions on material, added by you to a covered work, 348 | for which you have or can give appropriate copyright permission. 349 | 350 | Notwithstanding any other provision of this License, for material you 351 | add to a covered work, you may (if authorized by the copyright holders of 352 | that material) supplement the terms of this License with terms: 353 | 354 | a) Disclaiming warranty or limiting liability differently from the 355 | terms of sections 15 and 16 of this License; or 356 | 357 | b) Requiring preservation of specified reasonable legal notices or 358 | author attributions in that material or in the Appropriate Legal 359 | Notices displayed by works containing it; or 360 | 361 | c) Prohibiting misrepresentation of the origin of that material, or 362 | requiring that modified versions of such material be marked in 363 | reasonable ways as different from the original version; or 364 | 365 | d) Limiting the use for publicity purposes of names of licensors or 366 | authors of the material; or 367 | 368 | e) Declining to grant rights under trademark law for use of some 369 | trade names, trademarks, or service marks; or 370 | 371 | f) Requiring indemnification of licensors and authors of that 372 | material by anyone who conveys the material (or modified versions of 373 | it) with contractual assumptions of liability to the recipient, for 374 | any liability that these contractual assumptions directly impose on 
375 | those licensors and authors. 376 | 377 | All other non-permissive additional terms are considered "further 378 | restrictions" within the meaning of section 10. If the Program as you 379 | received it, or any part of it, contains a notice stating that it is 380 | governed by this License along with a term that is a further 381 | restriction, you may remove that term. If a license document contains 382 | a further restriction but permits relicensing or conveying under this 383 | License, you may add to a covered work material governed by the terms 384 | of that license document, provided that the further restriction does 385 | not survive such relicensing or conveying. 386 | 387 | If you add terms to a covered work in accord with this section, you 388 | must place, in the relevant source files, a statement of the 389 | additional terms that apply to those files, or a notice indicating 390 | where to find the applicable terms. 391 | 392 | Additional terms, permissive or non-permissive, may be stated in the 393 | form of a separately written license, or stated as exceptions; 394 | the above requirements apply either way. 395 | 396 | 8. Termination. 397 | 398 | You may not propagate or modify a covered work except as expressly 399 | provided under this License. Any attempt otherwise to propagate or 400 | modify it is void, and will automatically terminate your rights under 401 | this License (including any patent licenses granted under the third 402 | paragraph of section 11). 403 | 404 | However, if you cease all violation of this License, then your 405 | license from a particular copyright holder is reinstated (a) 406 | provisionally, unless and until the copyright holder explicitly and 407 | finally terminates your license, and (b) permanently, if the copyright 408 | holder fails to notify you of the violation by some reasonable means 409 | prior to 60 days after the cessation. 
410 | 411 | Moreover, your license from a particular copyright holder is 412 | reinstated permanently if the copyright holder notifies you of the 413 | violation by some reasonable means, this is the first time you have 414 | received notice of violation of this License (for any work) from that 415 | copyright holder, and you cure the violation prior to 30 days after 416 | your receipt of the notice. 417 | 418 | Termination of your rights under this section does not terminate the 419 | licenses of parties who have received copies or rights from you under 420 | this License. If your rights have been terminated and not permanently 421 | reinstated, you do not qualify to receive new licenses for the same 422 | material under section 10. 423 | 424 | 9. Acceptance Not Required for Having Copies. 425 | 426 | You are not required to accept this License in order to receive or 427 | run a copy of the Program. Ancillary propagation of a covered work 428 | occurring solely as a consequence of using peer-to-peer transmission 429 | to receive a copy likewise does not require acceptance. However, 430 | nothing other than this License grants you permission to propagate or 431 | modify any covered work. These actions infringe copyright if you do 432 | not accept this License. Therefore, by modifying or propagating a 433 | covered work, you indicate your acceptance of this License to do so. 434 | 435 | 10. Automatic Licensing of Downstream Recipients. 436 | 437 | Each time you convey a covered work, the recipient automatically 438 | receives a license from the original licensors, to run, modify and 439 | propagate that work, subject to this License. You are not responsible 440 | for enforcing compliance by third parties with this License. 441 | 442 | An "entity transaction" is a transaction transferring control of an 443 | organization, or substantially all assets of one, or subdividing an 444 | organization, or merging organizations. 
If propagation of a covered 445 | work results from an entity transaction, each party to that 446 | transaction who receives a copy of the work also receives whatever 447 | licenses to the work the party's predecessor in interest had or could 448 | give under the previous paragraph, plus a right to possession of the 449 | Corresponding Source of the work from the predecessor in interest, if 450 | the predecessor has it or can get it with reasonable efforts. 451 | 452 | You may not impose any further restrictions on the exercise of the 453 | rights granted or affirmed under this License. For example, you may 454 | not impose a license fee, royalty, or other charge for exercise of 455 | rights granted under this License, and you may not initiate litigation 456 | (including a cross-claim or counterclaim in a lawsuit) alleging that 457 | any patent claim is infringed by making, using, selling, offering for 458 | sale, or importing the Program or any portion of it. 459 | 460 | 11. Patents. 461 | 462 | A "contributor" is a copyright holder who authorizes use under this 463 | License of the Program or a work on which the Program is based. The 464 | work thus licensed is called the contributor's "contributor version". 465 | 466 | A contributor's "essential patent claims" are all patent claims 467 | owned or controlled by the contributor, whether already acquired or 468 | hereafter acquired, that would be infringed by some manner, permitted 469 | by this License, of making, using, or selling its contributor version, 470 | but do not include claims that would be infringed only as a 471 | consequence of further modification of the contributor version. For 472 | purposes of this definition, "control" includes the right to grant 473 | patent sublicenses in a manner consistent with the requirements of 474 | this License. 
475 | 476 | Each contributor grants you a non-exclusive, worldwide, royalty-free 477 | patent license under the contributor's essential patent claims, to 478 | make, use, sell, offer for sale, import and otherwise run, modify and 479 | propagate the contents of its contributor version. 480 | 481 | In the following three paragraphs, a "patent license" is any express 482 | agreement or commitment, however denominated, not to enforce a patent 483 | (such as an express permission to practice a patent or covenant not to 484 | sue for patent infringement). To "grant" such a patent license to a 485 | party means to make such an agreement or commitment not to enforce a 486 | patent against the party. 487 | 488 | If you convey a covered work, knowingly relying on a patent license, 489 | and the Corresponding Source of the work is not available for anyone 490 | to copy, free of charge and under the terms of this License, through a 491 | publicly available network server or other readily accessible means, 492 | then you must either (1) cause the Corresponding Source to be so 493 | available, or (2) arrange to deprive yourself of the benefit of the 494 | patent license for this particular work, or (3) arrange, in a manner 495 | consistent with the requirements of this License, to extend the patent 496 | license to downstream recipients. "Knowingly relying" means you have 497 | actual knowledge that, but for the patent license, your conveying the 498 | covered work in a country, or your recipient's use of the covered work 499 | in a country, would infringe one or more identifiable patents in that 500 | country that you have reason to believe are valid. 
501 | 502 | If, pursuant to or in connection with a single transaction or 503 | arrangement, you convey, or propagate by procuring conveyance of, a 504 | covered work, and grant a patent license to some of the parties 505 | receiving the covered work authorizing them to use, propagate, modify 506 | or convey a specific copy of the covered work, then the patent license 507 | you grant is automatically extended to all recipients of the covered 508 | work and works based on it. 509 | 510 | A patent license is "discriminatory" if it does not include within 511 | the scope of its coverage, prohibits the exercise of, or is 512 | conditioned on the non-exercise of one or more of the rights that are 513 | specifically granted under this License. You may not convey a covered 514 | work if you are a party to an arrangement with a third party that is 515 | in the business of distributing software, under which you make payment 516 | to the third party based on the extent of your activity of conveying 517 | the work, and under which the third party grants, to any of the 518 | parties who would receive the covered work from you, a discriminatory 519 | patent license (a) in connection with copies of the covered work 520 | conveyed by you (or copies made from those copies), or (b) primarily 521 | for and in connection with specific products or compilations that 522 | contain the covered work, unless you entered into that arrangement, 523 | or that patent license was granted, prior to 28 March 2007. 524 | 525 | Nothing in this License shall be construed as excluding or limiting 526 | any implied license or other defenses to infringement that may 527 | otherwise be available to you under applicable patent law. 528 | 529 | 12. No Surrender of Others' Freedom. 530 | 531 | If conditions are imposed on you (whether by court order, agreement or 532 | otherwise) that contradict the conditions of this License, they do not 533 | excuse you from the conditions of this License. 
If you cannot convey a 534 | covered work so as to satisfy simultaneously your obligations under this 535 | License and any other pertinent obligations, then as a consequence you may 536 | not convey it at all. For example, if you agree to terms that obligate you 537 | to collect a royalty for further conveying from those to whom you convey 538 | the Program, the only way you could satisfy both those terms and this 539 | License would be to refrain entirely from conveying the Program. 540 | 541 | 13. Remote Network Interaction; Use with the GNU General Public License. 542 | 543 | Notwithstanding any other provision of this License, if you modify the 544 | Program, your modified version must prominently offer all users 545 | interacting with it remotely through a computer network (if your version 546 | supports such interaction) an opportunity to receive the Corresponding 547 | Source of your version by providing access to the Corresponding Source 548 | from a network server at no charge, through some standard or customary 549 | means of facilitating copying of software. This Corresponding Source 550 | shall include the Corresponding Source for any work covered by version 3 551 | of the GNU General Public License that is incorporated pursuant to the 552 | following paragraph. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the work with which it is combined will remain governed by version 560 | 3 of the GNU General Public License. 561 | 562 | 14. Revised Versions of this License. 563 | 564 | The Free Software Foundation may publish revised and/or new versions of 565 | the GNU Affero General Public License from time to time. 
Such new versions 566 | will be similar in spirit to the present version, but may differ in detail to 567 | address new problems or concerns. 568 | 569 | Each version is given a distinguishing version number. If the 570 | Program specifies that a certain numbered version of the GNU Affero General 571 | Public License "or any later version" applies to it, you have the 572 | option of following the terms and conditions either of that numbered 573 | version or of any later version published by the Free Software 574 | Foundation. If the Program does not specify a version number of the 575 | GNU Affero General Public License, you may choose any version ever published 576 | by the Free Software Foundation. 577 | 578 | If the Program specifies that a proxy can decide which future 579 | versions of the GNU Affero General Public License can be used, that proxy's 580 | public statement of acceptance of a version permanently authorizes you 581 | to choose that version for the Program. 582 | 583 | Later license versions may give you additional or different 584 | permissions. However, no additional obligations are imposed on any 585 | author or copyright holder as a result of your choosing to follow a 586 | later version. 587 | 588 | 15. Disclaimer of Warranty. 589 | 590 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 591 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 592 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 593 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 594 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 595 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 596 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 597 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 598 | 599 | 16. Limitation of Liability. 
600 | 601 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 602 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 603 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 604 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 605 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 606 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 607 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 608 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 609 | SUCH DAMAGES. 610 | 611 | 17. Interpretation of Sections 15 and 16. 612 | 613 | If the disclaimer of warranty and limitation of liability provided 614 | above cannot be given local legal effect according to their terms, 615 | reviewing courts shall apply local law that most closely approximates 616 | an absolute waiver of all civil liability in connection with the 617 | Program, unless a warranty or assumption of liability accompanies a 618 | copy of the Program in return for a fee. 619 | 620 | END OF TERMS AND CONDITIONS 621 | 622 | How to Apply These Terms to Your New Programs 623 | 624 | If you develop a new program, and you want it to be of the greatest 625 | possible use to the public, the best way to achieve this is to make it 626 | free software which everyone can redistribute and change under these terms. 627 | 628 | To do so, attach the following notices to the program. It is safest 629 | to attach them to the start of each source file to most effectively 630 | state the exclusion of warranty; and each file should have at least 631 | the "copyright" line and a pointer to where the full notice is found. 
632 | 633 | <one line to give the program's name and a brief idea of what it does.> 634 | Copyright (C) <year>  <name of author> 635 | 636 | This program is free software: you can redistribute it and/or modify 637 | it under the terms of the GNU Affero General Public License as published by 638 | the Free Software Foundation, either version 3 of the License, or 639 | (at your option) any later version. 640 | 641 | This program is distributed in the hope that it will be useful, 642 | but WITHOUT ANY WARRANTY; without even the implied warranty of 643 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 644 | GNU Affero General Public License for more details. 645 | 646 | You should have received a copy of the GNU Affero General Public License 647 | along with this program. If not, see <http://www.gnu.org/licenses/>. 648 | 649 | Also add information on how to contact you by electronic and paper mail. 650 | 651 | If your software can interact with users remotely through a computer 652 | network, you should also make sure that it provides a way for users to 653 | get its source. For example, if your program is a web application, its 654 | interface could display a "Source" link that leads users to an archive 655 | of the code. There are many ways you could offer source, and different 656 | solutions will be better for different programs; see section 13 for the 657 | specific requirements. 658 | 659 | You should also get your employer (if you work as a programmer) or school, 660 | if any, to sign a "copyright disclaimer" for the program, if necessary. 661 | For more information on this, and how to apply and follow the GNU AGPL, see 662 | <http://www.gnu.org/licenses/>. 663 | --------------------------------------------------------------------------------