├── doc ├── requirements.txt ├── modules.rst ├── todoist.rst ├── models.rst ├── index.rst ├── managers.rst ├── Makefile ├── make.bat └── conf.py ├── todoist ├── managers │ ├── __init__.py │ ├── backups.py │ ├── activity.py │ ├── quick.py │ ├── user_settings.py │ ├── locations.py │ ├── completed.py │ ├── collaborator_states.py │ ├── collaborators.py │ ├── emails.py │ ├── business_users.py │ ├── templates.py │ ├── uploads.py │ ├── biz_invitations.py │ ├── invitations.py │ ├── live_notifications.py │ ├── generic.py │ ├── reminders.py │ ├── labels.py │ ├── filters.py │ ├── user.py │ ├── notes.py │ ├── sections.py │ ├── projects.py │ ├── archive.py │ └── items.py ├── __init__.py ├── models.py └── api.py ├── setup.cfg ├── pytest.ini.sample ├── tox.ini ├── .gitignore ├── MANIFEST.in ├── .isort.cfg ├── mypy.ini ├── .pre-commit-config.yaml ├── setup.py ├── LICENSE ├── tests ├── conftest.py └── test_api.py ├── README.md └── CHANGELOG.md /doc/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx_rtd_theme 3 | -------------------------------------------------------------------------------- /todoist/managers/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | -------------------------------------------------------------------------------- /todoist/__init__.py: -------------------------------------------------------------------------------- 1 | from .api import TodoistAPI # noqa: F401 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | max-line-length = 88 6 | -------------------------------------------------------------------------------- /pytest.ini.sample: -------------------------------------------------------------------------------- 1 | [pytest] 2 | token = test-api-token 3 | token2 = another-test-api-token 4 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27,py37 3 | [testenv] 4 | deps = pytest 5 | commands = pytest {posargs} 6 | -------------------------------------------------------------------------------- /doc/modules.rst: -------------------------------------------------------------------------------- 1 | todoist 2 | ======= 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | todoist 8 | models 9 | managers 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.tox 2 | /.cache 3 | /doc/_build 4 | /pytest.ini 5 | /build 6 | /dist 7 | env 8 | 9 | .*.swp 10 | *.pyc 11 | *.egg-info 12 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | recursive-include doc * 3 | recursive-exclude doc *.pyc 4 | recursive-exclude doc *.pyo 5 | prune doc/_build 6 | -------------------------------------------------------------------------------- /doc/todoist.rst: -------------------------------------------------------------------------------- 1 | todoist.api 2 | ----------- 3 | 4 | .. 
automodule:: todoist.api 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /doc/models.rst: -------------------------------------------------------------------------------- 1 | todoist.models 2 | -------------- 3 | 4 | .. automodule:: todoist.models 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | known_third_party=pytest,requests,setuptools 3 | multi_line_output=3 4 | include_trailing_comma=True 5 | force_grid_wrap=0 6 | use_parentheses=True 7 | line_length=88 8 | -------------------------------------------------------------------------------- /todoist/managers/backups.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class BackupsManager(Manager): 6 | def get(self): 7 | """ 8 | Get backups. 9 | """ 10 | params = {"token": self.token} 11 | return self.api._get("backups/get", params=params) 12 | -------------------------------------------------------------------------------- /todoist/managers/activity.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class ActivityManager(Manager): 6 | def get(self, **kwargs): 7 | """ 8 | Get events from the activity log. 9 | """ 10 | params = {"token": self.token} 11 | params.update(kwargs) 12 | return self.api._get("activity/get", params=params) 13 | -------------------------------------------------------------------------------- /todoist/managers/quick.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class QuickManager(Manager): 6 | def add(self, text, **kwargs): 7 | """ 8 | Quick add task implementation. 9 | """ 10 | params = {"token": self.token, "text": text} 11 | params.update(kwargs) 12 | return self.api._get("quick/add", params=params) 13 | -------------------------------------------------------------------------------- /todoist/managers/user_settings.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class UserSettingsManager(Manager): 6 | def update(self, **kwargs): 7 | """ 8 | Updates the user's settings. 9 | """ 10 | cmd = { 11 | "type": "user_settings_update", 12 | "uuid": self.api.generate_uuid(), 13 | "args": kwargs, 14 | } 15 | self.queue.append(cmd) 16 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 2.7 3 | follow_imports = silent 4 | scripts_are_modules = true 5 | 6 | # We had to ignore missing imports, because of third-party libraries installed 7 | # inside the virtualenv, and apparently there's no easy way for mypy to respect 8 | # packages inside the virtualenv. 
That's the option pre-commit-config runs with 9 | # by default, but we add it here as well for the sake of uniformity of the 10 | # output 11 | ignore_missing_imports = true 12 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. todoist-python documentation master file, created by 2 | sphinx-quickstart on Tue Jul 22 11:37:51 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Overview 7 | ======== 8 | 9 | The official Todoist Python API library. 10 | 11 | Modules 12 | ======= 13 | 14 | .. toctree:: 15 | :maxdepth: 2 16 | 17 | modules 18 | 19 | Indices and tables 20 | ================== 21 | 22 | * :ref:`genindex` 23 | * :ref:`modindex` 24 | 25 | -------------------------------------------------------------------------------- /todoist/managers/locations.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import AllMixin, Manager, SyncMixin 3 | 4 | 5 | class LocationsManager(Manager, AllMixin, SyncMixin): 6 | 7 | state_name = "locations" 8 | object_type = None # there is no local state associated 9 | 10 | def clear(self): 11 | """ 12 | Clears the locations. 13 | """ 14 | cmd = { 15 | "type": "clear_locations", 16 | "uuid": self.api.generate_uuid(), 17 | "args": {}, 18 | } 19 | self.queue.append(cmd) 20 | -------------------------------------------------------------------------------- /todoist/managers/completed.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class CompletedManager(Manager): 6 | def get_stats(self): 7 | """ 8 | Returns the user's recent productivity stats. 9 | """ 10 | return self.api._get("completed/get_stats", params={"token": self.token}) 11 | 12 | def get_all(self, **kwargs): 13 | """ 14 | Returns all user's completed items. 15 | """ 16 | params = {"token": self.token} 17 | params.update(kwargs) 18 | return self.api._get("completed/get_all", params=params) 19 | -------------------------------------------------------------------------------- /todoist/managers/collaborator_states.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager, SyncMixin 3 | 4 | 5 | class CollaboratorStatesManager(Manager, SyncMixin): 6 | 7 | state_name = "collaborator_states" 8 | object_type = None # there is no object type associated 9 | 10 | def get_by_ids(self, project_id, user_id): 11 | """ 12 | Finds and returns the collaborator state based on the project and user 13 | ids. 14 | """ 15 | for obj in self.state[self.state_name]: 16 | if obj["project_id"] == project_id and obj["user_id"] == user_id: 17 | return obj 18 | return None 19 | -------------------------------------------------------------------------------- /todoist/managers/collaborators.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import GetByIdMixin, Manager, SyncMixin 3 | 4 | 5 | class CollaboratorsManager(Manager, GetByIdMixin, SyncMixin): 6 | 7 | state_name = "collaborators" 8 | object_type = None # there is no object type associated 9 | 10 | def delete(self, project_id, email): 11 | """ 12 | Deletes a collaborator from a shared project. 
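
        A hedged usage sketch (the project id and email are placeholder
        values, not taken from this repository):

            api.collaborators.delete(project_id=128501470,
                                     email="partner@example.com")
            api.commit()  # sends the queued "delete_collaborator" command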
13 | """ 14 | cmd = { 15 | "type": "delete_collaborator", 16 | "uuid": self.api.generate_uuid(), 17 | "args": {"project_id": project_id, "email": email}, 18 | } 19 | self.queue.append(cmd) 20 | -------------------------------------------------------------------------------- /todoist/managers/emails.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class EmailsManager(Manager): 6 | def get_or_create(self, obj_type, obj_id, **kwargs): 7 | """ 8 | Get or create email to an object. 9 | """ 10 | params = {"token": self.token, "obj_type": obj_type, "obj_id": obj_id} 11 | params.update(kwargs) 12 | return self.api._get("emails/get_or_create", params=params) 13 | 14 | def disable(self, obj_type, obj_id, **kwargs): 15 | """ 16 | Disable email to an object. 17 | """ 18 | params = {"token": self.token, "obj_type": obj_type, "obj_id": obj_id} 19 | params.update(kwargs) 20 | return self.api._get("emails/disable", params=params) 21 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v2.3.0 5 | hooks: 6 | - id: flake8 7 | - id: trailing-whitespace 8 | - id: end-of-file-fixer 9 | - id: mixed-line-ending 10 | - id: check-merge-conflict 11 | - id: check-case-conflict 12 | - id: debug-statements 13 | 14 | - repo: https://github.com/python/black 15 | rev: stable 16 | hooks: 17 | - id: black 18 | language_version: python3.7 19 | 20 | - repo: https://github.com/asottile/seed-isort-config 21 | rev: v1.9.2 22 | hooks: 23 | - id: seed-isort-config 24 | 25 | - repo: https://github.com/pre-commit/mirrors-mypy 26 | rev: v0.750 27 | hooks: 28 | - id: mypy 29 | 30 | - repo: https://github.com/pre-commit/mirrors-isort 31 | rev: v4.3.21 32 | hooks: 33 | - id: isort 34 | -------------------------------------------------------------------------------- /todoist/managers/business_users.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | 4 | from .generic import Manager 5 | 6 | 7 | class BusinessUsersManager(Manager): 8 | def invite(self, email_list): 9 | """ 10 | Send a business user invitation. 11 | """ 12 | params = {"token": self.token, "email_list": json.dumps(email_list)} 13 | return self.api._get("business/users/invite", params=params) 14 | 15 | def accept_invitation(self, id, secret): 16 | """ 17 | Accept a business user invitation. 18 | """ 19 | params = {"token": self.token, "id": id, "secret": secret} 20 | return self.api._get("business/users/accept_invitation", params=params) 21 | 22 | def reject_invitation(self, id, secret): 23 | """ 24 | Reject a business user invitation. 
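
        A hedged usage sketch (the manager attribute name ``business_users``
        and the id/secret values are assumptions made for illustration; the
        call issues the HTTP request immediately, so no commit() is needed):

            api.business_users.reject_invitation(1234, "invitation-secret")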
25 |         """ 26 |         params = {"token": self.token, "id": id, "secret": secret} 27 |         return self.api._get("business/users/reject_invitation", params=params) 28 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | 4 | from setuptools import setup 5 | 6 | 7 | def read(fname): 8 |     try: 9 |         return open(os.path.join(os.path.dirname(__file__), fname)).read() 10 |     except Exception: 11 |         return "" 12 | 13 | 14 | setup( 15 |     name="todoist-python", 16 |     version="8.1.3", 17 |     packages=["todoist", "todoist.managers"], 18 |     author="Doist Team", 19 |     author_email="integrations@todoist.com", 20 |     license="MIT", 21 |     description="todoist-python - The official Todoist Python API library", 22 |     long_description=read("README.md"), 23 |     install_requires=[ 24 |         "requests", 25 |         "typing;python_version<'3.5'", 26 |     ], 27 |     # see here for complete list of classifiers 28 |     # http://pypi.python.org/pypi?%3Aaction=list_classifiers 29 |     classifiers=( 30 |         "Intended Audience :: Developers", 31 |         "License :: OSI Approved :: MIT License", 32 |         "Programming Language :: Python", 33 |     ), 34 | ) 35 | -------------------------------------------------------------------------------- /todoist/managers/templates.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class TemplatesManager(Manager): 6 |     def import_into_project(self, project_id, filename, **kwargs): 7 |         """ 8 |         Imports a template into a project. 9 |         """ 10 |         data = {"token": self.token, "project_id": project_id} 11 |         data.update(kwargs) 12 |         files = {"file": open(filename, "r")} 13 |         return self.api._post("templates/import_into_project", data=data, files=files) 14 | 15 |     def export_as_file(self, project_id, **kwargs): 16 |         """ 17 |         Exports a template as a file. 18 |         """ 19 |         data = {"token": self.token, "project_id": project_id} 20 |         data.update(kwargs) 21 |         return self.api._post("templates/export_as_file", data=data) 22 | 23 |     def export_as_url(self, project_id, **kwargs): 24 |         """ 25 |         Exports a template as a URL. 26 |         """ 27 |         data = {"token": self.token, "project_id": project_id} 28 |         data.update(kwargs) 29 |         return self.api._post("templates/export_as_url", data=data) 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2017 Doist 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /todoist/managers/uploads.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .generic import Manager 3 | 4 | 5 | class UploadsManager(Manager): 6 | def add(self, filename, **kwargs): 7 | """ 8 | Uploads a file. 9 | 10 | param filename: (str) name of file to upload 11 | """ 12 | data = {"token": self.token} 13 | data.update(kwargs) 14 | files = {"file": open(filename, "rb")} 15 | return self.api._post("uploads/add", data=data, files=files) 16 | 17 | def get(self, **kwargs): 18 | """ 19 | Returns all user's uploads. 20 | 21 | kwargs: 22 | limit: (int, optional) number of results (1-50) 23 | last_id: (int, optional) return results with id>> import todoist 38 | >>> api = todoist.TodoistAPI('0123456789abcdef0123456789abcdef01234567') 39 | >>> api.sync() 40 | >>> full_name = api.state['user']['full_name'] 41 | >>> print(full_name) 42 | John Doe 43 | >>> for project in api.state['projects']: 44 | ... print(project['name']) 45 | ... 46 | Personal 47 | Shopping 48 | Work 49 | Errands 50 | Movies to watch 51 | ``` 52 | 53 | The `state` attribute has all the data of your full sync and the `sync` method 54 | does the job of keeping things in sync in the best way possible. 55 | 56 | You can add or change data as well. Let's add a task and change it as examples: 57 | 58 | 59 | ```python 60 | $ python 61 | >>> import todoist 62 | >>> api = todoist.TodoistAPI('0123456789abcdef0123456789abcdef01234567') 63 | >>> item = api.items.add('My taks') # oh no, typo! 64 | >>> api.commit() # commit the changes to the server 65 | {'id': 1234567890, u'content': u'My taks', u'user_id': 1, ...} 66 | >>> 67 | >>> api.items.update(item['id'], content='My task') 68 | >>> api.commit() # never forget to commit! 69 | {'id': 1234567890, u'content': u'My task', u'user_id': 1, ...} 70 | ``` 71 | 72 | That's it! To know what actions are available for each object, refer to 73 | `Managers` in our [official documentation](https://todoist-python.readthedocs.io). 74 | 75 | We also document all the actions available on this library along with our 76 | official API documentation. Here is one example of the [add task 77 | endpoint](https://developer.todoist.com/sync/v8/?python#add-an-item). Check the 78 | _python_ tab on the examples for actions related to this library. 79 | 80 | 81 | ## Development 82 | 83 | ### Build Project 84 | 85 | This project still supports Python 2.7 but **we recommend Python 3**. 86 | 87 | We recommend using [virtualenv](https://pypi.python.org/pypi/virtualenv) and 88 | [pip](https://pypi.python.org/pypi/pip) for the project bootstrap. 
Below is a 89 | step by step of the bootstrap process: 90 | 91 | Clone the repo and enter it: 92 | 93 | $ git clone git@github.com:Doist/todoist-python.git ; cd todoist-python 94 | 95 | Create an environment: 96 | 97 | $ virtualenv --system-site-packages -p /usr/bin/python2.7 env # if you need python2.7 98 | $ virtualenv --system-site-packages env # use only python3 if you run this 99 | 100 | Activate the environment: 101 | 102 | $ source env/bin/activate 103 | 104 | Install the library along with all the dependencies (just `requests` for this project): 105 | 106 | $ pip install -e . 107 | 108 | 109 | ### Build Documentation 110 | 111 | If you want to build the documentation as well, install some extra packages: 112 | 113 | $ pip install -r doc/requirements.txt 114 | 115 | Build the documentation: 116 | 117 | $ (cd doc ; make html) 118 | 119 | Read the built documentation by opening: 120 | 121 | doc/_build/html/index.html 122 | 123 | ## Testing the library 124 | 125 | We have a set of tests to ensure we support Python both 2.x and 3.x. 126 | 127 | To test it out, please make sure you have python 2 and python 3 installed in 128 | your system. Then install "tox" either globally (preferred way) or in your local 129 | environment. 130 | 131 | # apt-get install python-tox 132 | 133 | or 134 | 135 | $ pip install tox 136 | 137 | 138 | You will also need to have the `pytest.ini` file. We are providing a 139 | `pytest.ini.sample` that you can copy and paste to create your own 140 | `pytest.ini`. 141 | 142 | You will need two different tokens (`token` and `token2` keys on on 143 | `pytest.ini`) to be able to run all the tests successfully, since we have tests 144 | for the sharing features. 145 | 146 | With the setup done, you can just run: 147 | 148 | $ tox 149 | 150 | Keep in mind that running the whole test suit may cause some tests to fail as 151 | you will certaily hit some limits of API usage. We recommend only running the 152 | test for your feature. 153 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [Unreleased] 4 | 5 | * Add support for items and sections archive manager. 6 | * The `items.move()` method now supports `section_id` to move an item to a 7 | different section 8 | 9 | ## [8.1.1] - 2019-10-29 10 | - Add `__contains__()` to `Model`. 11 | 12 | ## [8.1.0] - 2019-10-11 13 | - Add support for sections. 14 | 15 | ## [8.0.2] - 2019-10-07 16 | - Fix the parameters of `update_date_complete()`. 17 | 18 | ## [8.0.1] - 2019-10-07 19 | - Fix the default API endpoint. 20 | 21 | ## [8.0] - 2019-04-18 22 | 23 | * All arguments expecting a date/time must be formatted according to [RFC 24 | 3339](https://tools.ietf.org/html/rfc3339), and all return values are also 25 | using the same format. 26 | * The `item_order` and `indent` properties of projects, that denoted a visual 27 | hierarchy for the projects (the order of all the projects and the level of 28 | indent of each one of them), were replaced by `parent_id` and `child_order`, 29 | which denote a real hierarchy (the parent project of a project and the order 30 | of all children of a specific parent project). 31 | * The `projects.add()` method now expects a `parent_id` and `child_order` 32 | parameter, instead of the `item_order` and `indent` parameters. 
33 | * The `projects.update()` method doesn't expect an `item_order` and `indent` 34 | parameters anymore, but it doesn't accept the new `parent_id` and 35 | `child_order` parameters as well, as the way to change the hierarchy is now 36 | different (see the `projects.move()` and `projects.reorder()` methods). 37 | * The new `projects.move()` method must be used to move a project to become 38 | the child of another project or become a root project. 39 | * The new `projects.reorder()` method must be used to reorder projects in 40 | relation to their siblings with the same parent. 41 | * The `projects.delete()` method now expects only an `id` parameter, instead 42 | of the `ids` parameter, and it deletes the project and all the projects's 43 | descendants. 44 | * The `projects.archive()` method now expects the `id` parameter, instead of 45 | the `ids` parameter, and it archives the project and all the project's 46 | descendants. 47 | * The `projects.uncomplete()` method now expects an `id` parameter, instead 48 | of the `ids` parameter, and it restores the project as a root project. 49 | * The `projects.update_orders_indents()` method was removed. 50 | * The `date_string`, `date_lang`, `due_date_utc` properties of items were 51 | replaced by the `due` object. 52 | * The `item_order` and `indent` properties of items, that denoted a visual 53 | hierarchy for the items (the order of all the items and the level of indent 54 | of each one of them), were replaced by `parent_id` and `child_order`, which 55 | denote a real hierarchy (the parent item of an item and the order of all 56 | children of a specific parent item). 57 | * The `items.add()` method now expects a `parent_id` and `child_order` 58 | parameter, instead of the `item_order` and `indent` parameters. 59 | * The `items.add()` and `items.update()` methods now expect a `due` parameter, 60 | instead of the `date_string`, `date_lang` and/or `due_date_utc` parameters. 61 | * The `items.update()` method doesn't expect an `item_order` and `indent` 62 | parameters anymore, but it doesn't accept the new `parent_id` and 63 | `child_order` parameters as well, as the way to change the hierarchy is now 64 | different (see `item_move` and `item_reorder`). 65 | * The `items.move()` method does not accept the `project_items` and 66 | `to_project` parameters, but a new set of parameters specifically `id`, and 67 | one of `project_id` or `parent_id`. Another difference stemming from this is 68 | that only a single item can be moved at a time, and also that in order to 69 | move an item to become the child of another parent (or become a root level 70 | item) the `item_move` command must be used as well. 71 | * The `items.update_orders_indents()` method was removed. 72 | * The new `items.reorder()` method must be used to reorder items in relation 73 | to their siblings with the same parent. 74 | * The `items.delete` method now expects only an `id` parameter, instead of 75 | the `ids` parameter, and it deletes the item and all the item's descendants. 76 | * The `items.complete()` method now expects the `id` parameter, instead of 77 | the `ids` parameter, and it completes the item and all the item's 78 | descendants. In addition the new `date_completed` parameter can also be 79 | specified. 80 | * The `items.uncomplete()` method now expects an `id` parameter, instead of 81 | the `ids` parameter, and it uncompletes all the item's ancestors. 82 | * The new `items.archive()` method can be used to move an item to history. 
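
As a companion to the item-related notes above, here is a hedged sketch; the token, ids, and the exact fields of the `due` object are placeholders rather than values documented in this repository:

```python
import todoist

api = todoist.TodoistAPI("0123456789abcdef0123456789abcdef01234567")
api.sync()

# Dates are passed as a `due` object now instead of date_string/due_date_utc.
item = api.items.add("Water the plants", due={"string": "every day", "lang": "en"})
api.commit()

# item_move takes a single id plus exactly one destination argument.
api.items.move(item["id"], section_id=12345)  # placeholder section id

# item_complete takes a single id and also completes the item's descendants.
api.items.complete(item["id"])
api.commit()
```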
83 | * The new `items.unarchive()` method can be used to move an item out of 84 | history. 85 | * The `items.update_date_complete()` method now expects a `due` parameter, 86 | instead of `new_date_utc`, `date_string` and/or `is_forward` parameters. 87 | * The possible color values of filters changed from `0-12` to `30-49`. 88 | * The `date_string`, `date_lang`, `due_date_utc` properties of reminders were 89 | replaced by the `due` object. 90 | * The `reminders.add()` and `reminders.update()` methods now expect a `due` 91 | parameter, instead of the `date_string`, `date_lang` and/or `due_date_utc` 92 | parameters. 93 | * The state now includes an additional new resource type called 94 | `user_settings`. 95 | * The user object now includes the `days_off` property. 96 | * The `since` and `until` parameters of the `activity/get` method are 97 | deprecated, and are replaced by the new `page` parameter. 98 | -------------------------------------------------------------------------------- /todoist/managers/archive.py: -------------------------------------------------------------------------------- 1 | """ 2 | Managers to get the list of archived items and sections. 3 | 4 | Manager makers available as "items_archive" and "sections_archive" attributes of 5 | API object. 6 | 7 | 8 | Usage example (for items). 9 | 10 | ```python 11 | 12 | # Create an API object 13 | import todoist 14 | api = todoist.TodoistAPI(...) 15 | 16 | # Get project ID (take inbox) 17 | project_id = api.user.get()['inbox_project'] 18 | 19 | # Initiate ItemsArchiveManager 20 | archive = api.items_archive.for_project(project_id) 21 | 22 | # Iterate over the list of completed items for the archive 23 | for item in archive.items(): 24 | print(item["date_completed"], item["content"]) 25 | ``` 26 | """ 27 | from typing import TYPE_CHECKING, Dict, Iterator, Optional 28 | 29 | from ..models import Item, Model, Section 30 | 31 | if TYPE_CHECKING: 32 | from ..api import TodoistAPI 33 | 34 | 35 | class ArchiveManager(object): 36 | 37 | object_model = Model 38 | 39 | def __init__(self, api, element_type): 40 | # type: (TodoistAPI, str) -> None 41 | assert element_type in {"sections", "items"} 42 | self.api = api 43 | self.element_type = element_type 44 | 45 | def next_page(self, cursor): 46 | # type: (Optional[str]) -> Dict 47 | """Return response for the next page of the archive.""" 48 | resp = self.api.session.get( 49 | self._next_url(), 50 | params=self._next_query_params(cursor), 51 | headers=self._request_headers(), 52 | ) 53 | resp.raise_for_status() 54 | return resp.json() 55 | 56 | def _next_url(self): 57 | return "{0}/sync/{1}/archive/{2}".format( 58 | self.api.api_endpoint, self.api.api_version, self.element_type 59 | ) 60 | 61 | def _next_query_params(self, cursor): 62 | # type: (Optional[str]) -> Dict 63 | ret = {} 64 | if cursor: 65 | ret["cursor"] = cursor 66 | return ret 67 | 68 | def _request_headers(self): 69 | return {"Authorization": "Bearer {}".format(self.api.token)} 70 | 71 | def _iterate(self): 72 | has_more = True 73 | cursor = None 74 | 75 | while True: 76 | if not has_more: 77 | break 78 | 79 | resp = self.next_page(cursor) 80 | 81 | elements = [self._make_element(data) for data in resp[self.element_type]] 82 | has_more = resp["has_more"] 83 | cursor = resp.get("next_cursor") 84 | for el in elements: 85 | yield el 86 | 87 | def _make_element(self, data): 88 | return self.object_model(data, self.api) 89 | 90 | 91 | class SectionsArchiveManagerMaker(object): 92 | def __init__(self, api): 93 | self.api = api 94 | 95 
| def __repr__(self): 96 | return "{}()".format(self.__class__.__name__) 97 | 98 | def for_project(self, project_id): 99 | """Get manager to iterate over all archived sections for project.""" 100 | return SectionsArchiveManager(api=self.api, project_id=project_id) 101 | 102 | 103 | class SectionsArchiveManager(ArchiveManager): 104 | 105 | object_model = Section 106 | 107 | def __init__(self, api, project_id): 108 | super(SectionsArchiveManager, self).__init__(api, "sections") 109 | self.project_id = project_id 110 | 111 | def __repr__(self): 112 | return "SectionsArchiveManager(project_id={})".format(self.project_id) 113 | 114 | def sections(self): 115 | # type: () -> Iterator[Section] 116 | """Iterate over all archived sections.""" 117 | for obj in self._iterate(): 118 | yield obj 119 | 120 | def _next_query_params(self, cursor): 121 | ret = super(SectionsArchiveManager, self)._next_query_params(cursor) 122 | ret["project_id"] = self.project_id 123 | return ret 124 | 125 | 126 | class ItemsArchiveManagerMaker(object): 127 | def __init__(self, api): 128 | self.api = api 129 | 130 | def __repr__(self): 131 | return "{}()".format(self.__class__.__name__) 132 | 133 | def for_project(self, project_id): 134 | """Get manager to iterate over all top-level archived items for project.""" 135 | return ItemsArchiveManager(api=self.api, project_id=project_id) 136 | 137 | def for_section(self, section_id): 138 | """Get manager to iterate over all top-level archived items for section.""" 139 | return ItemsArchiveManager(api=self.api, section_id=section_id) 140 | 141 | def for_parent(self, parent_id): 142 | """Get manager to iterate over all archived sub-tasks for an item.""" 143 | return ItemsArchiveManager(api=self.api, parent_id=parent_id) 144 | 145 | 146 | class ItemsArchiveManager(ArchiveManager): 147 | 148 | object_model = Item 149 | 150 | def __init__(self, api, project_id=None, section_id=None, parent_id=None): 151 | super(ItemsArchiveManager, self).__init__(api, "items") 152 | assert sum([bool(project_id), bool(section_id), bool(parent_id)]) == 1 153 | self.project_id = project_id 154 | self.section_id = section_id 155 | self.parent_id = parent_id 156 | 157 | def __repr__(self): 158 | k, v = self._key_value() 159 | return "ItemsArchiveManager({}={})".format(k, v) 160 | 161 | def items(self): 162 | # type: () -> Iterator[Item] 163 | """Iterate over all archived items.""" 164 | for obj in self._iterate(): 165 | yield obj 166 | 167 | def _next_query_params(self, cursor): 168 | ret = super(ItemsArchiveManager, self)._next_query_params(cursor) 169 | k, v = self._key_value() 170 | ret[k] = v 171 | return ret 172 | 173 | def _key_value(self): 174 | if self.project_id: 175 | return "project_id", self.project_id 176 | elif self.section_id: 177 | return "section_id", self.section_id 178 | else: # if self.parent_id: 179 | return "parent_id", self.parent_id 180 | -------------------------------------------------------------------------------- /todoist/managers/items.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .. import models 3 | from .generic import AllMixin, GetByIdMixin, Manager, SyncMixin 4 | 5 | 6 | class ItemsManager(Manager, AllMixin, GetByIdMixin, SyncMixin): 7 | 8 | state_name = "items" 9 | object_type = "item" 10 | 11 | def add(self, content, **kwargs): 12 | """ 13 | Creates a local item object. 
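
        A hedged usage sketch (the content and project id are placeholder
        values; nothing is sent to the server until commit()):

            item = api.items.add("Buy milk", project_id=128501470)
            api.commit()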
14 | """ 15 | project_id = kwargs.get("project_id") 16 | if not project_id: 17 | project_id = self.state["user"]["inbox_project"] 18 | obj = models.Item({"content": content, "project_id": project_id}, self.api) 19 | obj.temp_id = obj["id"] = self.api.generate_uuid() 20 | obj.data.update(kwargs) 21 | self.state[self.state_name].append(obj) 22 | cmd = { 23 | "type": "item_add", 24 | "temp_id": obj.temp_id, 25 | "uuid": self.api.generate_uuid(), 26 | "args": {key: obj.data[key] for key in obj.data if key != "id"}, 27 | } 28 | self.queue.append(cmd) 29 | return obj 30 | 31 | def update(self, item_id, **kwargs): 32 | """ 33 | Updates an item remotely. 34 | """ 35 | args = {"id": item_id} 36 | args.update(kwargs) 37 | cmd = { 38 | "type": "item_update", 39 | "uuid": self.api.generate_uuid(), 40 | "args": args, 41 | } 42 | self.queue.append(cmd) 43 | 44 | def delete(self, item_id): 45 | """ 46 | Deletes an item remotely. 47 | """ 48 | cmd = { 49 | "type": "item_delete", 50 | "uuid": self.api.generate_uuid(), 51 | "args": {"id": item_id}, 52 | } 53 | self.queue.append(cmd) 54 | 55 | def move(self, item_id, **kwargs): 56 | """ 57 | Moves item to another parent, project, or section remotely. 58 | """ 59 | args = { 60 | "id": item_id, 61 | } 62 | if "parent_id" in kwargs: 63 | args["parent_id"] = kwargs.get("parent_id") 64 | elif "project_id" in kwargs: 65 | args["project_id"] = kwargs.get("project_id") 66 | elif "section_id" in kwargs: 67 | args["section_id"] = kwargs.get("section_id") 68 | else: 69 | raise TypeError("move() takes one of parent_id, project_id, or section_id arguments") 70 | cmd = {"type": "item_move", "uuid": self.api.generate_uuid(), "args": args} 71 | self.queue.append(cmd) 72 | 73 | def close(self, item_id): 74 | """ 75 | Marks item as done 76 | """ 77 | cmd = { 78 | "type": "item_close", 79 | "uuid": self.api.generate_uuid(), 80 | "args": {"id": item_id}, 81 | } 82 | self.queue.append(cmd) 83 | 84 | def complete(self, item_id, date_completed=None, force_history=None): 85 | """ 86 | Marks item as completed remotely. 87 | """ 88 | args = { 89 | "id": item_id, 90 | } 91 | if date_completed is not None: 92 | args["date_completed"] = date_completed 93 | if force_history is not None: 94 | args["force_history"] = force_history 95 | cmd = { 96 | "type": "item_complete", 97 | "uuid": self.api.generate_uuid(), 98 | "args": args, 99 | } 100 | self.queue.append(cmd) 101 | 102 | def uncomplete(self, item_id): 103 | """ 104 | Marks item as uncompleted remotely. 105 | """ 106 | cmd = { 107 | "type": "item_uncomplete", 108 | "uuid": self.api.generate_uuid(), 109 | "args": {"id": item_id}, 110 | } 111 | self.queue.append(cmd) 112 | 113 | def archive(self, item_id): 114 | """ 115 | Marks item as archived remotely. 116 | """ 117 | cmd = { 118 | "type": "item_archive", 119 | "uuid": self.api.generate_uuid(), 120 | "args": {"id": item_id}, 121 | } 122 | self.queue.append(cmd) 123 | 124 | def unarchive(self, item_id): 125 | """ 126 | Marks item as unarchived remotely. 127 | """ 128 | cmd = { 129 | "type": "item_unarchive", 130 | "uuid": self.api.generate_uuid(), 131 | "args": {"id": item_id}, 132 | } 133 | self.queue.append(cmd) 134 | 135 | def update_date_complete(self, item_id, due=None): 136 | """ 137 | Completes a recurring task remotely. 
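
        A hedged usage sketch (the id is a placeholder and the `due` payload
        shape follows the Sync API conventions, so treat it as illustrative):

            api.items.update_date_complete(
                1234567890, due={"date": "2020-05-16", "string": "every day"})
            api.commit()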
138 | """ 139 | args = { 140 | "id": item_id, 141 | } 142 | if due: 143 | args["due"] = due 144 | cmd = { 145 | "type": "item_update_date_complete", 146 | "uuid": self.api.generate_uuid(), 147 | "args": args, 148 | } 149 | self.queue.append(cmd) 150 | 151 | def reorder(self, items): 152 | """ 153 | Updates the child_order of the specified items. 154 | """ 155 | cmd = { 156 | "type": "item_reorder", 157 | "uuid": self.api.generate_uuid(), 158 | "args": {"items": items}, 159 | } 160 | self.queue.append(cmd) 161 | 162 | def update_day_orders(self, ids_to_orders): 163 | """ 164 | Updates in the local state the day orders of multiple items remotely. 165 | """ 166 | cmd = { 167 | "type": "item_update_day_orders", 168 | "uuid": self.api.generate_uuid(), 169 | "args": {"ids_to_orders": ids_to_orders}, 170 | } 171 | self.queue.append(cmd) 172 | 173 | def get_completed(self, project_id, **kwargs): 174 | """ 175 | Returns a project's completed items. 176 | """ 177 | params = {"token": self.token, "project_id": project_id} 178 | params.update(kwargs) 179 | return self.api._get("items/get_completed", params=params) 180 | 181 | def get(self, item_id): 182 | """ 183 | Gets an existing item. 184 | """ 185 | params = {"token": self.token, "item_id": item_id} 186 | obj = self.api._get("items/get", params=params) 187 | if obj and "error" in obj: 188 | return None 189 | 190 | data = {"projects": [], "items": [], "notes": []} 191 | if obj.get("project"): 192 | data["projects"].append(obj.get("project")) 193 | if obj.get("item"): 194 | data["items"].append(obj.get("item")) 195 | if obj.get("notes"): 196 | data["notes"] += obj.get("notes") 197 | self.api._update_state(data) 198 | return obj 199 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/todoist-python.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/todoist-python.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/todoist-python" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/todoist-python" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 
11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 
116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\todoist-python.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\todoist-python.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 
231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /todoist/models.py: -------------------------------------------------------------------------------- 1 | from pprint import pformat 2 | 3 | 4 | class Model(object): 5 | """ 6 | Implements a generic object. 7 | """ 8 | 9 | def __init__(self, data, api): 10 | self.temp_id = "" 11 | self.data = data 12 | self.api = api 13 | 14 | def __setitem__(self, key, value): 15 | self.data[key] = value 16 | 17 | def __getitem__(self, key): 18 | return self.data[key] 19 | 20 | def __repr__(self): 21 | formatted_dict = pformat(dict(self.data)) 22 | classname = self.__class__.__name__ 23 | return "%s(%s)" % (classname, formatted_dict) 24 | 25 | def __contains__(self, value): 26 | return value in self.data 27 | 28 | 29 | class Collaborator(Model): 30 | """ 31 | Implements a collaborator. 32 | """ 33 | 34 | def delete(self, project_id): 35 | """ 36 | Deletes a collaborator from a shared project. 37 | """ 38 | self.api.collaborators.delete(project_id, self["email"]) 39 | 40 | 41 | class CollaboratorState(Model): 42 | """ 43 | Implements a collaborator state. 44 | """ 45 | 46 | pass 47 | 48 | 49 | class Filter(Model): 50 | """ 51 | Implements a filter. 52 | """ 53 | 54 | def update(self, **kwargs): 55 | """ 56 | Updates filter. 57 | """ 58 | self.api.filters.update(self["id"], **kwargs) 59 | self.data.update(kwargs) 60 | 61 | def delete(self): 62 | """ 63 | Deletes filter. 64 | """ 65 | self.api.filters.delete(self["id"]) 66 | self.data["is_deleted"] = 1 67 | 68 | 69 | class Item(Model): 70 | """ 71 | Implements an item. 72 | """ 73 | 74 | def update(self, **kwargs): 75 | """ 76 | Updates item. 77 | """ 78 | self.api.items.update(self["id"], **kwargs) 79 | self.data.update(kwargs) 80 | 81 | def delete(self): 82 | """ 83 | Deletes item. 84 | """ 85 | self.api.items.delete(self["id"]) 86 | self.data["is_deleted"] = 1 87 | 88 | def move(self, **kwargs): 89 | """ 90 | Moves item to another parent, project, or section. 91 | """ 92 | if "parent_id" in kwargs: 93 | self.api.items.move(self["id"], parent_id=kwargs.get("parent_id")) 94 | self.data["parent_id"] = kwargs.get("parent_id") 95 | elif "project_id" in kwargs: 96 | self.api.items.move(self["id"], project_id=kwargs.get("project_id")) 97 | self.data["project_id"] = kwargs.get("project_id") 98 | elif "section_id" in kwargs: 99 | self.api.items.move(self["id"], section_id=kwargs.get("section_id")) 100 | self.data["section_id"] = kwargs.get("section_id") 101 | else: 102 | raise TypeError("move() takes one of parent_id, project_id, or section_id arguments") 103 | 104 | def reorder(self, child_order): 105 | """ 106 | Reorder item. 107 | """ 108 | self.api.items.reorder([{"id": self["id"], "child_order": child_order}]) 109 | self.data["child_order"] = child_order 110 | 111 | def close(self): 112 | """ 113 | Marks item as closed 114 | """ 115 | self.api.items.close(self["id"]) 116 | 117 | def complete(self, date_completed=None): 118 | """ 119 | Marks item as completed. 120 | """ 121 | self.api.items.complete(self["id"], date_completed=date_completed) 122 | self.data["checked"] = 1 123 | 124 | def uncomplete(self): 125 | """ 126 | Marks item as uncompleted. 
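
        A hedged usage sketch (assumes the item was loaded by an earlier
        sync() call):

            item = api.state["items"][0]
            item.uncomplete()
            api.commit()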
127 | """ 128 | self.api.items.uncomplete(self["id"]) 129 | self.data["checked"] = 0 130 | 131 | def archive(self): 132 | """ 133 | Marks item as archived. 134 | """ 135 | self.api.items.archive(self["id"]) 136 | self.data["in_history"] = 1 137 | 138 | def unarchive(self): 139 | """ 140 | Marks item as unarchived. 141 | """ 142 | self.api.items.unarchive(self["id"]) 143 | self.data["in_history"] = 0 144 | 145 | def update_date_complete(self, due=None): 146 | """ 147 | Completes a recurring task. 148 | """ 149 | self.api.items.update_date_complete(self["id"], due=due) 150 | if due: 151 | self.data["due"] = due 152 | 153 | 154 | class Label(Model): 155 | """ 156 | Implements a label. 157 | """ 158 | 159 | def update(self, **kwargs): 160 | """ 161 | Updates label. 162 | """ 163 | self.api.labels.update(self["id"], **kwargs) 164 | self.data.update(kwargs) 165 | 166 | def delete(self): 167 | """ 168 | Deletes label. 169 | """ 170 | self.api.labels.delete(self["id"]) 171 | self.data["is_deleted"] = 1 172 | 173 | 174 | class LiveNotification(Model): 175 | """ 176 | Implements a live notification. 177 | """ 178 | 179 | pass 180 | 181 | 182 | class GenericNote(Model): 183 | """ 184 | Implements a note. 185 | """ 186 | 187 | #: has to be defined in subclasses 188 | local_manager = None 189 | 190 | def update(self, **kwargs): 191 | """ 192 | Updates note. 193 | """ 194 | self.local_manager.update(self["id"], **kwargs) 195 | self.data.update(kwargs) 196 | 197 | def delete(self): 198 | """ 199 | Deletes note. 200 | """ 201 | self.local_manager.delete(self["id"]) 202 | self.data["is_deleted"] = 1 203 | 204 | 205 | class Note(GenericNote): 206 | """ 207 | Implement an item note. 208 | """ 209 | 210 | def __init__(self, data, api): 211 | GenericNote.__init__(self, data, api) 212 | self.local_manager = self.api.notes 213 | 214 | 215 | class ProjectNote(GenericNote): 216 | """ 217 | Implement a project note. 218 | """ 219 | 220 | def __init__(self, data, api): 221 | GenericNote.__init__(self, data, api) 222 | self.local_manager = self.api.project_notes 223 | 224 | 225 | class Project(Model): 226 | """ 227 | Implements a project. 228 | """ 229 | 230 | def update(self, **kwargs): 231 | """ 232 | Updates project. 233 | """ 234 | self.api.projects.update(self["id"], **kwargs) 235 | self.data.update(kwargs) 236 | 237 | def delete(self): 238 | """ 239 | Deletes project. 240 | """ 241 | self.api.projects.delete(self["id"]) 242 | self.data["is_deleted"] = 1 243 | 244 | def archive(self): 245 | """ 246 | Marks project as archived. 247 | """ 248 | self.api.projects.archive(self["id"]) 249 | self.data["is_archived"] = 1 250 | 251 | def unarchive(self): 252 | """ 253 | Marks project as unarchived. 254 | """ 255 | self.api.projects.unarchive(self["id"]) 256 | self.data["is_archived"] = 0 257 | 258 | def move(self, parent_id): 259 | """ 260 | Moves project to another parent. 261 | """ 262 | self.api.projects.move(self["id"], parent_id) 263 | 264 | def reorder(self, child_order): 265 | """ 266 | Reorder project. 267 | """ 268 | self.api.projects.reorder([{"id": self["id"], "child_order": child_order}]) 269 | self.data["child_order"] = child_order 270 | 271 | def share(self, email): 272 | """ 273 | Shares projects with a user. 274 | """ 275 | self.api.projects.share(self["id"], email) 276 | 277 | def take_ownership(self): 278 | """ 279 | Takes ownership of a shared project. 280 | """ 281 | self.api.projects.take_ownership(self["id"]) 282 | 283 | 284 | class Reminder(Model): 285 | """ 286 | Implements a reminder. 
287 | """ 288 | 289 | def update(self, **kwargs): 290 | """ 291 | Updates reminder. 292 | """ 293 | self.api.reminders.update(self["id"], **kwargs) 294 | self.data.update(kwargs) 295 | 296 | def delete(self): 297 | """ 298 | Deletes reminder. 299 | """ 300 | self.api.reminders.delete(self["id"]) 301 | self.data["is_deleted"] = 1 302 | 303 | 304 | class Section(Model): 305 | """ 306 | Implements a section. 307 | """ 308 | 309 | def update(self, **kwargs): 310 | """ 311 | Updates section. 312 | """ 313 | self.api.sections.update(self["id"], **kwargs) 314 | self.data.update(kwargs) 315 | 316 | def delete(self): 317 | """ 318 | Deletes section. 319 | """ 320 | self.api.sections.delete(self["id"]) 321 | self.data["is_deleted"] = 1 322 | 323 | def move(self, project_id): 324 | """ 325 | Moves section to another project. 326 | """ 327 | self.api.sections.move(self["id"], project_id=project_id) 328 | self.data["project_id"] = project_id 329 | 330 | def reorder(self, section_order): 331 | """ 332 | Reorder section. 333 | """ 334 | self.api.sections.reorder([{"id": self["id"], "section_order": section_order}]) 335 | self.data["section_order"] = section_order 336 | 337 | def archive(self, date_archived=None): 338 | """ 339 | Marks section as archived. 340 | """ 341 | self.api.sections.archive(self["id"], date_archived=date_archived) 342 | self.data["is_archived"] = 1 343 | 344 | def unarchive(self): 345 | """ 346 | Marks section as unarchived. 347 | """ 348 | self.api.sections.unarchive(self["id"]) 349 | self.data["is_archived"] = 0 350 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # todoist-python documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Jul 22 11:37:51 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import os 16 | import sys 17 | 18 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 19 | 20 | # If extensions (or modules to document with autodoc) are in another directory, 21 | # add these directories to sys.path here. If the directory is relative to the 22 | # documentation root, use os.path.abspath to make it absolute, like shown here. 23 | sys.path.insert(0, os.path.abspath("..")) 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | # needs_sphinx = '1.0' 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [ 34 | "sphinx.ext.autodoc", 35 | "sphinx.ext.autosummary", 36 | "sphinx.ext.coverage", 37 | "sphinx.ext.viewcode", 38 | ] 39 | autodoc_member_order = "bysource" 40 | 41 | # Add any paths that contain templates here, relative to this directory. 42 | templates_path = ["_templates"] 43 | 44 | # The suffix of source filenames. 45 | source_suffix = ".rst" 46 | 47 | # The encoding of source files. 48 | # source_encoding = 'utf-8-sig' 49 | 50 | # The master toctree document. 51 | master_doc = "index" 52 | 53 | # General information about the project. 
54 | project = u"todoist-python" 55 | copyright = u"2014, Doist" 56 | 57 | # The version info for the project you're documenting, acts as replacement for 58 | # |version| and |release|, also used in various other places throughout the 59 | # built documents. 60 | # 61 | # The short X.Y version. 62 | version = "1.0" 63 | # The full version, including alpha/beta/rc tags. 64 | release = "1.0" 65 | 66 | # The language for content autogenerated by Sphinx. Refer to documentation 67 | # for a list of supported languages. 68 | # language = None 69 | 70 | # There are two options for replacing |today|: either, you set today to some 71 | # non-false value, then it is used: 72 | # today = '' 73 | # Else, today_fmt is used as the format for a strftime call. 74 | # today_fmt = '%B %d, %Y' 75 | 76 | # List of patterns, relative to source directory, that match files and 77 | # directories to ignore when looking for source files. 78 | exclude_patterns = ["_build"] 79 | 80 | # The reST default role (used for this markup: `text`) to use for all 81 | # documents. 82 | # default_role = None 83 | 84 | # If true, '()' will be appended to :func: etc. cross-reference text. 85 | # add_function_parentheses = True 86 | 87 | # If true, the current module name will be prepended to all description 88 | # unit titles (such as .. function::). 89 | # add_module_names = True 90 | 91 | # If true, sectionauthor and moduleauthor directives will be shown in the 92 | # output. They are ignored by default. 93 | # show_authors = False 94 | 95 | # The name of the Pygments (syntax highlighting) style to use. 96 | pygments_style = "sphinx" 97 | 98 | # A list of ignored prefixes for module index sorting. 99 | # modindex_common_prefix = [] 100 | 101 | # If true, keep warnings as "system message" paragraphs in the built documents. 102 | # keep_warnings = False 103 | 104 | 105 | # -- Options for HTML output ---------------------------------------------- 106 | 107 | # The theme to use for HTML and HTML Help pages. See the documentation for 108 | # a list of builtin themes. 109 | html_theme = "default" 110 | 111 | # Theme options are theme-specific and customize the look and feel of a theme 112 | # further. For a list of options available for each theme, see the 113 | # documentation. 114 | # html_theme_options = {} 115 | 116 | # Add any paths that contain custom themes here, relative to this directory. 117 | # html_theme_path = [] 118 | 119 | if not on_rtd: 120 | import sphinx_rtd_theme 121 | 122 | html_theme = "sphinx_rtd_theme" 123 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 124 | 125 | # The name for this set of Sphinx documents. If None, it defaults to 126 | # " v documentation". 127 | # html_title = None 128 | 129 | # A shorter title for the navigation bar. Default is the same as html_title. 130 | # html_short_title = None 131 | 132 | # The name of an image file (relative to this directory) to place at the top 133 | # of the sidebar. 134 | # html_logo = None 135 | 136 | # The name of an image file (within the static path) to use as favicon of the 137 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 138 | # pixels large. 139 | # html_favicon = None 140 | 141 | # Add any paths that contain custom static files (such as style sheets) here, 142 | # relative to this directory. They are copied after the builtin static files, 143 | # so a file named "default.css" will overwrite the builtin "default.css". 
144 | html_static_path = ["_static"] 145 | 146 | # Add any extra paths that contain custom files (such as robots.txt or 147 | # .htaccess) here, relative to this directory. These files are copied 148 | # directly to the root of the documentation. 149 | # html_extra_path = [] 150 | 151 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 152 | # using the given strftime format. 153 | # html_last_updated_fmt = '%b %d, %Y' 154 | 155 | # If true, SmartyPants will be used to convert quotes and dashes to 156 | # typographically correct entities. 157 | # html_use_smartypants = True 158 | 159 | # Custom sidebar templates, maps document names to template names. 160 | # html_sidebars = {} 161 | 162 | # Additional templates that should be rendered to pages, maps page names to 163 | # template names. 164 | # html_additional_pages = {} 165 | 166 | # If false, no module index is generated. 167 | # html_domain_indices = True 168 | 169 | # If false, no index is generated. 170 | # html_use_index = True 171 | 172 | # If true, the index is split into individual pages for each letter. 173 | # html_split_index = False 174 | 175 | # If true, links to the reST sources are added to the pages. 176 | # html_show_sourcelink = True 177 | 178 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 179 | # html_show_sphinx = True 180 | 181 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 182 | # html_show_copyright = True 183 | 184 | # If true, an OpenSearch description file will be output, and all pages will 185 | # contain a tag referring to it. The value of this option must be the 186 | # base URL from which the finished HTML is served. 187 | # html_use_opensearch = '' 188 | 189 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 190 | # html_file_suffix = None 191 | 192 | # Output file base name for HTML help builder. 193 | htmlhelp_basename = "todoist-pythondoc" 194 | 195 | 196 | # -- Options for LaTeX output --------------------------------------------- 197 | 198 | latex_elements = { 199 | # The paper size ('letterpaper' or 'a4paper'). 200 | # 'papersize': 'letterpaper', 201 | # The font size ('10pt', '11pt' or '12pt'). 202 | # 'pointsize': '10pt', 203 | # Additional stuff for the LaTeX preamble. 204 | # 'preamble': '', 205 | } 206 | 207 | # Grouping the document tree into LaTeX files. List of tuples 208 | # (source start file, target name, title, 209 | # author, documentclass [howto, manual, or own class]). 210 | latex_documents = [ 211 | ( 212 | "index", 213 | "todoist-python.tex", 214 | u"todoist-python Documentation", 215 | u"Doist", 216 | "manual", 217 | ), 218 | ] 219 | 220 | # The name of an image file (relative to this directory) to place at the top of 221 | # the title page. 222 | # latex_logo = None 223 | 224 | # For "manual" documents, if this is true, then toplevel headings are parts, 225 | # not chapters. 226 | # latex_use_parts = False 227 | 228 | # If true, show page references after internal links. 229 | # latex_show_pagerefs = False 230 | 231 | # If true, show URL addresses after external links. 232 | # latex_show_urls = False 233 | 234 | # Documents to append as an appendix to all manuals. 235 | # latex_appendices = [] 236 | 237 | # If false, no module index is generated. 238 | # latex_domain_indices = True 239 | 240 | 241 | # -- Options for manual page output --------------------------------------- 242 | 243 | # One entry per manual page. 
List of tuples 244 | # (source start file, name, description, authors, manual section). 245 | man_pages = [ 246 | ("index", "todoist-python", u"todoist-python Documentation", [u"Doist"], 1) 247 | ] 248 | 249 | # If true, show URL addresses after external links. 250 | # man_show_urls = False 251 | 252 | 253 | # -- Options for Texinfo output ------------------------------------------- 254 | 255 | # Grouping the document tree into Texinfo files. List of tuples 256 | # (source start file, target name, title, author, 257 | # dir menu entry, description, category) 258 | texinfo_documents = [ 259 | ( 260 | "index", 261 | "todoist-python", 262 | u"todoist-python Documentation", 263 | u"Doist", 264 | "todoist-python", 265 | "One line description of project.", 266 | "Miscellaneous", 267 | ), 268 | ] 269 | 270 | # Documents to append as an appendix to all manuals. 271 | # texinfo_appendices = [] 272 | 273 | # If false, no module index is generated. 274 | # texinfo_domain_indices = True 275 | 276 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 277 | # texinfo_show_urls = 'footnote' 278 | 279 | # If true, do not generate a @detailmenu in the "Top" node's menu. 280 | # texinfo_no_detailmenu = False 281 | -------------------------------------------------------------------------------- /todoist/api.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import functools 3 | import json 4 | import os 5 | import uuid 6 | 7 | import requests 8 | 9 | from todoist import models 10 | from todoist.managers.activity import ActivityManager 11 | from todoist.managers.archive import ( 12 | ItemsArchiveManagerMaker, 13 | SectionsArchiveManagerMaker, 14 | ) 15 | from todoist.managers.backups import BackupsManager 16 | from todoist.managers.biz_invitations import BizInvitationsManager 17 | from todoist.managers.business_users import BusinessUsersManager 18 | from todoist.managers.collaborator_states import CollaboratorStatesManager 19 | from todoist.managers.collaborators import CollaboratorsManager 20 | from todoist.managers.completed import CompletedManager 21 | from todoist.managers.emails import EmailsManager 22 | from todoist.managers.filters import FiltersManager 23 | from todoist.managers.invitations import InvitationsManager 24 | from todoist.managers.items import ItemsManager 25 | from todoist.managers.labels import LabelsManager 26 | from todoist.managers.live_notifications import LiveNotificationsManager 27 | from todoist.managers.locations import LocationsManager 28 | from todoist.managers.notes import NotesManager, ProjectNotesManager 29 | from todoist.managers.projects import ProjectsManager 30 | from todoist.managers.quick import QuickManager 31 | from todoist.managers.reminders import RemindersManager 32 | from todoist.managers.sections import SectionsManager 33 | from todoist.managers.templates import TemplatesManager 34 | from todoist.managers.uploads import UploadsManager 35 | from todoist.managers.user import UserManager 36 | from todoist.managers.user_settings import UserSettingsManager 37 | 38 | DEFAULT_API_VERSION = "v8" 39 | 40 | 41 | class SyncError(Exception): 42 | pass 43 | 44 | 45 | class TodoistAPI(object): 46 | """ 47 | Implements the API that makes it possible to interact with a Todoist user 48 | account and its data. 
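    A typical flow, as reflected in the rest of this module: create the client
    with the user's API token, call sync() to pull data into the local state,
    change objects through the attached managers (projects, items, filters,
    ...), and call commit() to send the queued commands back to the server.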
49 | """ 50 | 51 | _serialize_fields = ("token", "api_endpoint", "sync_token", "state", "temp_ids") 52 | 53 | @classmethod 54 | def deserialize(cls, data): 55 | obj = cls() 56 | for key in cls._serialize_fields: 57 | if key in data: 58 | setattr(obj, key, data[key]) 59 | return obj 60 | 61 | def __init__( 62 | self, 63 | token="", 64 | api_endpoint="https://api.todoist.com", 65 | api_version=DEFAULT_API_VERSION, 66 | session=None, 67 | cache="~/.todoist-sync/", 68 | ): 69 | self.api_endpoint = api_endpoint 70 | self.api_version = api_version 71 | self.reset_state() 72 | self.token = token # User's API token 73 | self.temp_ids = {} # Mapping of temporary ids to real ids 74 | self.queue = [] # Requests to be sent are appended here 75 | self.session = session or requests.Session() # Session instance for requests 76 | 77 | # managers 78 | self.biz_invitations = BizInvitationsManager(self) 79 | self.collaborators = CollaboratorsManager(self) 80 | self.collaborator_states = CollaboratorStatesManager(self) 81 | self.filters = FiltersManager(self) 82 | self.invitations = InvitationsManager(self) 83 | self.items = ItemsManager(self) 84 | self.labels = LabelsManager(self) 85 | self.live_notifications = LiveNotificationsManager(self) 86 | self.locations = LocationsManager(self) 87 | self.notes = NotesManager(self) 88 | self.projects = ProjectsManager(self) 89 | self.project_notes = ProjectNotesManager(self) 90 | self.reminders = RemindersManager(self) 91 | self.sections = SectionsManager(self) 92 | self.user = UserManager(self) 93 | self.user_settings = UserSettingsManager(self) 94 | 95 | self.activity = ActivityManager(self) 96 | self.backups = BackupsManager(self) 97 | self.business_users = BusinessUsersManager(self) 98 | self.completed = CompletedManager(self) 99 | self.emails = EmailsManager(self) 100 | self.quick = QuickManager(self) 101 | self.templates = TemplatesManager(self) 102 | self.uploads = UploadsManager(self) 103 | 104 | self.items_archive = ItemsArchiveManagerMaker(self) 105 | self.sections_archive = SectionsArchiveManagerMaker(self) 106 | 107 | if cache: # Read and write user state on local disk cache 108 | self.cache = os.path.expanduser(cache) 109 | self._read_cache() 110 | else: 111 | self.cache = None 112 | 113 | def reset_state(self): 114 | self.sync_token = "*" 115 | self.state = { # Local copy of all of the user's objects 116 | "collaborator_states": [], 117 | "collaborators": [], 118 | "day_orders": {}, 119 | "day_orders_timestamp": "", 120 | "filters": [], 121 | "items": [], 122 | "labels": [], 123 | "live_notifications": [], 124 | "live_notifications_last_read_id": -1, 125 | "locations": [], 126 | "notes": [], 127 | "project_notes": [], 128 | "projects": [], 129 | "reminders": [], 130 | "sections": [], 131 | "settings_notifications": {}, 132 | "user": {}, 133 | "user_settings": {}, 134 | } 135 | 136 | def __getitem__(self, key): 137 | return self.state[key] 138 | 139 | def serialize(self): 140 | return {key: getattr(self, key) for key in self._serialize_fields} 141 | 142 | def get_api_url(self): 143 | return "{0}/sync/{1}/".format(self.api_endpoint, self.api_version) 144 | 145 | def _update_state(self, syncdata): 146 | """ 147 | Updates the local state, with the data returned by the server after a 148 | sync. 
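        Simple values (day orders, user data, settings, locations) are merged
        or replaced directly, while the model object lists (items, projects,
        notes, ...) are matched against the local state and added, updated, or
        removed depending on their is_deleted flag.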
149 | """ 150 | # Check sync token first 151 | if "sync_token" in syncdata: 152 | self.sync_token = syncdata["sync_token"] 153 | 154 | # It is straightforward to update these type of data, since it is 155 | # enough to just see if they are present in the sync data, and then 156 | # either replace the local values or update them. 157 | if "day_orders" in syncdata: 158 | self.state["day_orders"].update(syncdata["day_orders"]) 159 | if "day_orders_timestamp" in syncdata: 160 | self.state["day_orders_timestamp"] = syncdata["day_orders_timestamp"] 161 | if "live_notifications_last_read_id" in syncdata: 162 | self.state["live_notifications_last_read_id"] = syncdata[ 163 | "live_notifications_last_read_id" 164 | ] 165 | if "locations" in syncdata: 166 | self.state["locations"] = syncdata["locations"] 167 | if "settings_notifications" in syncdata: 168 | self.state["settings_notifications"].update( 169 | syncdata["settings_notifications"] 170 | ) 171 | if "user" in syncdata: 172 | self.state["user"].update(syncdata["user"]) 173 | if "user_settings" in syncdata: 174 | self.state["user_settings"].update(syncdata["user_settings"]) 175 | 176 | # Updating these type of data is a bit more complicated, since it is 177 | # necessary to find out whether an object in the sync data is new, 178 | # updates an existing object, or marks an object to be deleted. But 179 | # the same procedure takes place for each of these types of data. 180 | resp_models_mapping = [ 181 | ("collaborators", models.Collaborator), 182 | ("collaborator_states", models.CollaboratorState), 183 | ("filters", models.Filter), 184 | ("items", models.Item), 185 | ("labels", models.Label), 186 | ("live_notifications", models.LiveNotification), 187 | ("notes", models.Note), 188 | ("project_notes", models.ProjectNote), 189 | ("projects", models.Project), 190 | ("reminders", models.Reminder), 191 | ("sections", models.Section), 192 | ] 193 | for datatype, model in resp_models_mapping: 194 | if datatype not in syncdata: 195 | continue 196 | 197 | # Process each object of this specific type in the sync data. 198 | for remoteobj in syncdata[datatype]: 199 | # Find out whether the object already exists in the local 200 | # state. 201 | localobj = self._find_object(datatype, remoteobj) 202 | if localobj is not None: 203 | # If the object is already present in the local state, then 204 | # we either update it, or if marked as to be deleted, we 205 | # remove it. 206 | is_deleted = remoteobj.get("is_deleted", 0) 207 | if is_deleted == 0 or is_deleted is False: 208 | localobj.data.update(remoteobj) 209 | else: 210 | self.state[datatype].remove(localobj) 211 | else: 212 | # If not, then the object is new and it should be added, 213 | # unless it is marked as to be deleted (in which case it's 214 | # ignored). 
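                    # Note: is_deleted may be sent either as an int (0/1) or as
                    # a boolean, so both forms are checked here, as in the
                    # update branch above.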
215 | is_deleted = remoteobj.get("is_deleted", 0) 216 | if is_deleted == 0 or is_deleted is False: 217 | newobj = model(remoteobj, self) 218 | self.state[datatype].append(newobj) 219 | 220 | def _read_cache(self): 221 | if not self.cache: 222 | return 223 | 224 | try: 225 | os.makedirs(self.cache) 226 | except OSError: 227 | if not os.path.isdir(self.cache): 228 | raise 229 | 230 | try: 231 | with open(self.cache + self.token + ".json") as f: 232 | state = f.read() 233 | state = json.loads(state) 234 | self._update_state(state) 235 | 236 | with open(self.cache + self.token + ".sync") as f: 237 | sync_token = f.read() 238 | self.sync_token = sync_token 239 | except Exception: 240 | return 241 | 242 | def _write_cache(self): 243 | if not self.cache: 244 | return 245 | result = json.dumps(self.state, indent=2, sort_keys=True, default=state_default) 246 | with open(self.cache + self.token + ".json", "w") as f: 247 | f.write(result) 248 | with open(self.cache + self.token + ".sync", "w") as f: 249 | f.write(self.sync_token) 250 | 251 | def _find_object(self, objtype, obj): 252 | """ 253 | Searches for an object in the local state, depending on the type of 254 | object, and then on its primary key is. If the object is found it is 255 | returned, and if not, then None is returned. 256 | """ 257 | if objtype == "collaborators": 258 | return self.collaborators.get_by_id(obj["id"]) 259 | elif objtype == "collaborator_states": 260 | return self.collaborator_states.get_by_ids( 261 | obj["project_id"], obj["user_id"] 262 | ) 263 | elif objtype == "filters": 264 | return self.filters.get_by_id(obj["id"], only_local=True) 265 | elif objtype == "items": 266 | return self.items.get_by_id(obj["id"], only_local=True) 267 | elif objtype == "labels": 268 | return self.labels.get_by_id(obj["id"], only_local=True) 269 | elif objtype == "live_notifications": 270 | return self.live_notifications.get_by_id(obj["id"]) 271 | elif objtype == "notes": 272 | return self.notes.get_by_id(obj["id"], only_local=True) 273 | elif objtype == "project_notes": 274 | return self.project_notes.get_by_id(obj["id"], only_local=True) 275 | elif objtype == "projects": 276 | return self.projects.get_by_id(obj["id"], only_local=True) 277 | elif objtype == "reminders": 278 | return self.reminders.get_by_id(obj["id"], only_local=True) 279 | elif objtype == "sections": 280 | return self.sections.get_by_id(obj["id"], only_local=True) 281 | else: 282 | return None 283 | 284 | def _replace_temp_id(self, temp_id, new_id): 285 | """ 286 | Replaces the temporary id generated locally when an object was first 287 | created, with a real Id supplied by the server. True is returned if 288 | the temporary id was found and replaced, and False otherwise. 289 | """ 290 | # Go through all the objects for which we expect the temporary id to be 291 | # replaced by a real one. 292 | for datatype in [ 293 | "filters", 294 | "items", 295 | "labels", 296 | "notes", 297 | "project_notes", 298 | "projects", 299 | "reminders", 300 | "sections", 301 | ]: 302 | for obj in self.state[datatype]: 303 | if obj.temp_id == temp_id: 304 | obj["id"] = new_id 305 | return True 306 | return False 307 | 308 | def _get(self, call, url=None, **kwargs): 309 | """ 310 | Sends an HTTP GET request to the specified URL, and returns the JSON 311 | object received (if any), or whatever answer it got otherwise. 
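        Used by the simple, non-sync helpers in this module such as query()
        and add_item().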
312 | """ 313 | if not url: 314 | url = self.get_api_url() 315 | 316 | response = self.session.get(url + call, **kwargs) 317 | 318 | try: 319 | return response.json() 320 | except ValueError: 321 | return response.text 322 | 323 | def _post(self, call, url=None, **kwargs): 324 | """ 325 | Sends an HTTP POST request to the specified URL, and returns the JSON 326 | object received (if any), or whatever answer it got otherwise. 327 | """ 328 | if not url: 329 | url = self.get_api_url() 330 | 331 | response = self.session.post(url + call, **kwargs) 332 | 333 | try: 334 | return response.json() 335 | except ValueError: 336 | return response.text 337 | 338 | # Sync 339 | def generate_uuid(self): 340 | """ 341 | Generates a uuid. 342 | """ 343 | return str(uuid.uuid1()) 344 | 345 | def sync(self, commands=None): 346 | """ 347 | Sends to the server the changes that were made locally, and also 348 | fetches the latest updated data from the server. 349 | """ 350 | post_data = { 351 | "token": self.token, 352 | "sync_token": self.sync_token, 353 | "day_orders_timestamp": self.state["day_orders_timestamp"], 354 | "include_notification_settings": 1, 355 | "resource_types": json_dumps(["all"]), 356 | "commands": json_dumps(commands or []), 357 | } 358 | response = self._post("sync", data=post_data) 359 | if "temp_id_mapping" in response: 360 | for temp_id, new_id in response["temp_id_mapping"].items(): 361 | self.temp_ids[temp_id] = new_id 362 | self._replace_temp_id(temp_id, new_id) 363 | self._update_state(response) 364 | self._write_cache() 365 | return response 366 | 367 | def commit(self, raise_on_error=True): 368 | """ 369 | Commits all requests that are queued. Note that, without calling this 370 | method none of the changes that are made to the objects are actually 371 | synchronized to the server, unless one of the aforementioned Sync API 372 | calls are called directly. 373 | """ 374 | if len(self.queue) == 0: 375 | return 376 | ret = self.sync(commands=self.queue) 377 | del self.queue[:] 378 | if "sync_status" in ret: 379 | if raise_on_error: 380 | for k, v in ret["sync_status"].items(): 381 | if v != "ok": 382 | raise SyncError(k, v) 383 | return ret 384 | 385 | # Miscellaneous 386 | 387 | def query(self, queries, **kwargs): 388 | """ 389 | DEPRECATED: query endpoint is deprecated for a long time and this 390 | method will be removed in the next major version of todoist-python 391 | """ 392 | params = {"queries": json_dumps(queries), "token": self.token} 393 | params.update(kwargs) 394 | return self._get("query", params=params) 395 | 396 | def add_item(self, content, **kwargs): 397 | """ 398 | Adds a new task. 
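        The request is sent immediately through the add_item endpoint, so no
        commit() call is needed afterwards.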
399 | """ 400 | params = {"token": self.token, "content": content} 401 | params.update(kwargs) 402 | if "labels" in params: 403 | params["labels"] = str(params["labels"]) 404 | return self._get("add_item", params=params) 405 | 406 | # Class 407 | def __repr__(self): 408 | name = self.__class__.__name__ 409 | unsaved = "*" if len(self.queue) > 0 else "" 410 | email = self.user.get("email") 411 | email_repr = repr(email) if email else "" 412 | return "%s%s(%s)" % (name, unsaved, email_repr) 413 | 414 | 415 | def state_default(obj): 416 | return obj.data 417 | 418 | 419 | def json_default(obj): 420 | if isinstance(obj, datetime.datetime): 421 | return obj.strftime("%Y-%m-%dT%H:%M:%S") 422 | elif isinstance(obj, datetime.date): 423 | return obj.strftime("%Y-%m-%d") 424 | elif isinstance(obj, datetime.time): 425 | return obj.strftime("%H:%M:%S") 426 | 427 | 428 | json_dumps = functools.partial(json.dumps, separators=",:", default=json_default) 429 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | import io 2 | import time 3 | 4 | import todoist 5 | 6 | 7 | def test_stats_get(api_endpoint, api_token): 8 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 9 | response = api.completed.get_stats() 10 | assert "days_items" in response 11 | assert "week_items" in response 12 | assert "karma_trend" in response 13 | assert "karma_last_update" in response 14 | 15 | 16 | def test_user_update(api_endpoint, api_token): 17 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 18 | api.sync() 19 | date_format = api.state["user"]["date_format"] 20 | date_format_new = 1 - date_format 21 | api.user.update(date_format=date_format_new) 22 | api.commit() 23 | assert date_format_new == api.state["user"]["date_format"] 24 | api.user.update_goals(vacation_mode=1) 25 | api.commit() 26 | api.user.update_goals(vacation_mode=0) 27 | api.commit() 28 | 29 | 30 | def test_user_settings_update(api_endpoint, api_token): 31 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 32 | api.sync() 33 | reminder_email = api.state["user_settings"]["reminder_email"] 34 | if reminder_email: 35 | reminder_email = False 36 | else: 37 | reminder_email = True 38 | api.user_settings.update(reminder_email=reminder_email) 39 | api.commit() 40 | assert reminder_email == api.state["user_settings"]["reminder_email"] 41 | 42 | 43 | def test_project_add(cleanup, api_endpoint, api_token): 44 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 45 | api.sync() 46 | 47 | project1 = api.projects.add("Project1") 48 | response = api.commit() 49 | assert response["projects"][0]["name"] == "Project1" 50 | assert "Project1" in [p["name"] for p in api.state["projects"]] 51 | assert api.projects.get_by_id(project1["id"]) == project1 52 | 53 | assert api.projects.get(project1["id"])["project"]["name"] == project1["name"] 54 | 55 | project1.delete() 56 | api.commit() 57 | 58 | 59 | def test_project_delete(cleanup, api_endpoint, api_token): 60 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 61 | api.sync() 62 | 63 | project1 = api.projects.add("Project1") 64 | api.commit() 65 | 66 | project1.delete() 67 | response = api.commit() 68 | assert response["projects"][0]["id"] == project1["id"] 69 | assert response["projects"][0]["is_deleted"] == 1 70 | assert "Project1" not in [p["name"] for p in api.state["projects"]] 71 | 72 | 73 | def test_project_update(cleanup, api_endpoint, api_token): 74 | api = 
todoist.api.TodoistAPI(api_token, api_endpoint) 75 | api.sync() 76 | 77 | project1 = api.projects.add("Project1") 78 | api.commit() 79 | 80 | project1.update(name="UpdatedProject1") 81 | response = api.commit() 82 | assert response["projects"][0]["name"] == "UpdatedProject1" 83 | assert "UpdatedProject1" in [p["name"] for p in api.state["projects"]] 84 | assert api.projects.get_by_id(project1["id"]) == project1 85 | 86 | project1.delete() 87 | api.commit() 88 | 89 | 90 | def test_project_archive(cleanup, api_endpoint, api_token): 91 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 92 | api.sync() 93 | 94 | project1 = api.projects.add("Project1") 95 | api.commit() 96 | 97 | project1.archive() 98 | response = api.commit() 99 | assert response["projects"][0]["name"] == "Project1" 100 | assert response["projects"][0]["is_archived"] == 1 101 | assert "Project1" in [p["name"] for p in api.state["projects"]] 102 | assert 1 in [ 103 | p["is_archived"] for p in api.state["projects"] if p["id"] == project1["id"] 104 | ] 105 | 106 | project1.delete() 107 | api.commit() 108 | 109 | 110 | def test_project_unarchive(cleanup, api_endpoint, api_token): 111 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 112 | api.sync() 113 | 114 | project1 = api.projects.add("Project1") 115 | api.commit() 116 | 117 | project1.archive() 118 | api.commit() 119 | 120 | project1.unarchive() 121 | response = api.commit() 122 | assert response["projects"][0]["name"] == "Project1" 123 | assert response["projects"][0]["is_archived"] == 0 124 | assert 0 in [ 125 | p["is_archived"] for p in api.state["projects"] if p["id"] == project1["id"] 126 | ] 127 | 128 | project1.delete() 129 | api.commit() 130 | 131 | 132 | def test_project_move_to_parent(cleanup, api_endpoint, api_token): 133 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 134 | api.sync() 135 | 136 | project1 = api.projects.add("Project1") 137 | api.commit() 138 | 139 | project2 = api.projects.add("Project2") 140 | api.commit() 141 | 142 | project2.move(project1["id"]) 143 | response = api.commit() 144 | assert response["projects"][0]["name"] == "Project2" 145 | assert response["projects"][0]["parent_id"] == project1["id"] 146 | assert project1["id"] in [ 147 | i["parent_id"] for i in api.state["projects"] if i["id"] == project2["id"] 148 | ] 149 | 150 | project2.delete() 151 | api.commit() 152 | project1.delete() 153 | api.commit() 154 | 155 | 156 | def test_project_reorder(cleanup, api_endpoint, api_token): 157 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 158 | api.sync() 159 | 160 | project1 = api.projects.add("Project1") 161 | api.commit() 162 | 163 | project2 = api.projects.add("Project2") 164 | api.commit() 165 | 166 | api.projects.reorder( 167 | projects=[ 168 | {"id": project1["id"], "child_order": 2}, 169 | {"id": project2["id"], "child_order": 1}, 170 | ] 171 | ) 172 | response = api.commit() 173 | for project in response["projects"]: 174 | if project["id"] == project1["id"]: 175 | assert project["child_order"] == 2 176 | if project["id"] == project2["id"]: 177 | assert project["child_order"] == 1 178 | assert 2 in [ 179 | p["child_order"] for p in api.state["projects"] if p["id"] == project1["id"] 180 | ] 181 | assert 1 in [ 182 | p["child_order"] for p in api.state["projects"] if p["id"] == project2["id"] 183 | ] 184 | 185 | project1.delete() 186 | api.commit() 187 | project2.delete() 188 | api.commit() 189 | 190 | 191 | def test_item_add(cleanup, api_endpoint, api_token): 192 | api = todoist.api.TodoistAPI(api_token, 
api_endpoint) 193 | api.sync() 194 | 195 | response = api.add_item("Item1") 196 | assert response["content"] == "Item1" 197 | api.sync() 198 | assert "Item1" in [i["content"] for i in api.state["items"]] 199 | item1 = [i for i in api.state["items"] if i["content"] == "Item1"][0] 200 | assert api.items.get_by_id(item1["id"]) == item1 201 | 202 | assert api.items.get(item1["id"])["item"]["content"] == item1["content"] 203 | 204 | item1.delete() 205 | api.commit() 206 | 207 | 208 | def test_item_delete(cleanup, api_endpoint, api_token): 209 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 210 | api.sync() 211 | 212 | item1 = api.items.add("Item1") 213 | api.sync() 214 | 215 | item1.delete() 216 | response = api.commit() 217 | assert response["items"][0]["id"] == item1["id"] 218 | assert response["items"][0]["is_deleted"] == 1 219 | assert "Item1" not in [i["content"] for i in api.state["items"]] 220 | 221 | 222 | def test_item_update(cleanup, api_endpoint, api_token): 223 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 224 | api.sync() 225 | 226 | item1 = api.items.add("Item1") 227 | api.commit() 228 | 229 | item1.update(content="UpdatedItem1") 230 | response = api.commit() 231 | assert response["items"][0]["content"] == "UpdatedItem1" 232 | assert "UpdatedItem1" in [i["content"] for i in api.state["items"]] 233 | assert api.items.get_by_id(item1["id"]) == item1 234 | 235 | item1.delete() 236 | api.commit() 237 | 238 | 239 | def test_item_complete(cleanup, api_endpoint, api_token): 240 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 241 | api.sync() 242 | 243 | item1 = api.items.add("Item1") 244 | api.commit() 245 | item2 = api.items.add("Item2", parent_id=item1["id"]) 246 | api.commit() 247 | 248 | item2.complete() 249 | response = api.commit() 250 | assert response["items"][0]["content"] == "Item2" 251 | assert response["items"][0]["checked"] == 1 252 | assert 1 in [i["checked"] for i in api.state["items"] if i["id"] == item2["id"]] 253 | 254 | item1.delete() 255 | api.commit() 256 | item2.delete() 257 | api.commit() 258 | 259 | 260 | def test_item_uncomplete(cleanup, api_endpoint, api_token): 261 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 262 | api.sync() 263 | 264 | item1 = api.items.add("Item1") 265 | api.commit() 266 | item2 = api.items.add("Item2", parent_id=item1["id"]) 267 | api.commit() 268 | item2.complete() 269 | api.commit() 270 | 271 | item2.uncomplete() 272 | response = api.commit() 273 | assert response["items"][0]["content"] == "Item2" 274 | assert response["items"][0]["checked"] == 0 275 | assert 0 in [i["checked"] for i in api.state["items"] if i["id"] == item1["id"]] 276 | 277 | item1.delete() 278 | api.commit() 279 | item2.delete() 280 | api.commit() 281 | 282 | 283 | def test_item_archive(cleanup, api_endpoint, api_token): 284 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 285 | api.sync() 286 | 287 | item1 = api.items.add("Item1") 288 | api.commit() 289 | item2 = api.items.add("Item2", parent_id=item1["id"]) 290 | api.commit() 291 | item2.complete() 292 | api.commit() 293 | 294 | item2.archive() 295 | response = api.commit() 296 | assert response["items"][0]["content"] == "Item2" 297 | assert response["items"][0]["in_history"] == 1 298 | assert 1 in [i["in_history"] for i in api.state["items"] if i["id"] == item2["id"]] 299 | 300 | item1.delete() 301 | api.commit() 302 | item2.delete() 303 | api.commit() 304 | 305 | 306 | def test_item_unarchive(cleanup, api_endpoint, api_token): 307 | api = todoist.api.TodoistAPI(api_token, 
api_endpoint) 308 | api.sync() 309 | 310 | item1 = api.items.add("Item1") 311 | api.commit() 312 | item2 = api.items.add("Item2", parent_id=item1["id"]) 313 | api.commit() 314 | item2.complete() 315 | api.commit() 316 | item2.archive() 317 | api.commit() 318 | 319 | item2.unarchive() 320 | response = api.commit() 321 | assert response["items"][0]["content"] == "Item2" 322 | assert response["items"][0]["in_history"] == 0 323 | assert 0 in [i["in_history"] for i in api.state["items"] if i["id"] == item2["id"]] 324 | 325 | item1.delete() 326 | api.commit() 327 | item2.delete() 328 | api.commit() 329 | 330 | 331 | def test_item_move_to_project(cleanup, api_endpoint, api_token): 332 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 333 | api.sync() 334 | 335 | item1 = api.items.add("Item1") 336 | api.commit() 337 | project1 = api.projects.add("Project1") 338 | api.commit() 339 | 340 | item1.move(project_id=project1["id"]) 341 | response = api.commit() 342 | assert response["items"][0]["content"] == "Item1" 343 | assert response["items"][0]["project_id"] == project1["id"] 344 | assert project1["id"] in [ 345 | i["project_id"] for i in api.state["items"] if i["id"] == item1["id"] 346 | ] 347 | 348 | item1.delete() 349 | api.commit() 350 | project1.delete() 351 | api.commit() 352 | 353 | 354 | def test_item_move_to_section(cleanup, api_endpoint, api_token): 355 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 356 | api.sync() 357 | 358 | item1 = api.items.add("Item1") 359 | api.commit() 360 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 361 | api.commit() 362 | 363 | item1.move(section_id=section1["id"]) 364 | response = api.commit() 365 | assert response["items"][0]["content"] == "Item1" 366 | assert response["items"][0]["section_id"] == section1["id"] 367 | assert section1["id"] in [ 368 | i["section_id"] for i in api.state["items"] if i["id"] == item1["id"] 369 | ] 370 | 371 | item1.delete() 372 | api.commit() 373 | section1.delete() 374 | api.commit() 375 | 376 | 377 | def test_item_move_to_parent(cleanup, api_endpoint, api_token): 378 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 379 | api.sync() 380 | 381 | item1 = api.items.add("Item1") 382 | api.commit() 383 | item2 = api.items.add("Item2") 384 | api.commit() 385 | 386 | item2.move(parent_id=item1["id"]) 387 | response = api.commit() 388 | assert response["items"][0]["content"] == "Item2" 389 | assert response["items"][0]["parent_id"] == item1["id"] 390 | assert item1["id"] in [ 391 | i["parent_id"] for i in api.state["items"] if i["id"] == item2["id"] 392 | ] 393 | 394 | item1.delete() 395 | api.commit() 396 | item2.delete() 397 | api.commit() 398 | 399 | 400 | def test_item_update_date_complete(cleanup, api_endpoint, api_token): 401 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 402 | api.sync() 403 | 404 | item1 = api.items.add("Item1", due={"string": "every day"}) 405 | api.commit() 406 | 407 | now = time.time() 408 | tomorrow = time.gmtime(now + 24 * 3600) 409 | new_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow) 410 | due = { 411 | "date": new_date_utc, 412 | "string": "every day", 413 | } 414 | api.items.update_date_complete(item1["id"], due=due) 415 | response = api.commit() 416 | assert response["items"][0]["due"]["string"] == "every day" 417 | assert "every day" in [ 418 | i["due"]["string"] for i in api.state["items"] if i["id"] == item1["id"] 419 | ] 420 | 421 | item1.delete() 422 | api.commit() 423 | 424 | 425 | def test_item_reorder(cleanup, api_endpoint, 
api_token): 426 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 427 | api.sync() 428 | 429 | item1 = api.items.add("Item1") 430 | api.commit() 431 | item2 = api.items.add("Item2") 432 | api.commit() 433 | 434 | api.items.reorder( 435 | items=[ 436 | {"id": item1["id"], "child_order": 2}, 437 | {"id": item2["id"], "child_order": 1}, 438 | ] 439 | ) 440 | response = api.commit() 441 | for item in response["items"]: 442 | if item["id"] == item1["id"]: 443 | assert item["child_order"] == 2 444 | if item["id"] == item2["id"]: 445 | assert item["child_order"] == 1 446 | assert 2 in [p["child_order"] for p in api.state["items"] if p["id"] == item1["id"]] 447 | assert 1 in [p["child_order"] for p in api.state["items"] if p["id"] == item2["id"]] 448 | 449 | item1.delete() 450 | api.commit() 451 | item2.delete() 452 | api.commit() 453 | 454 | 455 | def test_item_update_day_orders(cleanup, api_endpoint, api_token): 456 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 457 | api.sync() 458 | 459 | item1 = api.items.add("Item1") 460 | api.commit() 461 | item2 = api.items.add("Item2") 462 | api.commit() 463 | 464 | api.items.update_day_orders({item1["id"]: 1, item2["id"]: 2}) 465 | response = api.commit() 466 | for item in response["items"]: 467 | if item["id"] == item1["id"]: 468 | assert item["day_order"] == 1 469 | if item["id"] == item2["id"]: 470 | assert item["day_order"] == 2 471 | assert 1 == api.state["day_orders"][str(item1["id"])] 472 | assert 2 == api.state["day_orders"][str(item2["id"])] 473 | 474 | item1.delete() 475 | api.commit() 476 | 477 | item2.delete() 478 | api.commit() 479 | 480 | 481 | def test_label_add(cleanup, api_endpoint, api_token): 482 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 483 | api.sync() 484 | 485 | label1 = api.labels.add("Label1") 486 | response = api.commit() 487 | assert response["labels"][0]["name"] == "Label1" 488 | assert "Label1" in [l["name"] for l in api.state["labels"]] 489 | assert api.labels.get_by_id(label1["id"]) == label1 490 | 491 | assert api.labels.get(label1["id"])["label"]["name"] == label1["name"] 492 | 493 | label1.delete() 494 | api.commit() 495 | 496 | 497 | def test_label_delete(cleanup, api_endpoint, api_token): 498 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 499 | api.sync() 500 | 501 | label1 = api.labels.add("Label1") 502 | api.commit() 503 | 504 | label1.delete() 505 | response = api.commit() 506 | assert response["labels"][0]["id"] == label1["id"] 507 | assert response["labels"][0]["is_deleted"] == 1 508 | assert "UpdatedLabel1" not in [l["name"] for l in api.state["labels"]] 509 | 510 | 511 | def test_label_update(cleanup, api_endpoint, api_token): 512 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 513 | api.sync() 514 | 515 | label1 = api.labels.add("Label1") 516 | api.commit() 517 | 518 | label1.update(name="UpdatedLabel1") 519 | response = api.commit() 520 | assert response["labels"][0]["name"] == "UpdatedLabel1" 521 | assert "UpdatedLabel1" in [l["name"] for l in api.state["labels"]] 522 | assert api.labels.get_by_id(label1["id"]) == label1 523 | 524 | label1.delete() 525 | api.commit() 526 | 527 | 528 | def test_label_update_orders(cleanup, api_endpoint, api_token): 529 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 530 | api.sync() 531 | 532 | label1 = api.labels.add("Label1") 533 | api.commit() 534 | label2 = api.labels.add("Label2") 535 | api.commit() 536 | 537 | api.labels.update_orders({label1["id"]: 1, label2["id"]: 2}) 538 | response = api.commit() 539 | for label in 
response["labels"]: 540 | if label["id"] == label1["id"]: 541 | assert label["item_order"] == 1 542 | if label["id"] == label2["id"]: 543 | assert label["item_order"] == 2 544 | assert 1 in [ 545 | l["item_order"] for l in api.state["labels"] if l["id"] == label1["id"] 546 | ] 547 | assert 2 in [ 548 | l["item_order"] for l in api.state["labels"] if l["id"] == label2["id"] 549 | ] 550 | 551 | label1.delete() 552 | api.commit() 553 | label2.delete() 554 | api.commit() 555 | 556 | 557 | def test_note_add(cleanup, api_endpoint, api_token): 558 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 559 | api.sync() 560 | 561 | item1 = api.items.add("Item1") 562 | api.commit() 563 | 564 | note1 = api.notes.add(item1["id"], "Note1") 565 | response = api.commit() 566 | assert response["notes"][0]["content"] == "Note1" 567 | assert "Note1" in [n["content"] for n in api.state["notes"]] 568 | assert api.notes.get_by_id(note1["id"]) == note1 569 | 570 | assert api.notes.get(note1["id"])["note"]["content"] == note1["content"] 571 | 572 | note1.delete() 573 | api.commit() 574 | item1.delete() 575 | api.commit() 576 | 577 | 578 | def test_note_delete(cleanup, api_endpoint, api_token): 579 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 580 | api.sync() 581 | 582 | item1 = api.items.add("Item1") 583 | api.commit() 584 | note1 = api.notes.add(item1["id"], "Note1") 585 | api.commit() 586 | 587 | note1.delete() 588 | response = api.commit() 589 | assert response["notes"][0]["id"] == note1["id"] 590 | assert response["notes"][0]["is_deleted"] == 1 591 | assert "UpdatedNote1" not in [n["content"] for n in api.state["notes"]] 592 | 593 | note1.delete() 594 | api.commit() 595 | item1.delete() 596 | api.commit() 597 | 598 | 599 | def test_note_update(cleanup, api_endpoint, api_token): 600 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 601 | api.sync() 602 | 603 | item1 = api.items.add("Item1") 604 | api.commit() 605 | note1 = api.notes.add(item1["id"], "Note1") 606 | api.commit() 607 | 608 | note1.update(content="UpdatedNote1") 609 | response = api.commit() 610 | assert response["notes"][0]["content"] == "UpdatedNote1" 611 | assert "UpdatedNote1" in [n["content"] for n in api.state["notes"]] 612 | assert api.notes.get_by_id(note1["id"]) == note1 613 | 614 | note1.delete() 615 | api.commit() 616 | item1.delete() 617 | api.commit() 618 | 619 | 620 | def test_projectnote_add(cleanup, api_endpoint, api_token): 621 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 622 | api.sync() 623 | 624 | project1 = api.projects.add("Project1") 625 | api.commit() 626 | 627 | note1 = api.project_notes.add(project1["id"], "Note1") 628 | response = api.commit() 629 | assert response["project_notes"][0]["content"] == "Note1" 630 | assert "Note1" in [n["content"] for n in api.state["project_notes"]] 631 | assert api.project_notes.get_by_id(note1["id"]) == note1 632 | 633 | assert api.project_notes.get(note1["id"])["note"]["content"] == note1["content"] 634 | 635 | note1.delete() 636 | api.commit() 637 | project1.delete() 638 | api.commit() 639 | 640 | 641 | def test_projectnote_delete(cleanup, api_endpoint, api_token): 642 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 643 | api.sync() 644 | 645 | project1 = api.projects.add("Project1") 646 | api.commit() 647 | note1 = api.project_notes.add(project1["id"], "Note1") 648 | api.commit() 649 | 650 | note1.delete() 651 | response = api.commit() 652 | assert response["project_notes"][0]["id"] == note1["id"] 653 | assert 
response["project_notes"][0]["is_deleted"] == 1 654 | assert "UpdatedNote1" not in [n["content"] for n in api.state["project_notes"]] 655 | 656 | project1.delete() 657 | api.commit() 658 | 659 | 660 | def test_projectnote_update(cleanup, api_endpoint, api_token): 661 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 662 | api.sync() 663 | 664 | project1 = api.projects.add("Project1") 665 | api.commit() 666 | note1 = api.project_notes.add(project1["id"], "Note1") 667 | api.commit() 668 | 669 | note1.update(content="UpdatedNote1") 670 | response = api.commit() 671 | assert response["project_notes"][0]["content"] == "UpdatedNote1" 672 | assert "UpdatedNote1" in [n["content"] for n in api.state["project_notes"]] 673 | assert api.project_notes.get_by_id(note1["id"]) == note1 674 | 675 | note1.delete() 676 | api.commit() 677 | project1.delete() 678 | api.commit() 679 | 680 | 681 | def test_section_add(cleanup, api_endpoint, api_token): 682 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 683 | api.sync() 684 | 685 | response = api.sections.add("Section1", api.state["user"]["inbox_project"]) 686 | assert response["name"] == "Section1" 687 | api.commit() 688 | assert "Section1" in [i["name"] for i in api.state["sections"]] 689 | section1 = [i for i in api.state["sections"] if i["name"] == "Section1"][0] 690 | assert api.sections.get_by_id(section1["id"]) == section1 691 | 692 | assert api.sections.get(section1["id"])["section"]["name"] == section1["name"] 693 | 694 | section1.delete() 695 | api.commit() 696 | 697 | 698 | def test_section_delete(cleanup, api_endpoint, api_token): 699 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 700 | api.sync() 701 | 702 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 703 | api.commit() 704 | 705 | section1.delete() 706 | response = api.commit() 707 | assert response["sections"][0]["id"] == section1["id"] 708 | assert response["sections"][0]["is_deleted"] == 1 709 | assert "Section1" not in [i["name"] for i in api.state["sections"]] 710 | 711 | 712 | def test_section_update(cleanup, api_endpoint, api_token): 713 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 714 | api.sync() 715 | 716 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 717 | api.commit() 718 | 719 | section1.update(name="UpdatedSection1") 720 | response = api.commit() 721 | assert response["sections"][0]["name"] == "UpdatedSection1" 722 | assert "UpdatedSection1" in [i["name"] for i in api.state["sections"]] 723 | assert api.sections.get_by_id(section1["id"]) == section1 724 | 725 | section1.delete() 726 | api.commit() 727 | 728 | 729 | def test_section_archive(cleanup, api_endpoint, api_token): 730 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 731 | api.sync() 732 | 733 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 734 | api.commit() 735 | 736 | section1.archive() 737 | response = api.commit() 738 | assert response["sections"][0]["name"] == "Section1" 739 | assert response["sections"][0]["is_archived"] == 1 740 | 741 | section1.delete() 742 | api.commit() 743 | 744 | 745 | def test_section_unarchive(cleanup, api_endpoint, api_token): 746 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 747 | api.sync() 748 | 749 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 750 | api.commit() 751 | 752 | section1.unarchive() 753 | response = api.commit() 754 | assert response["sections"][0]["name"] == "Section1" 755 | assert 
response["sections"][0]["is_archived"] == 0 756 | 757 | section1.delete() 758 | api.commit() 759 | 760 | 761 | def test_section_move_to_project(cleanup, api_endpoint, api_token): 762 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 763 | api.sync() 764 | 765 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 766 | api.commit() 767 | project1 = api.projects.add("Project1") 768 | api.commit() 769 | 770 | section1.move(project_id=project1["id"]) 771 | response = api.commit() 772 | assert response["sections"][0]["name"] == "Section1" 773 | assert response["sections"][0]["project_id"] == project1["id"] 774 | assert project1["id"] in [ 775 | i["project_id"] for i in api.state["sections"] if i["id"] == section1["id"] 776 | ] 777 | 778 | section1.delete() 779 | api.commit() 780 | project1.delete() 781 | api.commit() 782 | 783 | 784 | def test_section_reorder(cleanup, api_endpoint, api_token): 785 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 786 | api.sync() 787 | 788 | section1 = api.sections.add("Section1", api.state["user"]["inbox_project"]) 789 | api.commit() 790 | section2 = api.sections.add("Section2", api.state["user"]["inbox_project"]) 791 | api.commit() 792 | 793 | api.sections.reorder( 794 | sections=[ 795 | {"id": section1["id"], "section_order": 2}, 796 | {"id": section2["id"], "section_order": 1}, 797 | ] 798 | ) 799 | response = api.commit() 800 | for section in response["sections"]: 801 | if section["id"] == section1["id"]: 802 | assert section["section_order"] == 2 803 | if section["id"] == section2["id"]: 804 | assert section["section_order"] == 1 805 | assert 2 in [ 806 | p["section_order"] for p in api.state["sections"] if p["id"] == section1["id"] 807 | ] 808 | assert 1 in [ 809 | p["section_order"] for p in api.state["sections"] if p["id"] == section2["id"] 810 | ] 811 | 812 | section1.delete() 813 | api.commit() 814 | section2.delete() 815 | api.commit() 816 | 817 | 818 | def test_filter_add(cleanup, api_endpoint, api_token): 819 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 820 | api.sync() 821 | 822 | filter1 = api.filters.add("Filter1", "no due date") 823 | response = api.commit() 824 | assert response["filters"][0]["name"] == "Filter1" 825 | assert "Filter1" in [f["name"] for f in api.state["filters"]] 826 | assert api.filters.get_by_id(filter1["id"]) == filter1 827 | assert api.filters.get(filter1["id"])["filter"]["name"] == filter1["name"] 828 | 829 | filter1.delete() 830 | api.commit() 831 | 832 | 833 | def test_filter_delete(cleanup, api_endpoint, api_token): 834 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 835 | api.sync() 836 | 837 | filter1 = api.filters.add("Filter1", "no due date") 838 | api.commit() 839 | 840 | filter1.delete() 841 | response = api.commit() 842 | assert response["filters"][0]["id"] == filter1["id"] 843 | assert response["filters"][0]["is_deleted"] == 1 844 | assert "Filter1" not in [p["name"] for p in api.state["filters"]] 845 | 846 | 847 | def test_filter_update(cleanup, api_endpoint, api_token): 848 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 849 | api.sync() 850 | 851 | filter1 = api.filters.add("Filter1", "no due date") 852 | api.commit() 853 | 854 | filter1.update(name="UpdatedFilter1") 855 | response = api.commit() 856 | assert response["filters"][0]["name"] == "UpdatedFilter1" 857 | assert "UpdatedFilter1" in [f["name"] for f in api.state["filters"]] 858 | assert api.filters.get_by_id(filter1["id"]) == filter1 859 | 860 | filter1.delete() 861 | api.commit() 862 | 
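# A minimal illustrative sketch (not one of the original tests): manager calls
# only queue commands locally, so several changes can be batched and flushed
# with a single commit(), which issues one sync request for the whole queue:
#
#     api = todoist.api.TodoistAPI(api_token, api_endpoint)
#     api.sync()
#     filter_a = api.filters.add("FilterA", "no due date")
#     filter_b = api.filters.add("FilterB", "today")
#     response = api.commit()  # both queued commands are sent in one request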
863 | 864 | def test_filter_update_orders(cleanup, api_endpoint, api_token): 865 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 866 | api.sync() 867 | 868 | filter1 = api.filters.add("Filter1", "no due date") 869 | api.commit() 870 | 871 | filter2 = api.filters.add("Filter2", "today") 872 | api.commit() 873 | 874 | api.filters.update_orders({filter1["id"]: 2, filter2["id"]: 1}) 875 | response = api.commit() 876 | for filter in response["filters"]: 877 | if filter["id"] == filter1["id"]: 878 | assert filter["item_order"] == 2 879 | if filter["id"] == filter2["id"]: 880 | assert filter["item_order"] == 1 881 | assert 2 in [ 882 | f["item_order"] for f in api.state["filters"] if f["id"] == filter1["id"] 883 | ] 884 | assert 1 in [ 885 | f["item_order"] for f in api.state["filters"] if f["id"] == filter2["id"] 886 | ] 887 | 888 | filter1.delete() 889 | api.commit() 890 | filter2.delete() 891 | api.commit() 892 | 893 | 894 | def test_reminder_relative_add(cleanup, api_endpoint, api_token): 895 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 896 | api.sync() 897 | 898 | item1 = api.items.add("Item1", due={"string": "tomorrow 5pm"}) 899 | api.commit() 900 | 901 | reminder1 = api.reminders.add(item1["id"], minute_offset=30) 902 | response = api.commit() 903 | assert response["reminders"][0]["minute_offset"] == 30 904 | assert reminder1["id"] in [p["id"] for p in api.state["reminders"]] 905 | assert api.reminders.get_by_id(reminder1["id"]) == reminder1 906 | assert api.reminders.get(reminder1["id"])["reminder"]["due"] == reminder1["due"] 907 | 908 | reminder1.delete() 909 | api.commit() 910 | item1.delete() 911 | api.commit() 912 | 913 | 914 | def test_reminder_relative_delete(cleanup, api_endpoint, api_token): 915 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 916 | api.sync() 917 | 918 | item1 = api.items.add("Item1", due={"string": "tomorrow 5pm"}) 919 | api.commit() 920 | reminder1 = api.reminders.add(item1["id"], minute_offset=30) 921 | api.commit() 922 | 923 | reminder1.delete() 924 | response = api.commit() 925 | assert response["reminders"][0]["is_deleted"] == 1 926 | assert reminder1["id"] not in [p["id"] for p in api.state["reminders"]] 927 | 928 | item1.delete() 929 | api.commit() 930 | 931 | 932 | def test_reminder_relative_update(cleanup, api_endpoint, api_token): 933 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 934 | api.sync() 935 | 936 | item1 = api.items.add("Item1", due={"string": "tomorrow 5pm"}) 937 | api.commit() 938 | reminder1 = api.reminders.add(item1["id"], minute_offset=30) 939 | api.commit() 940 | 941 | reminder1.update(minute_offset=15) 942 | response = api.commit() 943 | assert response["reminders"][0]["minute_offset"] == 15 944 | assert reminder1["id"] in [p["id"] for p in api.state["reminders"]] 945 | assert api.reminders.get_by_id(reminder1["id"]) == reminder1 946 | 947 | reminder1.delete() 948 | api.commit() 949 | item1.delete() 950 | api.commit() 951 | 952 | 953 | def test_reminder_absolute_add(cleanup, api_endpoint, api_token): 954 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 955 | api.sync() 956 | 957 | item1 = api.items.add("Item1", due={"string": "tomorrow 5pm"}) 958 | api.commit() 959 | 960 | now = time.time() 961 | tomorrow = time.gmtime(now + 24 * 3600) 962 | due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow) 963 | reminder1 = api.reminders.add(item1["id"], due={"date": due_date_utc}) 964 | response = api.commit() 965 | assert response["reminders"][0]["due"]["date"] == due_date_utc 966 | # the new reminder should also be reflected in the local state
967 | assert reminder1["id"] in [p["id"] for p in api.state["reminders"]] 968 | assert api.reminders.get_by_id(reminder1["id"]) == reminder1 969 | assert api.reminders.get(reminder1["id"])["reminder"]["due"] == reminder1["due"] 970 | 971 | reminder1.delete() 972 | api.commit() 973 | item1.delete() 974 | api.commit() 975 | 976 | 977 | def test_reminder_absolute_delete(cleanup, api_endpoint, api_token): 978 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 979 | api.sync() 980 | 981 | item1 = api.items.add("Item1", due={"string": "tomorrow 5pm"}) 982 | api.commit() 983 | 984 | now = time.time() 985 | tomorrow = time.gmtime(now + 24 * 3600) 986 | due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow) 987 | reminder1 = api.reminders.add(item1["id"], due={"date": due_date_utc}) 988 | api.commit() 989 | 990 | api.reminders.delete(reminder1["id"]) 991 | response = api.commit() 992 | assert response["reminders"][0]["is_deleted"] == 1 993 | assert reminder1["id"] not in [p["id"] for p in api.state["reminders"]] 994 | 995 | item1.delete() 996 | api.commit() 997 | 998 | 999 | def test_reminder_absolute_update(cleanup, api_endpoint, api_token): 1000 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1001 | api.sync() 1002 | 1003 | item1 = api.items.add("Item1", due={"string": "tomorrow 5pm"}) 1004 | api.commit() 1005 | 1006 | now = time.time() 1007 | tomorrow = time.gmtime(now + 24 * 3600) 1008 | due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow) 1009 | reminder1 = api.reminders.add(item1["id"], due={"date": due_date_utc}) 1010 | api.commit() 1011 | 1012 | tomorrow = time.gmtime(now + 24 * 3600 + 60) 1013 | due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow) 1014 | api.reminders.update(reminder1["id"], due_date_utc=due_date_utc) 1015 | response = api.commit() 1016 | assert response["reminders"][0]["due"]["date"] == due_date_utc 1017 | assert reminder1["id"] in [p["id"] for p in api.state["reminders"]] 1018 | assert api.reminders.get_by_id(reminder1["id"]) == reminder1 1019 | 1020 | reminder1.delete() 1021 | api.commit() 1022 | item1.delete() 1023 | api.commit() 1024 | 1025 | 1026 | def test_locations(api_endpoint, api_token): 1027 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1028 | 1029 | api.sync() 1030 | 1031 | api.locations.clear() 1032 | api.commit() 1033 | 1034 | assert api.state["locations"] == [] 1035 | 1036 | 1037 | def test_live_notifications(api_endpoint, api_token): 1038 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1039 | 1040 | api.sync() 1041 | 1042 | api.live_notifications.set_last_read(api.state["live_notifications_last_read_id"]) 1043 | response = api.commit() 1044 | assert ( 1045 | response["live_notifications_last_read_id"] 1046 | == api.state["live_notifications_last_read_id"] 1047 | ) 1048 | 1049 | 1050 | def test_share_accept(cleanup, cleanup2, api_endpoint, api_token, api_token2): 1051 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1052 | api2 = todoist.api.TodoistAPI(api_token2, api_endpoint) 1053 | 1054 | api.user.update(auto_invite_disabled=1) 1055 | api.commit() 1056 | api.sync() 1057 | 1058 | api2.user.update(auto_invite_disabled=1) 1059 | api2.commit() 1060 | api2.sync() 1061 | 1062 | project1 = api.projects.add("Project1") 1063 | api.commit() 1064 | 1065 | api.projects.share(project1["id"], api2.state["user"]["email"]) 1066 | response = api.commit() 1067 | assert response["projects"][0]["name"] == project1["name"] 1068 | assert
response["projects"][0]["shared"] 1069 | 1070 | response2 = api2.sync() 1071 | invitation1 = next( 1072 | ( 1073 | ln 1074 | for ln in response2["live_notifications"] 1075 | if ln["notification_type"] == "share_invitation_sent" 1076 | ), 1077 | None, 1078 | ) 1079 | assert invitation1 is not None 1080 | assert invitation1["project_name"] == project1["name"] 1081 | assert invitation1["from_user"]["email"] == api.state["user"]["email"] 1082 | 1083 | api2.invitations.accept(invitation1["id"], invitation1["invitation_secret"]) 1084 | response2 = api2.commit() 1085 | assert api2.state["user"]["id"] in [ 1086 | p["user_id"] for p in api2.state["collaborator_states"] 1087 | ] 1088 | 1089 | api.sync() 1090 | project1 = [p for p in api.state["projects"] if p["name"] == "Project1"][0] 1091 | project1.delete() 1092 | api.commit() 1093 | 1094 | 1095 | def test_share_reject(cleanup, cleanup2, api_endpoint, api_token, api_token2): 1096 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1097 | api2 = todoist.api.TodoistAPI(api_token2, api_endpoint) 1098 | 1099 | api.user.update(auto_invite_disabled=1) 1100 | api.commit() 1101 | api.sync() 1102 | 1103 | api2.user.update(auto_invite_disabled=1) 1104 | api2.commit() 1105 | api2.sync() 1106 | 1107 | project1 = api.projects.add("Project1") 1108 | api.commit() 1109 | 1110 | api.projects.share(project1["id"], api2.state["user"]["email"]) 1111 | response = api.commit() 1112 | assert response["projects"][0]["name"] == project1["name"] 1113 | assert response["projects"][0]["shared"] 1114 | 1115 | response2 = api2.sync() 1116 | invitation2 = next( 1117 | ( 1118 | ln 1119 | for ln in response2["live_notifications"] 1120 | if ln["notification_type"] == "share_invitation_sent" 1121 | ), 1122 | None, 1123 | ) 1124 | assert invitation2 is not None 1125 | assert invitation2["project_name"] == project1["name"] 1126 | assert invitation2["from_user"]["email"] == api.state["user"]["email"] 1127 | 1128 | api2.invitations.reject(invitation2["id"], invitation2["invitation_secret"]) 1129 | response2 = api2.commit() 1130 | assert len(response2["projects"]) == 0 1131 | assert len(response2["collaborator_states"]) == 0 1132 | 1133 | project1 = [p for p in api.state["projects"] if p["name"] == "Project1"][0] 1134 | project1.delete() 1135 | api.commit() 1136 | 1137 | 1138 | def test_share_delete(cleanup, cleanup2, api_endpoint, api_token, api_token2): 1139 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1140 | api2 = todoist.api.TodoistAPI(api_token2, api_endpoint) 1141 | 1142 | api.user.update(auto_invite_disabled=1) 1143 | api.commit() 1144 | api.sync() 1145 | 1146 | api2.user.update(auto_invite_disabled=1) 1147 | api2.commit() 1148 | api2.sync() 1149 | 1150 | project1 = api.projects.add("Project1") 1151 | api.commit() 1152 | 1153 | api.projects.share(project1["id"], api2.state["user"]["email"]) 1154 | response = api.commit() 1155 | assert response["projects"][0]["name"] == project1["name"] 1156 | assert response["projects"][0]["shared"] 1157 | 1158 | response2 = api2.sync() 1159 | invitation3 = next( 1160 | ( 1161 | ln 1162 | for ln in response2["live_notifications"] 1163 | if ln["notification_type"] == "share_invitation_sent" 1164 | ), 1165 | None, 1166 | ) 1167 | assert invitation3 is not None 1168 | assert invitation3["project_name"] == project1["name"] 1169 | assert invitation3["from_user"]["email"] == api.state["user"]["email"] 1170 | 1171 | api.invitations.delete(invitation3["id"]) 1172 | api.commit() 1173 | 1174 | project1 = [p for p in api.state["projects"] if 
p["name"] == "Project1"][0] 1175 | project1.delete() 1176 | api.commit() 1177 | 1178 | 1179 | def test_items_archive(cleanup, api_endpoint, api_token): 1180 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1181 | 1182 | # Create and complete five tasks 1183 | project = api.projects.add("Project") 1184 | items = [ 1185 | api.items.add("task{}".format(i), project_id=project["id"]) for i in range(5) 1186 | ] 1187 | for i, item in enumerate(items): 1188 | date_completed = "2019-01-01T00:00:0{}Z".format(i) 1189 | api.items.complete(item_id=item["id"], date_completed=date_completed) 1190 | api.commit() 1191 | 1192 | # Create an archive manager to iterate over them 1193 | manager = api.items_archive.for_project(project["id"]) 1194 | item_ids = [item["id"] for item in manager.items()] 1195 | assert item_ids == [item["id"] for item in items[::-1]] 1196 | 1197 | # tear down 1198 | project.delete() 1199 | api.commit() 1200 | 1201 | 1202 | def test_sections_archive(cleanup, api_endpoint, api_token): 1203 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1204 | 1205 | # Create and complete five sections 1206 | project = api.projects.add("Project") 1207 | sections = [ 1208 | api.sections.add("s{}".format(i), project_id=project["id"]) for i in range(5) 1209 | ] 1210 | for i, section in enumerate(sections): 1211 | date_archived = "2019-01-01T00:00:0{}Z".format(i) 1212 | api.sections.archive(section_id=section["id"], date_archived=date_archived) 1213 | api.commit() 1214 | 1215 | # Create an archive manager to iterate over them 1216 | manager = api.sections_archive.for_project(project["id"]) 1217 | section_ids = [section["id"] for section in manager.sections()] 1218 | assert section_ids == [section["id"] for section in sections[::-1]] 1219 | 1220 | # tear down 1221 | project.delete() 1222 | api.commit() 1223 | 1224 | 1225 | def test_templates(cleanup, api_endpoint, api_token): 1226 | api = todoist.api.TodoistAPI(api_token, api_endpoint) 1227 | 1228 | api.sync() 1229 | 1230 | project1 = api.projects.add("Project1") 1231 | project2 = api.projects.add("Project2") 1232 | api.commit() 1233 | 1234 | item1 = api.items.add("Item1", project_id=project1["id"]) 1235 | api.commit() 1236 | 1237 | template = api.templates.export_as_file(project1["id"]) 1238 | assert "task,Item1,4,1" in template 1239 | with io.open("/tmp/example.csv", "w", encoding="utf-8") as example: 1240 | example.write(template) 1241 | 1242 | result = api.templates.import_into_project(project1["id"], "/tmp/example.csv") 1243 | assert result == {"status": u"ok"} 1244 | 1245 | item1.delete() 1246 | api.commit() 1247 | project1.delete() 1248 | api.commit() 1249 | project2.delete() 1250 | api.commit() 1251 | --------------------------------------------------------------------------------