├── .env.example ├── .flake8 ├── .github └── workflows │ └── ci.yaml ├── .gitignore ├── LICENSE.md ├── README.rst ├── integration_tests ├── __init__.py ├── test_client.py ├── test_helper.py └── test_session.py ├── poetry.lock ├── pyproject.toml ├── python_picnic_api ├── __init__.py ├── client.py ├── helper.py └── session.py └── tests ├── __init__.py ├── test_client.py └── test_session.py /.env.example: -------------------------------------------------------------------------------- 1 | # you can use this .env file when running pytest 2 | USERNAME="john@doe.com" 3 | PASSWORD="password123" 4 | COUNTRY_CODE="NL" # use either "NL" or "DE" -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | extend-ignore = E203, E266, E501 3 | # line length is intentionally set to 80 here because black uses Bugbear 4 | # See https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length for more details 5 | max-line-length = 80 6 | max-complexity = 18 7 | select = B,C,E,F,W,T4,B9 8 | # We need to configure the mypy.ini because the flake8-mypy's default 9 | # options don't properly override it, so if we don't specify it we get 10 | # half of the config from mypy.ini and half from flake8-mypy. 11 | mypy_config = mypy.ini -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | build: 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | python-version: [3.6, 3.7, 3.8] 15 | poetry-version: [1.1.4] 16 | 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - uses: actions/checkout@v2 21 | 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v2 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | 27 | - name: Install poetry ${{ matrix.poetry-version }} 28 | run: | 29 | python -m ensurepip 30 | python -m pip install --upgrade pip 31 | python -m pip install poetry==${{ matrix.poetry-version }} 32 | 33 | - name: Install dependencies 34 | shell: bash 35 | run: python -m poetry install 36 | 37 | - name: Test with pytest 38 | env: 39 | USERNAME: ${{ secrets.PICNIC_USERNAME }} 40 | PASSWORD: ${{ secrets.PICNIC_PASSWORD }} 41 | COUNTRY_CODE: ${{ secrets.PICNIC_COUNTRY_CODE }} 42 | run: | 43 | python -m poetry run python -m pytest -v tests 44 | 45 | - name: Lint with flake8 46 | run: | 47 | python -m poetry run python -m flake8 . 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 91 | #Pipfile.lock 92 | 93 | # celery beat schedule file 94 | celerybeat-schedule 95 | 96 | # SageMath parsed files 97 | *.sage.py 98 | 99 | # Environments 100 | .env 101 | .venv 102 | env/ 103 | venv/ 104 | ENV/ 105 | env.bak/ 106 | venv.bak/ 107 | 108 | # Spyder project settings 109 | .spyderproject 110 | .spyproject 111 | 112 | # Rope project settings 113 | .ropeproject 114 | 115 | # mkdocs documentation 116 | /site 117 | 118 | # mypy 119 | .mypy_cache/ 120 | .dmypy.json 121 | dmypy.json 122 | 123 | # Pyre type checker 124 | .pyre/ 125 | 126 | # Visual Studio Code 127 | .vscode 128 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "{}" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright {yyyy} {name of copyright owner} 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | """"""""""""""""" 2 | Python-Picnic-API 3 | """"""""""""""""" 4 | 5 | .. 
image:: https://camo.githubusercontent.com/cd005dca0ef55d7725912ec03a936d3a7c8de5b5/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f6275792532306d6525323061253230636f666665652d646f6e6174652d79656c6c6f772e737667 6 | :target: https://www.buymeacoffee.com/MikeBrink 7 | :alt: Buy me a coffee 8 | 9 | Unofficial Python wrapper for the Picnic_ API. While not all API methods have been implemented yet, you'll find that most of what you need to build a working application is available. 10 | 11 | This library is not affiliated with Picnic and retrieves data from the endpoints of the mobile application. Use at your own risk. 12 | 13 | .. _Picnic: https://picnic.app/nl/ 14 | 15 | =============== 16 | Getting started 17 | =============== 18 | The easiest way to install is directly from pip:: 19 | 20 | $ pip install python-picnic-api 21 | 22 | 23 | ----- 24 | Usage 25 | ----- 26 | I'll go over a few common operations here that you'll most likely use in applications. 27 | To log in: 28 | 29 | .. code-block:: python 30 | 31 | from python_picnic_api import PicnicAPI 32 | 33 | picnic = PicnicAPI(username='username', password='password', country_code="NL") 34 | 35 | The country_code parameter defaults to NL; change it if you live in a country other than the Netherlands (Germany: DE, Belgium: BE). 36 | You can also store your credentials by setting the store value to true; this saves your credentials and country_code in /config/app.yaml. 37 | 38 | Searching for a product 39 | ----------------------- 40 | .. code-block:: python 41 | 42 | >>> picnic.search('coffee') 43 | [{'type': 'CATEGORY', 'id': 'coffee', 'links': [{'type': 'SEARCH', 'href': 'https://storefront-prod.nl.picnicinternational.com/api/15/search?search_term=coffee'}], 'name': 'coffee', 'items': [{'type': 'SINGLE_ARTICLE', 'id': '10511523', 'decorators': [{'type': 'UNIT_QUANTITY', 'unit_quantity_text': '500 gram'}], 'name': 'Lavazza espresso koffiebonen', 'display_price': 599, 'price': 599, 'image_id': 'd3fb2888fc41514bc06dfd6b52f8622cc222d017d2651501f227a537915fcc4f', 'max_count': 50, 'unit_quantity': '500 gram', 'unit_quantity_sub': '€11.98/kg', 'tags': []}, ... 44 | 45 | Check cart 46 | ---------- 47 | .. code-block:: python 48 | 49 | >>> picnic.get_cart() 50 | {'type': 'ORDER', 'id': 'shopping_cart', 'items': [], 'delivery_slots': [... 51 | 52 | 53 | Manipulating your cart 54 | ---------------------- 55 | All of these methods will return the shopping cart. 56 | 57 | .. code-block:: python 58 | 59 | # adding 2 'Lavazza espresso koffiebonen' to cart 60 | picnic.add_product('10511523', count=2) 61 | 62 | # removing 1 'Lavazza espresso koffiebonen' from cart 63 | picnic.remove_product('10511523') 64 | 65 | # clearing the cart 66 | picnic.clear_cart() 67 | 68 | See upcoming deliveries 69 | ------------------------ 70 | .. code-block:: python 71 | 72 | >>> picnic.get_current_deliveries() 73 | [] 74 | 75 |
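See all deliveries
------------------
The client also exposes get_deliveries and get_delivery (both are exercised in the integration tests in this repository). A minimal sketch, assuming your account has at least one delivery; the exact payload returned depends on your account:

.. code-block:: python

    >>> # list delivery summaries, then fetch the first delivery in full by its id
    >>> deliveries = picnic.get_deliveries(summary=True)
    >>> picnic.get_delivery(deliveries[0]['id'])

76 | See available delivery slots 77 | ---------------------------- 78 | ..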
code-block:: python 79 | 80 | >>> picnic.get_delivery_slots() 81 | 82 | -------------------------------------------------------------------------------- /integration_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MikeBrink/python-picnic-api/f2029134dfcb4cd92a234be1796487e052960177/integration_tests/__init__.py -------------------------------------------------------------------------------- /integration_tests/test_client.py: -------------------------------------------------------------------------------- 1 | from python_picnic_api import PicnicAPI 2 | from dotenv import load_dotenv 3 | import os 4 | 5 | load_dotenv() 6 | 7 | username = os.getenv("USERNAME") 8 | password = os.getenv("PASSWORD") 9 | country_code = os.getenv("COUNTRY_CODE") 10 | 11 | picnic = PicnicAPI(username, password, country_code=country_code) 12 | 13 | 14 | def _get_amount(cart: dict, product_id: str): 15 | items = cart["items"][0]["items"] 16 | product = next((item for item in items if item["id"] == product_id), None) 17 | return product["decorators"][0]["quantity"] 18 | 19 | 20 | def test_get_user(): 21 | response = picnic.get_user() 22 | assert isinstance(response, dict) 23 | assert "contact_email" in response.keys() 24 | assert response["contact_email"] == username 25 | 26 | 27 | def test_search(): 28 | response = picnic.search("koffie") 29 | assert isinstance(response, list) 30 | assert isinstance(response[0], dict) 31 | assert "id" in response[0].keys() 32 | assert response[0]["id"] == "koffie" 33 | 34 | 35 | def test_get_article(): 36 | response = picnic.get_article("s1001546") 37 | assert isinstance(response, dict) 38 | assert "id" in response.keys() 39 | assert response["id"] == "s1001546" 40 | assert response["name"] == "Douwe Egberts aroma rood filterkoffie" 41 | 42 | 43 | def test_get_article_with_category_name(): 44 | response = picnic.get_article("s1001546", add_category_name=True) 45 | assert isinstance(response, dict) 46 | assert "id" in response.keys() 47 | assert response["id"] == "s1001546" 48 | assert response["name"] == "Douwe Egberts aroma rood filterkoffie" 49 | assert "category_name" in response.keys() 50 | assert response["category_name"] == "Koffie & thee" 51 | 52 | 53 | def test_get_lists(): 54 | response_1 = picnic.get_lists() 55 | response_2 = picnic.get_lists("21725") 56 | assert isinstance(response_1, list) 57 | assert isinstance(response_2, list) 58 | 59 | 60 | def test_get_cart(): 61 | response = picnic.get_cart() 62 | assert isinstance(response, dict) 63 | assert "id" in response.keys() 64 | assert response["id"] == "shopping_cart" 65 | 66 | 67 | def test_add_product(): 68 | # need a clear cart for reproducibility 69 | picnic.clear_cart() 70 | response = picnic.add_product("10407428", count=2) 71 | 72 | assert isinstance(response, dict) 73 | assert "items" in response.keys() 74 | assert any(item["id"] == "10407428" for item in response["items"][0]["items"]) 75 | assert _get_amount(response, "10407428") == 2 76 | 77 | 78 | def test_remove_product(): 79 | # need a clear cart for reproducibility 80 | picnic.clear_cart() 81 | # add two coffee to the cart so we can remove 1 82 | picnic.add_product("10407428", count=2) 83 | 84 | response = picnic.remove_product("10407428", count=1) 85 | amount = _get_amount(response, "10407428") 86 | 87 | assert isinstance(response, dict) 88 | assert "items" in response.keys() 89 | assert amount == 1 90 | 91 | 92 | def test_clear_cart(): 93 | # need a clear cart for 
reproducibility 94 | picnic.clear_cart() 95 | # add two coffee to the cart so we can clear it 96 | picnic.add_product("10407428", count=2) 97 | 98 | response = picnic.clear_cart() 99 | 100 | assert isinstance(response, dict) 101 | assert "items" in response.keys() 102 | assert len(response["items"]) == 0 103 | 104 | 105 | def test_get_delivery_slots(): 106 | response = picnic.get_delivery_slots() 107 | assert isinstance(response, dict) 108 | assert "delivery_slots" in response.keys() 109 | assert isinstance(response["delivery_slots"], list) 110 | 111 | 112 | def test_get_deliveries(): 113 | response_1 = picnic.get_deliveries() 114 | response_2 = picnic.get_deliveries(summary=True) 115 | 116 | assert isinstance(response_1, list) 117 | assert isinstance(response_1[0], dict) 118 | assert response_1[0]["type"] == "DELIVERY" 119 | 120 | assert isinstance(response_2, list) 121 | assert isinstance(response_2[0], dict) 122 | 123 | assert response_1 != response_2 124 | 125 | 126 | def test_get_delivery(): 127 | # get a id to test against 128 | response = picnic.get_deliveries() 129 | deliveryId = response[0]["id"] 130 | 131 | response = picnic.get_delivery(deliveryId) 132 | assert isinstance(response, dict) 133 | assert response["type"] == "DELIVERY" 134 | assert response["id"] == deliveryId 135 | 136 | 137 | def test_get_current_deliveries(): 138 | response = picnic.get_current_deliveries() 139 | assert isinstance(response, list) 140 | 141 | 142 | def test_get_categories(): 143 | response = picnic.get_categories() 144 | assert isinstance(response, list) 145 | 146 | 147 | def test_print_categories(capsys): 148 | picnic.print_categories() 149 | captured = capsys.readouterr() 150 | 151 | assert isinstance(captured.out, str) 152 | 153 | 154 | # TO DO: add test for re-logging 155 | -------------------------------------------------------------------------------- /integration_tests/test_helper.py: -------------------------------------------------------------------------------- 1 | from python_picnic_api.helper import get_image, get_recipe_image 2 | import requests 3 | 4 | def test_get_image(): 5 | id = "8560e1f1c2d2811dfefbbb2342ef0d95250533f2131416aca459bde35d73e901" 6 | size = "tile-medium" 7 | suffix = "webp" 8 | url = get_image(id, size=size, suffix=suffix) 9 | 10 | response = requests.get(url) 11 | 12 | # Check if the response status code indicates success 13 | assert response.status_code == 200, "Failed to fetch URL" 14 | 15 | # Check if the response content type is an image format 16 | content_type = response.headers.get("content-type") 17 | assert content_type.startswith("image/"), "URL does not return an image" 18 | 19 | 20 | def test_get_recipe_image(): 21 | id = "5c4cc7cb7a0429695da708394eb0cae1bd9b92935ac76c8fda63bbc57ad5b826" 22 | size = "medium" 23 | url = get_recipe_image(id, size=size) 24 | print(url) 25 | 26 | response = requests.get(url) 27 | 28 | # Check if the response status code indicates success 29 | assert response.status_code == 200, "Failed to fetch URL" 30 | 31 | # Check if the response content type is an image format 32 | content_type = response.headers.get("content-type") 33 | assert content_type.startswith("image/"), "URL does not return an image" 34 | -------------------------------------------------------------------------------- /integration_tests/test_session.py: -------------------------------------------------------------------------------- 1 | from python_picnic_api.session import PicnicAPISession, PicnicAuthError 2 | from python_picnic_api.helper import 
_url_generator 3 | from requests import Session 4 | from dotenv import load_dotenv 5 | import os 6 | 7 | load_dotenv() 8 | 9 | username = os.getenv("USERNAME") 10 | password = os.getenv("PASSWORD") 11 | country_code = os.getenv("COUNTRY_CODE") 12 | 13 | DEFAULT_URL = "https://storefront-prod.{}.picnicinternational.com/api/{}" 14 | DEFAULT_API_VERSION = "15" 15 | 16 | 17 | def test_init(): 18 | assert issubclass(PicnicAPISession, Session) 19 | 20 | 21 | def test_login(): 22 | base_url = _url_generator(DEFAULT_URL, country_code, DEFAULT_API_VERSION) 23 | 24 | session = PicnicAPISession() 25 | session.login(username, password, base_url) 26 | assert "x-picnic-auth" in session.headers.keys() 27 | 28 | 29 | def test_login_auth_error(): 30 | base_url = _url_generator(DEFAULT_URL, country_code, DEFAULT_API_VERSION) 31 | 32 | try: 33 | session = PicnicAPISession() 34 | session.login('username', 'password', base_url) 35 | except PicnicAuthError: 36 | assert True 37 | else: 38 | assert False 39 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "appdirs" 3 | version = "1.4.4" 4 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 5 | category = "dev" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "atomicwrites" 11 | version = "1.4.0" 12 | description = "Atomic file writes." 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 16 | 17 | [[package]] 18 | name = "attrs" 19 | version = "20.3.0" 20 | description = "Classes Without Boilerplate" 21 | category = "dev" 22 | optional = false 23 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 24 | 25 | [package.extras] 26 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] 27 | docs = ["furo", "sphinx", "zope.interface"] 28 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 29 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] 30 | 31 | [[package]] 32 | name = "black" 33 | version = "19.10b0" 34 | description = "The uncompromising code formatter." 35 | category = "dev" 36 | optional = false 37 | python-versions = ">=3.6" 38 | 39 | [package.dependencies] 40 | appdirs = "*" 41 | attrs = ">=18.1.0" 42 | click = ">=6.5" 43 | pathspec = ">=0.6,<1" 44 | regex = "*" 45 | toml = ">=0.9.4" 46 | typed-ast = ">=1.4.0" 47 | 48 | [package.extras] 49 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] 50 | 51 | [[package]] 52 | name = "certifi" 53 | version = "2020.12.5" 54 | description = "Python package for providing Mozilla's CA Bundle." 
55 | category = "main" 56 | optional = false 57 | python-versions = "*" 58 | 59 | [[package]] 60 | name = "chardet" 61 | version = "3.0.4" 62 | description = "Universal encoding detector for Python 2 and 3" 63 | category = "main" 64 | optional = false 65 | python-versions = "*" 66 | 67 | [[package]] 68 | name = "click" 69 | version = "7.1.2" 70 | description = "Composable command line interface toolkit" 71 | category = "dev" 72 | optional = false 73 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 74 | 75 | [[package]] 76 | name = "colorama" 77 | version = "0.4.4" 78 | description = "Cross-platform colored terminal text." 79 | category = "dev" 80 | optional = false 81 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 82 | 83 | [[package]] 84 | name = "flake8" 85 | version = "3.8.4" 86 | description = "the modular source code checker: pep8 pyflakes and co" 87 | category = "dev" 88 | optional = false 89 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 90 | 91 | [package.dependencies] 92 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 93 | mccabe = ">=0.6.0,<0.7.0" 94 | pycodestyle = ">=2.6.0a1,<2.7.0" 95 | pyflakes = ">=2.2.0,<2.3.0" 96 | 97 | [[package]] 98 | name = "idna" 99 | version = "2.10" 100 | description = "Internationalized Domain Names in Applications (IDNA)" 101 | category = "main" 102 | optional = false 103 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 104 | 105 | [[package]] 106 | name = "importlib-metadata" 107 | version = "3.1.1" 108 | description = "Read metadata from Python packages" 109 | category = "dev" 110 | optional = false 111 | python-versions = ">=3.6" 112 | 113 | [package.dependencies] 114 | zipp = ">=0.5" 115 | 116 | [package.extras] 117 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 118 | testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] 119 | 120 | [[package]] 121 | name = "mccabe" 122 | version = "0.6.1" 123 | description = "McCabe checker, plugin for flake8" 124 | category = "dev" 125 | optional = false 126 | python-versions = "*" 127 | 128 | [[package]] 129 | name = "more-itertools" 130 | version = "8.6.0" 131 | description = "More routines for operating on iterables, beyond itertools" 132 | category = "dev" 133 | optional = false 134 | python-versions = ">=3.5" 135 | 136 | [[package]] 137 | name = "packaging" 138 | version = "20.7" 139 | description = "Core utilities for Python packages" 140 | category = "dev" 141 | optional = false 142 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 143 | 144 | [package.dependencies] 145 | pyparsing = ">=2.0.2" 146 | 147 | [[package]] 148 | name = "pathspec" 149 | version = "0.8.1" 150 | description = "Utility library for gitignore style pattern matching of file paths." 
151 | category = "dev" 152 | optional = false 153 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 154 | 155 | [[package]] 156 | name = "pluggy" 157 | version = "0.13.1" 158 | description = "plugin and hook calling mechanisms for python" 159 | category = "dev" 160 | optional = false 161 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 162 | 163 | [package.dependencies] 164 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 165 | 166 | [package.extras] 167 | dev = ["pre-commit", "tox"] 168 | 169 | [[package]] 170 | name = "py" 171 | version = "1.9.0" 172 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 173 | category = "dev" 174 | optional = false 175 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 176 | 177 | [[package]] 178 | name = "pycodestyle" 179 | version = "2.6.0" 180 | description = "Python style guide checker" 181 | category = "dev" 182 | optional = false 183 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 184 | 185 | [[package]] 186 | name = "pyflakes" 187 | version = "2.2.0" 188 | description = "passive checker of Python programs" 189 | category = "dev" 190 | optional = false 191 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 192 | 193 | [[package]] 194 | name = "pyparsing" 195 | version = "2.4.7" 196 | description = "Python parsing module" 197 | category = "dev" 198 | optional = false 199 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 200 | 201 | [[package]] 202 | name = "pytest" 203 | version = "5.4.3" 204 | description = "pytest: simple powerful testing with Python" 205 | category = "dev" 206 | optional = false 207 | python-versions = ">=3.5" 208 | 209 | [package.dependencies] 210 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 211 | attrs = ">=17.4.0" 212 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 213 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 214 | more-itertools = ">=4.0.0" 215 | packaging = "*" 216 | pluggy = ">=0.12,<1.0" 217 | py = ">=1.5.0" 218 | wcwidth = "*" 219 | 220 | [package.extras] 221 | checkqa-mypy = ["mypy (==v0.761)"] 222 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 223 | 224 | [[package]] 225 | name = "python-dotenv" 226 | version = "0.15.0" 227 | description = "Add .env support to your django/flask apps in development and deployments" 228 | category = "dev" 229 | optional = false 230 | python-versions = "*" 231 | 232 | [package.extras] 233 | cli = ["click (>=5.0)"] 234 | 235 | [[package]] 236 | name = "regex" 237 | version = "2020.11.13" 238 | description = "Alternative regular expression module, to replace re." 239 | category = "dev" 240 | optional = false 241 | python-versions = "*" 242 | 243 | [[package]] 244 | name = "requests" 245 | version = "2.25.0" 246 | description = "Python HTTP for Humans." 
247 | category = "main" 248 | optional = false 249 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 250 | 251 | [package.dependencies] 252 | certifi = ">=2017.4.17" 253 | chardet = ">=3.0.2,<4" 254 | idna = ">=2.5,<3" 255 | urllib3 = ">=1.21.1,<1.27" 256 | 257 | [package.extras] 258 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] 259 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 260 | 261 | [[package]] 262 | name = "toml" 263 | version = "0.10.2" 264 | description = "Python Library for Tom's Obvious, Minimal Language" 265 | category = "dev" 266 | optional = false 267 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 268 | 269 | [[package]] 270 | name = "typed-ast" 271 | version = "1.4.1" 272 | description = "a fork of Python 2 and 3 ast modules with type comment support" 273 | category = "dev" 274 | optional = false 275 | python-versions = "*" 276 | 277 | [[package]] 278 | name = "urllib3" 279 | version = "1.26.2" 280 | description = "HTTP library with thread-safe connection pooling, file post, and more." 281 | category = "main" 282 | optional = false 283 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 284 | 285 | [package.extras] 286 | brotli = ["brotlipy (>=0.6.0)"] 287 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 288 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 289 | 290 | [[package]] 291 | name = "wcwidth" 292 | version = "0.2.5" 293 | description = "Measures the displayed width of unicode strings in a terminal" 294 | category = "dev" 295 | optional = false 296 | python-versions = "*" 297 | 298 | [[package]] 299 | name = "zipp" 300 | version = "3.4.0" 301 | description = "Backport of pathlib-compatible object wrapper for zip files" 302 | category = "dev" 303 | optional = false 304 | python-versions = ">=3.6" 305 | 306 | [package.extras] 307 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 308 | testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 309 | 310 | [metadata] 311 | lock-version = "1.1" 312 | python-versions = "^3.6" 313 | content-hash = "251e54bd29f3d67e6130d2c7f5a3334fae5e17e785518398350ad6344fc42e0e" 314 | 315 | [metadata.files] 316 | appdirs = [ 317 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 318 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 319 | ] 320 | atomicwrites = [ 321 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 322 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 323 | ] 324 | attrs = [ 325 | {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, 326 | {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, 327 | ] 328 | black = [ 329 | {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, 330 | {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, 331 | ] 332 | certifi = [ 333 | {file = 
"certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, 334 | {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, 335 | ] 336 | chardet = [ 337 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 338 | {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 339 | ] 340 | click = [ 341 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, 342 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, 343 | ] 344 | colorama = [ 345 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 346 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 347 | ] 348 | flake8 = [ 349 | {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, 350 | {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, 351 | ] 352 | idna = [ 353 | {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, 354 | {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, 355 | ] 356 | importlib-metadata = [ 357 | {file = "importlib_metadata-3.1.1-py3-none-any.whl", hash = "sha256:6112e21359ef8f344e7178aa5b72dc6e62b38b0d008e6d3cb212c5b84df72013"}, 358 | {file = "importlib_metadata-3.1.1.tar.gz", hash = "sha256:b0c2d3b226157ae4517d9625decf63591461c66b3a808c2666d538946519d170"}, 359 | ] 360 | mccabe = [ 361 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 362 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 363 | ] 364 | more-itertools = [ 365 | {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"}, 366 | {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"}, 367 | ] 368 | packaging = [ 369 | {file = "packaging-20.7-py2.py3-none-any.whl", hash = "sha256:eb41423378682dadb7166144a4926e443093863024de508ca5c9737d6bc08376"}, 370 | {file = "packaging-20.7.tar.gz", hash = "sha256:05af3bb85d320377db281cf254ab050e1a7ebcbf5410685a9a407e18a1f81236"}, 371 | ] 372 | pathspec = [ 373 | {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, 374 | {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, 375 | ] 376 | pluggy = [ 377 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 378 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 379 | ] 380 | py = [ 381 | {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, 382 | {file = 
"py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, 383 | ] 384 | pycodestyle = [ 385 | {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, 386 | {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"}, 387 | ] 388 | pyflakes = [ 389 | {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"}, 390 | {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, 391 | ] 392 | pyparsing = [ 393 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, 394 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 395 | ] 396 | pytest = [ 397 | {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, 398 | {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, 399 | ] 400 | python-dotenv = [ 401 | {file = "python-dotenv-0.15.0.tar.gz", hash = "sha256:587825ed60b1711daea4832cf37524dfd404325b7db5e25ebe88c495c9f807a0"}, 402 | {file = "python_dotenv-0.15.0-py2.py3-none-any.whl", hash = "sha256:0c8d1b80d1a1e91717ea7d526178e3882732420b03f08afea0406db6402e220e"}, 403 | ] 404 | regex = [ 405 | {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, 406 | {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, 407 | {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, 408 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, 409 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, 410 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, 411 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, 412 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, 413 | {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, 414 | {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, 415 | {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, 416 | {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, 417 | {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, 418 | {file = 
"regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, 419 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, 420 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, 421 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, 422 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, 423 | {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, 424 | {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, 425 | {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, 426 | {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, 427 | {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, 428 | {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, 429 | {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, 430 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, 431 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, 432 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, 433 | {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, 434 | {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, 435 | {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, 436 | {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, 437 | {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, 438 | {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, 439 | {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, 440 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, 441 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = 
"sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, 442 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, 443 | {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, 444 | {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, 445 | {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, 446 | ] 447 | requests = [ 448 | {file = "requests-2.25.0-py2.py3-none-any.whl", hash = "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"}, 449 | {file = "requests-2.25.0.tar.gz", hash = "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"}, 450 | ] 451 | toml = [ 452 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 453 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 454 | ] 455 | typed-ast = [ 456 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, 457 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, 458 | {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, 459 | {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, 460 | {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, 461 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, 462 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, 463 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fcf135e17cc74dbfbc05894ebca928ffeb23d9790b3167a674921db19082401f"}, 464 | {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, 465 | {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, 466 | {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, 467 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, 468 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, 469 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f208eb7aff048f6bea9586e61af041ddf7f9ade7caed625742af423f6bae3298"}, 470 | {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, 471 | {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, 472 | 
{file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, 473 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, 474 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, 475 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7e4c9d7658aaa1fc80018593abdf8598bf91325af6af5cce4ce7c73bc45ea53d"}, 476 | {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, 477 | {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, 478 | {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, 479 | {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92c325624e304ebf0e025d1224b77dd4e6393f18aab8d829b5b7e04afe9b7a2c"}, 480 | {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d648b8e3bf2fe648745c8ffcee3db3ff903d0817a01a12dd6a6ea7a8f4889072"}, 481 | {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fac11badff8313e23717f3dada86a15389d0708275bddf766cca67a84ead3e91"}, 482 | {file = "typed_ast-1.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d8110d78a5736e16e26213114a38ca35cb15b6515d535413b090bd50951556d"}, 483 | {file = "typed_ast-1.4.1-cp39-cp39-win32.whl", hash = "sha256:b52ccf7cfe4ce2a1064b18594381bccf4179c2ecf7f513134ec2f993dd4ab395"}, 484 | {file = "typed_ast-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3742b32cf1c6ef124d57f95be609c473d7ec4c14d0090e5a5e05a15269fb4d0c"}, 485 | {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, 486 | ] 487 | urllib3 = [ 488 | {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, 489 | {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, 490 | ] 491 | wcwidth = [ 492 | {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, 493 | {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, 494 | ] 495 | zipp = [ 496 | {file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"}, 497 | {file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"}, 498 | ] 499 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "python-picnic-api" 3 | version = "1.1.0" 4 | description = "" 5 | license = "Apache-2.0" 6 | authors = ["Mike Brink "] 7 | readme = "README.rst" 8 | repository = "https://github.com/MikeBrink/python-picnic-api" 9 | 10 | [tool.poetry.dependencies] 11 | python = "^3.6" 12 | requests = "^2.24.0" 13 | 14 | [tool.poetry.dev-dependencies] 15 | pytest = "^5.2" 16 | flake8 = "^3.8.3" 17 | black = {version = "^19.10b0", allow-prereleases = true} 18 | python-dotenv = "^0.15.0" 19 | 20 | 
[build-system] 21 | requires = ["poetry-core"] 22 | build-backend = "poetry.core.masonry.api" 23 | -------------------------------------------------------------------------------- /python_picnic_api/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import PicnicAPI 2 | 3 | __all__ = ["PicnicAPI"] 4 | __title__ = "python-picnic-api" 5 | __version__ = "1.1.0" 6 | __author__ = "Mike Brink" 7 | -------------------------------------------------------------------------------- /python_picnic_api/client.py: -------------------------------------------------------------------------------- 1 | from hashlib import md5 2 | 3 | from .helper import _tree_generator, _url_generator, _get_category_name 4 | from .session import PicnicAPISession, PicnicAuthError 5 | 6 | DEFAULT_URL = "https://storefront-prod.{}.picnicinternational.com/api/{}" 7 | DEFAULT_COUNTRY_CODE = "NL" 8 | DEFAULT_API_VERSION = "15" 9 | 10 | 11 | class PicnicAPI: 12 | def __init__( 13 | self, username: str = None, password: str = None, 14 | country_code: str = DEFAULT_COUNTRY_CODE, auth_token: str = None 15 | ): 16 | self._country_code = country_code 17 | self._base_url = _url_generator( 18 | DEFAULT_URL, self._country_code, DEFAULT_API_VERSION 19 | ) 20 | 21 | self.session = PicnicAPISession(auth_token=auth_token) 22 | 23 | # Login if not authenticated 24 | if not self.session.authenticated and username and password: 25 | self.login(username, password) 26 | 27 | self.high_level_categories = None 28 | 29 | def initialize_high_level_categories(self): 30 | """Initialize high-level categories once to avoid multiple requests.""" 31 | if not self.high_level_categories: 32 | self.high_level_categories = self.get_categories(depth=1) 33 | 34 | def _get(self, path: str, add_picnic_headers=False): 35 | url = self._base_url + path 36 | 37 | # Make the request, add special picnic headers if needed 38 | headers = { 39 | "x-picnic-agent": "30100;1.15.183-14941;", 40 | "x-picnic-did": "00DE6414C744E7CB" 41 | } if add_picnic_headers else None 42 | response = self.session.get(url, headers=headers).json() 43 | 44 | if self._contains_auth_error(response): 45 | raise PicnicAuthError("Picnic authentication error") 46 | 47 | return response 48 | 49 | def _post(self, path: str, data=None): 50 | url = self._base_url + path 51 | response = self.session.post(url, json=data).json() 52 | 53 | if self._contains_auth_error(response): 54 | raise PicnicAuthError(f"Picnic authentication error: {response['error'].get('message')}") 55 | 56 | return response 57 | 58 | @staticmethod 59 | def _contains_auth_error(response): 60 | if not isinstance(response, dict): 61 | return False 62 | 63 | error_code = response.setdefault("error", {}).get("code") 64 | return error_code == "AUTH_ERROR" or error_code == "AUTH_INVALID_CRED" 65 | 66 | def login(self, username: str, password: str): 67 | path = "/user/login" 68 | secret = md5(password.encode("utf-8")).hexdigest() 69 | data = {"key": username, "secret": secret, "client_id": 30100} 70 | 71 | return self._post(path, data) 72 | 73 | def logged_in(self): 74 | return self.session.authenticated 75 | 76 | def get_user(self): 77 | return self._get("/user") 78 | 79 | def search(self, term: str): 80 | path = "/search?search_term=" + term 81 | return self._get(path) 82 | 83 | def get_lists(self, list_id: str = None): 84 | if list_id: 85 | path = "/lists/" + list_id 86 | else: 87 | path = "/lists" 88 | return self._get(path) 89 | 90 | def get_sublist(self, list_id: str, sublist_id: 
str) -> list: 91 | """Get sublist. 92 | 93 | Args: 94 | list_id (str): ID of list, corresponding to requested sublist. 95 | sublist_id (str): ID of sublist. 96 | 97 | Returns: 98 | list: Sublist result. 99 | """ 100 | return self._get(f"/lists/{list_id}?sublist={sublist_id}") 101 | 102 | def get_cart(self): 103 | return self._get("/cart") 104 | 105 | def get_article(self, article_id: str, add_category_name=False): 106 | path = "/articles/" + article_id 107 | article = self._get(path) 108 | if add_category_name and "category_link" in article: 109 | self.initialize_high_level_categories() 110 | article.update( 111 | category_name=_get_category_name(article['category_link'], self.high_level_categories) 112 | ) 113 | return article 114 | 115 | def get_article_category(self, article_id: str): 116 | path = "/articles/" + article_id + "/category" 117 | return self._get(path) 118 | 119 | def add_product(self, product_id: str, count: int = 1): 120 | data = {"product_id": product_id, "count": count} 121 | return self._post("/cart/add_product", data) 122 | 123 | def remove_product(self, product_id: str, count: int = 1): 124 | data = {"product_id": product_id, "count": count} 125 | return self._post("/cart/remove_product", data) 126 | 127 | def clear_cart(self): 128 | return self._post("/cart/clear") 129 | 130 | def get_delivery_slots(self): 131 | return self._get("/cart/delivery_slots") 132 | 133 | def get_delivery(self, delivery_id: str): 134 | path = "/deliveries/" + delivery_id 135 | return self._get(path) 136 | 137 | def get_delivery_scenario(self, delivery_id: str): 138 | path = "/deliveries/" + delivery_id + "/scenario" 139 | return self._get(path, add_picnic_headers=True) 140 | 141 | def get_delivery_position(self, delivery_id: str): 142 | path = "/deliveries/" + delivery_id + "/position" 143 | return self._get(path, add_picnic_headers=True) 144 | 145 | def get_deliveries(self, summary: bool = False, data=None): 146 | data = [] if data is None else data 147 | if summary: 148 | return self._post("/deliveries/summary", data=data) 149 | return self._post("/deliveries", data=data) 150 | 151 | def get_current_deliveries(self): 152 | return self.get_deliveries(data=["CURRENT"]) 153 | 154 | def get_categories(self, depth: int = 0): 155 | return self._get(f"/my_store?depth={depth}")["catalog"] 156 | 157 | def print_categories(self, depth: int = 0): 158 | tree = "\n".join(_tree_generator(self.get_categories(depth=depth))) 159 | print(tree) 160 | 161 | 162 | __all__ = ["PicnicAPI"] 163 | -------------------------------------------------------------------------------- /python_picnic_api/helper.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | # prefix components: 4 | space = " " 5 | branch = "│ " 6 | # pointers: 7 | tee = "├── " 8 | last = "└── " 9 | 10 | IMAGE_SIZES = ["small", "medium", "regular", "large", "extra-large"] 11 | IMAGE_BASE_URL = "https://storefront-prod.nl.picnicinternational.com/static/images" 12 | 13 | def _tree_generator(response: list, prefix: str = ""): 14 | """A recursive tree generator, 15 | will yield a visual tree structure line by line 16 | with each line prefixed by the same characters 17 | """ 18 | # response each get pointers that are ├── with a final └── : 19 | pointers = [tee] * (len(response) - 1) + [last] 20 | for pointer, item in zip(pointers, response): 21 | if "name" in item: # print the item 22 | pre = "" 23 | if "unit_quantity" in item.keys(): 24 | pre = f"{item['unit_quantity']} " 25 | after = "" 26 | if 
"display_price" in item.keys(): 27 | after = f" €{int(item['display_price'])/100.0:.2f}" 28 | 29 | yield prefix + pointer + pre + item["name"] + after 30 | if "items" in item: # extend the prefix and recurse: 31 | extension = branch if pointer == tee else space 32 | # i.e. space because last, └── , above so no more | 33 | yield from _tree_generator(item["items"], prefix=prefix + extension) 34 | 35 | 36 | def _url_generator(url: str, country_code: str, api_version: str): 37 | return url.format(country_code.lower(), api_version) 38 | 39 | 40 | def _get_category_id_from_link(category_link: str) -> str: 41 | pattern = r'categories/(\d+)' 42 | first_number = re.search(pattern, category_link) 43 | if first_number: 44 | result = str(first_number.group(1)) 45 | return result 46 | else: 47 | return None 48 | 49 | 50 | def _get_category_name(category_link: str, categories: list) -> str: 51 | category_id = _get_category_id_from_link(category_link) 52 | if category_id: 53 | category = next((item for item in categories if item["id"] == category_id), None) 54 | if category: 55 | return category["name"] 56 | else: 57 | return None 58 | else: 59 | return None 60 | 61 | def get_recipe_image(id: str, size="regular"): 62 | sizes = IMAGE_SIZES + ["1250x1250"] 63 | assert size in sizes, "size must be one of: " + ", ".join(sizes) 64 | return f"{IMAGE_BASE_URL}/recipes/{id}/{size}.png" 65 | 66 | 67 | def get_image(id: str, size="regular", suffix="webp"): 68 | assert "tile" in size if suffix == "webp" else True, ( 69 | "webp format only supports tile sizes" 70 | ) 71 | assert suffix in ["webp", "png"], "suffix must be webp or png" 72 | sizes = IMAGE_SIZES + [f"tile-{size}" for size in IMAGE_SIZES] 73 | 74 | assert size in sizes, ( 75 | "size must be one of: " + ", ".join(sizes) 76 | ) 77 | return f"{IMAGE_BASE_URL}/{id}/{size}.{suffix}" 78 | 79 | -------------------------------------------------------------------------------- /python_picnic_api/session.py: -------------------------------------------------------------------------------- 1 | from requests import Response, Session 2 | 3 | 4 | class PicnicAuthError(Exception): 5 | """Indicates an error when authenticating to the Picnic API.""" 6 | 7 | 8 | class PicnicAPISession(Session): 9 | AUTH_HEADER = "x-picnic-auth" 10 | 11 | def __init__(self, auth_token: str = None): 12 | super().__init__() 13 | self._auth_token = auth_token 14 | 15 | self.headers.update( 16 | { 17 | "User-Agent": "okhttp/3.9.0", 18 | "Content-Type": "application/json; charset=UTF-8", 19 | self.AUTH_HEADER: self._auth_token 20 | } 21 | ) 22 | 23 | @property 24 | def authenticated(self): 25 | """Returns whether the user is authenticated by checking if the authentication token is set.""" 26 | return bool(self._auth_token) 27 | 28 | @property 29 | def auth_token(self): 30 | """Returns the auth token.""" 31 | return self._auth_token 32 | 33 | def _update_auth_token(self, auth_token): 34 | """Update the auth token if not None and changed.""" 35 | if auth_token and auth_token != self._auth_token: 36 | self._auth_token = auth_token 37 | self.headers.update({self.AUTH_HEADER: self._auth_token}) 38 | 39 | def get(self, url, **kwargs) -> Response: 40 | """Do a GET request and update the auth token if set.""" 41 | response = super(PicnicAPISession, self).get(url, **kwargs) 42 | self._update_auth_token(response.headers.get(self.AUTH_HEADER)) 43 | 44 | return response 45 | 46 | def post(self, url, data=None, json=None, **kwargs) -> Response: 47 | """Do a POST request and update the auth token if set.""" 48 
| response = super(PicnicAPISession, self).post(url, data, json, **kwargs) 49 | self._update_auth_token(response.headers.get(self.AUTH_HEADER)) 50 | 51 | return response 52 | 53 | 54 | __all__ = ["PicnicAuthError", "PicnicAPISession"] 55 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MikeBrink/python-picnic-api/f2029134dfcb4cd92a234be1796487e052960177/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_client.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import patch 3 | 4 | from python_picnic_api import PicnicAPI 5 | from python_picnic_api.client import DEFAULT_URL 6 | from python_picnic_api.session import PicnicAuthError 7 | 8 | PICNIC_HEADERS = { 9 | "x-picnic-agent": "30100;1.15.77-10293", 10 | "x-picnic-did": "3C417201548B2E3B", 11 | } 12 | 13 | 14 | class TestClient(unittest.TestCase): 15 | class MockResponse: 16 | def __init__(self, json_data, status_code): 17 | self.json_data = json_data 18 | self.status_code = status_code 19 | 20 | def json(self): 21 | return self.json_data 22 | 23 | def setUp(self) -> None: 24 | self.session_patcher = patch("python_picnic_api.client.PicnicAPISession") 25 | self.session_mock = self.session_patcher.start() 26 | self.client = PicnicAPI(username="test@test.nl", password="test") 27 | self.expected_base_url = DEFAULT_URL.format("nl", "15") 28 | 29 | def tearDown(self) -> None: 30 | self.session_patcher.stop() 31 | 32 | def test_login_credentials(self): 33 | self.session_mock().authenticated = False 34 | PicnicAPI(username='test@test.nl', password='test') 35 | self.session_mock().post.assert_called_with( 36 | self.expected_base_url + '/user/login', 37 | json={'key': 'test@test.nl', 'secret': '098f6bcd4621d373cade4e832627b4f6', "client_id": 1} 38 | ) 39 | 40 | def test_login_auth_token(self): 41 | self.session_mock().authenticated = True 42 | PicnicAPI(username='test@test.nl', password='test', auth_token='a3fwo7f3h78kf3was7h8f3ahf3ah78f3') 43 | self.session_mock().login.assert_not_called() 44 | 45 | def test_login_failed(self): 46 | response = { 47 | "error": { 48 | "code": "AUTH_INVALID_CRED", 49 | "message": "Invalid credentials." 
50 | } 51 | } 52 | self.session_mock().post.return_value = self.MockResponse(response, 200) 53 | 54 | client = PicnicAPI() 55 | with self.assertRaises(PicnicAuthError): 56 | client.login('test-user', 'test-password') 57 | 58 | def test_get_user(self): 59 | response = { 60 | "user_id": "594-241-3623", 61 | "firstname": "Firstname", 62 | "lastname": "Lastname", 63 | "address": { 64 | "house_number": 25, 65 | "house_number_ext": "b", 66 | "postcode": "1234 AB", 67 | "street": "Dorpsstraat", 68 | "city": "Het dorp", 69 | }, 70 | "phone": "+31123456798", 71 | "contact_email": "test@test.nl", 72 | "total_deliveries": 25, 73 | "completed_deliveries": 20, 74 | } 75 | self.session_mock().get.return_value = self.MockResponse(response, 200) 76 | 77 | user = self.client.get_user() 78 | self.session_mock().get.assert_called_with( 79 | self.expected_base_url + "/user", headers=None 80 | ) 81 | self.assertDictEqual(user, response) 82 | 83 | def test_search(self): 84 | self.client.search("test-product") 85 | self.session_mock().get.assert_called_with( 86 | self.expected_base_url + "/search?search_term=test-product", headers=None 87 | ) 88 | 89 | def test_get_lists(self): 90 | self.client.get_lists() 91 | self.session_mock().get.assert_called_with( 92 | self.expected_base_url + "/lists", headers=None 93 | ) 94 | 95 | def test_get_sublist(self): 96 | self.client.get_sublist(list_id="promotion", sublist_id="12345") 97 | self.session_mock().get.assert_called_with( 98 | self.expected_base_url + "/lists/promotion?sublist=12345", headers=None 99 | ) 100 | 101 | def test_get_list_by_id(self): 102 | self.client.get_lists("abc") 103 | self.session_mock().get.assert_called_with( 104 | self.expected_base_url + "/lists/abc", headers=None 105 | ) 106 | 107 | def test_get_cart(self): 108 | self.client.get_cart() 109 | self.session_mock().get.assert_called_with( 110 | self.expected_base_url + "/cart", headers=None 111 | ) 112 | 113 | def test_add_product(self): 114 | self.client.add_product("p3f2qa") 115 | self.session_mock().post.assert_called_with( 116 | self.expected_base_url + "/cart/add_product", 117 | json={"product_id": "p3f2qa", "count": 1}, 118 | ) 119 | 120 | def test_add_multiple_products(self): 121 | self.client.add_product("gs4puhf3a", count=5) 122 | self.session_mock().post.assert_called_with( 123 | self.expected_base_url + "/cart/add_product", 124 | json={"product_id": "gs4puhf3a", "count": 5}, 125 | ) 126 | 127 | def test_remove_product(self): 128 | self.client.remove_product("gs4puhf3a", count=5) 129 | self.session_mock().post.assert_called_with( 130 | self.expected_base_url + "/cart/remove_product", 131 | json={"product_id": "gs4puhf3a", "count": 5}, 132 | ) 133 | 134 | def test_clear_cart(self): 135 | self.client.clear_cart() 136 | self.session_mock().post.assert_called_with( 137 | self.expected_base_url + "/cart/clear", json=None 138 | ) 139 | 140 | def test_get_delivery_slots(self): 141 | self.client.get_delivery_slots() 142 | self.session_mock().get.assert_called_with( 143 | self.expected_base_url + "/cart/delivery_slots", headers=None 144 | ) 145 | 146 | def test_get_delivery(self): 147 | self.client.get_delivery("3fpawshusz3") 148 | self.session_mock().get.assert_called_with( 149 | self.expected_base_url + "/deliveries/3fpawshusz3", headers=None 150 | ) 151 | 152 | def test_get_delivery_scenario(self): 153 | self.client.get_delivery_scenario("3fpawshusz3") 154 | self.session_mock().get.assert_called_with( 155 | self.expected_base_url + "/deliveries/3fpawshusz3/scenario", 156 | 
headers=PICNIC_HEADERS, 157 | ) 158 | 159 | def test_get_delivery_position(self): 160 | self.client.get_delivery_position("3fpawshusz3") 161 | self.session_mock().get.assert_called_with( 162 | self.expected_base_url + "/deliveries/3fpawshusz3/position", 163 | headers=PICNIC_HEADERS, 164 | ) 165 | 166 | def test_get_deliveries(self): 167 | self.client.get_deliveries() 168 | self.session_mock().post.assert_called_with( 169 | self.expected_base_url + "/deliveries", json=[] 170 | ) 171 | 172 | def test_get_deliveries_summary(self): 173 | self.client.get_deliveries(summary=True) 174 | self.session_mock().post.assert_called_with( 175 | self.expected_base_url + "/deliveries/summary", json=[] 176 | ) 177 | 178 | def test_get_current_deliveries(self): 179 | self.client.get_current_deliveries() 180 | self.session_mock().post.assert_called_with( 181 | self.expected_base_url + "/deliveries", json=["CURRENT"] 182 | ) 183 | 184 | def test_get_categories(self): 185 | self.session_mock().get.return_value = self.MockResponse( 186 | { 187 | "type": "MY_STORE", 188 | "catalog": [ 189 | {"type": "CATEGORY", "id": "purchases", "name": "Besteld"}, 190 | {"type": "CATEGORY", "id": "promotions", "name": "Acties"}, 191 | ], 192 | "user": {}, 193 | }, 194 | 200, 195 | ) 196 | 197 | categories = self.client.get_categories() 198 | self.session_mock().get.assert_called_with( 199 | self.expected_base_url + "/my_store?depth=0", headers=None 200 | ) 201 | 202 | self.assertDictEqual( 203 | categories[0], {"type": "CATEGORY", "id": "purchases", "name": "Besteld"} 204 | ) 205 | 206 | def test_get_auth_exception(self): 207 | self.session_mock().get.return_value = self.MockResponse( 208 | {"error": {"code": "AUTH_ERROR"}}, 400 209 | ) 210 | 211 | with self.assertRaises(PicnicAuthError): 212 | self.client.get_user() 213 | 214 | def test_post_auth_exception(self): 215 | self.session_mock().post.return_value = self.MockResponse( 216 | {"error": {"code": "AUTH_ERROR"}}, 400 217 | ) 218 | 219 | with self.assertRaises(PicnicAuthError): 220 | self.client.clear_cart() 221 | -------------------------------------------------------------------------------- /tests/test_session.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import patch 3 | 4 | from requests import Session 5 | 6 | from python_picnic_api.session import PicnicAPISession 7 | 8 | 9 | class TestSession(unittest.TestCase): 10 | class MockResponse: 11 | def __init__(self, headers): 12 | self.headers = headers 13 | 14 | @patch.object(Session, "post") 15 | def test_update_auth_token(self, post_mock): 16 | """Test that the initial auth-token is saved.""" 17 | post_mock.return_value = self.MockResponse({ 18 | "x-picnic-auth": "3p9fqahw3uehfaw9fh8aw3ufaw389fpawhuo3fa" 19 | }) 20 | 21 | picnic_session = PicnicAPISession() 22 | picnic_session.post("https://picnic.app/user/login", json={"test": "data"}) 23 | self.assertDictEqual(dict(picnic_session.headers), { 24 | "Accept": "*/*", 25 | "Accept-Encoding": "gzip, deflate", 26 | "Connection": "keep-alive", 27 | "User-Agent": "okhttp/3.9.0", 28 | "Content-Type": "application/json; charset=UTF-8", 29 | "x-picnic-auth": "3p9fqahw3uehfaw9fh8aw3ufaw389fpawhuo3fa" 30 | }) 31 | 32 | @patch.object(Session, "post") 33 | def test_update_auth_token_refresh(self, post_mock): 34 | """Test that the auth-token is updated if a new one is given in the response headers.""" 35 | post_mock.return_value = self.MockResponse({ 36 | "x-picnic-auth": "renewed-auth-token" 37 | }) 38 | 
39 | picnic_session = PicnicAPISession(auth_token="initial-auth-token") 40 | self.assertEqual(picnic_session.auth_token, "initial-auth-token") 41 | 42 | picnic_session.post("https://picnic.app", json={"test": "data"}) 43 | self.assertEqual(picnic_session.auth_token, "renewed-auth-token") 44 | 45 | self.assertDictEqual(dict(picnic_session.headers), { 46 | "Accept": "*/*", 47 | "Accept-Encoding": "gzip, deflate", 48 | "Connection": "keep-alive", 49 | "User-Agent": "okhttp/3.9.0", 50 | "Content-Type": "application/json; charset=UTF-8", 51 | "x-picnic-auth": "renewed-auth-token" 52 | }) 53 | 54 | def test_authenticated_with_auth_token(self): 55 | picnic_session = PicnicAPISession(auth_token=None) 56 | self.assertFalse(picnic_session.authenticated) 57 | self.assertIsNone(picnic_session.headers[picnic_session.AUTH_HEADER]) 58 | 59 | picnic_session = PicnicAPISession(auth_token="3p9aw8fhzsefaw29f38h7p3fwuefah37f8kwg3i") 60 | self.assertTrue(picnic_session.authenticated) 61 | self.assertEqual(picnic_session.headers[picnic_session.AUTH_HEADER], "3p9aw8fhzsefaw29f38h7p3fwuefah37f8kwg3i") 62 | --------------------------------------------------------------------------------
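A minimal usage sketch of the PicnicAPI client defined in python_picnic_api/client.py, assuming valid Picnic credentials. The e-mail address, password, search term, and the indexing into the search response below are illustrative placeholders and assumptions, not values or structures confirmed by the repository; only methods that actually appear in client.py (login via the constructor, logged_in, search, add_product, get_cart, get_delivery_slots) are used.

# Illustrative sketch only, not a file from the repository.
# Credentials and the search-result indexing are assumed placeholders.
from python_picnic_api import PicnicAPI

# Constructing the client with username/password triggers /user/login,
# country_code defaults to "NL" (see DEFAULT_COUNTRY_CODE in client.py).
client = PicnicAPI(username="user@example.com", password="secret", country_code="NL")
print(client.logged_in())  # True once the session holds an auth token

# Search the catalogue; the nesting used here assumes the usual
# category/items layout of the search payload and may need adjusting.
results = client.search("bananen")
first_item = results[0]["items"][0]

# Add two units of the found article to the cart, then inspect the cart
# and the currently available delivery slots.
client.add_product(first_item["id"], count=2)
print(client.get_cart())
print(client.get_delivery_slots())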