├── .gitignore ├── project_0 ├── src │ ├── project_0.egg-info │ │ ├── dependency_links.txt │ │ ├── top_level.txt │ │ ├── requires.txt │ │ ├── SOURCES.txt │ │ └── PKG-INFO │ ├── hw2.py │ ├── hw.py │ └── hello_world.py ├── docs │ ├── _build │ │ ├── html │ │ │ ├── _static │ │ │ │ ├── custom.css │ │ │ │ ├── file.png │ │ │ │ ├── minus.png │ │ │ │ ├── plus.png │ │ │ │ ├── documentation_options.js │ │ │ │ ├── _sphinx_javascript_frameworks_compat.js │ │ │ │ ├── sphinx_highlight.js │ │ │ │ ├── pygments.css │ │ │ │ ├── language_data.js │ │ │ │ ├── doctools.js │ │ │ │ └── alabaster.css │ │ │ ├── objects.inv │ │ │ ├── _sources │ │ │ │ ├── api.rst.txt │ │ │ │ ├── overview.rst.txt │ │ │ │ ├── index.rst.txt │ │ │ │ └── design.rst.txt │ │ │ ├── _plantuml │ │ │ │ └── 40 │ │ │ │ │ └── 4038882977cec53bc727d03bdf0773719520a72a.png │ │ │ ├── _images │ │ │ │ └── plantuml-4038882977cec53bc727d03bdf0773719520a72a.png │ │ │ ├── .buildinfo │ │ │ ├── searchindex.js │ │ │ ├── search.html │ │ │ ├── py-modindex.html │ │ │ ├── overview.html │ │ │ ├── design.html │ │ │ ├── genindex.html │ │ │ ├── index.html │ │ │ └── api.html │ │ └── doctrees │ │ │ ├── api.doctree │ │ │ ├── design.doctree │ │ │ ├── index.doctree │ │ │ ├── overview.doctree │ │ │ └── environment.pickle │ ├── api.rst │ ├── overview.rst │ ├── index.rst │ ├── Makefile │ ├── design.rst │ ├── make.bat │ └── conf.py ├── requirements.txt ├── tests │ ├── environment.py │ ├── test_hello_world.py │ ├── test_hw.py │ ├── test_hw2.py │ ├── features │ │ └── hello_world.feature │ └── steps │ │ └── hw_cli.py ├── tox.ini ├── pyproject.toml ├── requirements-test.txt ├── README.md └── requirements-dev.txt ├── project_1_1 ├── src │ ├── project_1_1.egg-info │ │ ├── dependency_links.txt │ │ ├── top_level.txt │ │ ├── requires.txt │ │ ├── SOURCES.txt │ │ └── PKG-INFO │ ├── model.py │ ├── acquire.py │ └── csv_extract.py ├── tests │ ├── environment.py │ ├── test_model.py │ ├── features │ │ └── acquire.feature │ └── steps │ │ └── fake_cli_steps.py ├── tox.ini ├── pyproject.toml └── requirements.txt ├── project_1_2 ├── src │ ├── project_1_2.egg-info │ │ ├── dependency_links.txt │ │ ├── top_level.txt │ │ ├── requires.txt │ │ ├── SOURCES.txt │ │ └── PKG-INFO │ ├── model.py │ ├── webpage_client.py │ ├── acquire.py │ └── kaggle_client.py ├── tox.ini ├── tests │ ├── features │ │ └── dataset_download.feature │ ├── test_kaggle_client.py │ ├── environment.py │ ├── steps │ │ └── cli_interface.py │ ├── mock_kaggle_bottle.py │ └── mock_kaggle_server.py ├── pyproject.toml ├── requirements.txt └── requirements-dev.txt ├── project_1_3 ├── src │ ├── project_1_3.egg-info │ │ ├── dependency_links.txt │ │ ├── top_level.txt │ │ ├── requires.txt │ │ ├── SOURCES.txt │ │ └── PKG-INFO │ ├── html_extract.py │ └── acquire.py ├── tests │ ├── environment.py │ ├── features │ │ └── html_extract.feature │ ├── steps │ │ └── cli_interface.py │ └── test_html_extract.py ├── tox.ini ├── pyproject.toml └── requirements.txt ├── project_1_4 ├── src │ ├── project_1_4.egg-info │ │ ├── dependency_links.txt │ │ ├── top_level.txt │ │ ├── requires.txt │ │ ├── SOURCES.txt │ │ └── PKG-INFO │ ├── sql_script_maker.py │ ├── query_examples.py │ └── build_db.py ├── example.db ├── tox.ini ├── pyproject.toml ├── requirements.txt ├── tests │ └── test_build_db.py └── schema.toml ├── project_1_5 ├── src │ ├── project_1_5.egg-info │ │ ├── dependency_links.txt │ │ ├── top_level.txt │ │ ├── requires.txt │ │ ├── SOURCES.txt │ │ └── PKG-INFO │ ├── model.py │ ├── db_extract.py │ └── acquire.py ├── example.db ├── tox.ini ├── tests │ ├── features │ │ └── db_extract.feature │ ├── test_db_extract.py │ ├── environment.py │ └── steps │ │ └── db_interface.py ├── pyproject.toml ├── requirements.txt └── schema.toml ├── LICENSE └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .idea/* 3 | -------------------------------------------------------------------------------- /project_0/src/project_0.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /project_1_1/src/project_1_1.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /project_1_2/src/project_1_2.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /project_1_3/src/project_1_3.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /project_1_4/src/project_1_4.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /project_1_5/src/project_1_5.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /project_0/src/project_0.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | hello_world 2 | hw 3 | hw2 4 | -------------------------------------------------------------------------------- /project_1_3/src/project_1_3.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | acquire 2 | html_extract 3 | -------------------------------------------------------------------------------- /project_1_1/src/project_1_1.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | acquire 2 | csv_extract 3 | model 4 | -------------------------------------------------------------------------------- /project_1_5/src/project_1_5.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | acquire 2 | db_extract 3 | model 4 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/custom.css: -------------------------------------------------------------------------------- 1 | /* This file intentionally left blank. */ 2 | -------------------------------------------------------------------------------- /project_1_4/src/project_1_4.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | build_db 2 | query_examples 3 | sql_script_maker 4 | -------------------------------------------------------------------------------- /project_1_2/src/project_1_2.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | acquire 2 | kaggle_client 3 | model 4 | webpage_client 5 | -------------------------------------------------------------------------------- /project_1_4/example.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_1_4/example.db -------------------------------------------------------------------------------- /project_1_5/example.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_1_5/example.db -------------------------------------------------------------------------------- /project_1_3/tests/environment.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.3: Scrape data from a web page 4 | """ 5 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/html/objects.inv -------------------------------------------------------------------------------- /project_0/docs/_build/doctrees/api.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/doctrees/api.doctree -------------------------------------------------------------------------------- /project_0/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile 6 | # 7 | -------------------------------------------------------------------------------- /project_0/docs/_build/doctrees/design.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/doctrees/design.doctree -------------------------------------------------------------------------------- /project_0/docs/_build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/doctrees/index.doctree -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/html/_static/file.png -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/html/_static/minus.png -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/html/_static/plus.png -------------------------------------------------------------------------------- /project_0/docs/_build/doctrees/overview.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/doctrees/overview.doctree -------------------------------------------------------------------------------- /project_0/docs/_build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/doctrees/environment.pickle -------------------------------------------------------------------------------- /project_0/docs/api.rst: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | 3 | #### 4 | API 5 | #### 6 | 7 | .. automodule:: hello_world 8 | :members: 9 | -------------------------------------------------------------------------------- /project_0/docs/overview.rst: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | 3 | ######## 4 | Overview 5 | ######## 6 | 7 | This application writes a cheerful greeting. 8 | -------------------------------------------------------------------------------- /project_0/src/project_0.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | 2 | [dev] 3 | sphinx==7.2.2 4 | sphinxcontrib-plantuml==0.25 5 | pip-tools==7.3.0 6 | 7 | [test] 8 | pytest==7.4.0 9 | tox==4.9.0 10 | behave==1.2.6 11 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_sources/api.rst.txt: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | 3 | #### 4 | API 5 | #### 6 | 7 | .. automodule:: hello_world 8 | :members: 9 | -------------------------------------------------------------------------------- /project_1_1/src/project_1_1.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | 2 | [dev] 3 | sphinx==7.2.2 4 | sphinxcontrib-plantuml==0.25 5 | pip-tools==7.3.0 6 | 7 | [test] 8 | pytest==7.4.0 9 | tox==4.9.0 10 | behave==1.2.6 11 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_sources/overview.rst.txt: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | 3 | ######## 4 | Overview 5 | ######## 6 | 7 | This application writes a cheerful greeting. 8 | -------------------------------------------------------------------------------- /project_1_4/src/project_1_4.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | toml==0.10.2 2 | 3 | [dev] 4 | sphinx==7.2.2 5 | sphinxcontrib-plantuml==0.25 6 | pip-tools==7.3.0 7 | 8 | [test] 9 | pytest==7.4.0 10 | tox==4.9.0 11 | behave==1.2.6 12 | -------------------------------------------------------------------------------- /project_1_5/src/project_1_5.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | toml==0.10.2 2 | 3 | [dev] 4 | sphinx==7.2.2 5 | sphinxcontrib-plantuml==0.25 6 | pip-tools==7.3.0 7 | 8 | [test] 9 | pytest==7.4.0 10 | tox==4.9.0 11 | behave==1.2.6 12 | -------------------------------------------------------------------------------- /project_0/tests/environment.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | 6 | # This is empty. 7 | # The file is required. 8 | # There are no environmental functions needed 9 | -------------------------------------------------------------------------------- /project_1_1/tests/environment.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.1: Data Acquisition Base Application 4 | """ 5 | 6 | # This is empty. 7 | # The file is required. 8 | # There are no environmental functions needed 9 | -------------------------------------------------------------------------------- /project_1_3/src/project_1_3.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4==4.11.1 2 | 3 | [dev] 4 | sphinx==7.2.2 5 | sphinxcontrib-plantuml==0.25 6 | pip-tools==7.3.0 7 | 8 | [test] 9 | pytest==7.4.0 10 | tox==4.9.0 11 | behave==1.2.6 12 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_plantuml/40/4038882977cec53bc727d03bdf0773719520a72a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/html/_plantuml/40/4038882977cec53bc727d03bdf0773719520a72a.png -------------------------------------------------------------------------------- /project_0/docs/_build/html/_images/plantuml-4038882977cec53bc727d03bdf0773719520a72a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Python-Real-World-Projects/HEAD/project_0/docs/_build/html/_images/plantuml-4038882977cec53bc727d03bdf0773719520a72a.png -------------------------------------------------------------------------------- /project_0/docs/_build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: 17cb68e66a3b7ffb98fb043318f591db 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /project_1_2/src/project_1_2.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | requests==2.28.1 2 | beautifulsoup4==4.11.1 3 | 4 | [dev] 5 | sphinx==7.2.2 6 | sphinxcontrib-plantuml==0.25 7 | pip-tools==7.3.0 8 | 9 | [test] 10 | pytest==7.4.0 11 | tox==4.9.0 12 | behave==1.2.6 13 | bottle==0.12.23 14 | -------------------------------------------------------------------------------- /project_0/tests/test_hello_world.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | import hello_world 6 | 7 | def test_hello_world(capsys): 8 | hello_world.main([]) 9 | out, err = capsys.readouterr() 10 | assert "Hello, World!" in out 11 | -------------------------------------------------------------------------------- /project_1_5/src/model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.5: Acquire data from a SQL extract 4 | """ 5 | from dataclasses import dataclass 6 | 7 | @dataclass 8 | class SeriesSample: 9 | x: str 10 | y: str 11 | 12 | @dataclass 13 | class Series: 14 | name: str 15 | samples: list[SeriesSample] 16 | -------------------------------------------------------------------------------- /project_0/tests/test_hw.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | import pytest 6 | 7 | import hw 8 | 9 | @pytest.mark.xfail(reason="Not fully implemented") 10 | def test_hw(capsys): 11 | hw.main([]) 12 | out, err = capsys.readouterr() 13 | assert "Hello, World!" in out 14 | -------------------------------------------------------------------------------- /project_1_3/src/project_1_3.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | pyproject.toml 2 | src/acquire.py 3 | src/html_extract.py 4 | src/project_1_3.egg-info/PKG-INFO 5 | src/project_1_3.egg-info/SOURCES.txt 6 | src/project_1_3.egg-info/dependency_links.txt 7 | src/project_1_3.egg-info/requires.txt 8 | src/project_1_3.egg-info/top_level.txt 9 | tests/test_html_extract.py -------------------------------------------------------------------------------- /project_0/tests/test_hw2.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | import pytest 6 | 7 | import hw2 8 | 9 | @pytest.mark.xfail(reason="Not fully implemented") 10 | def test_hw2(capsys): 11 | hw2.main([]) 12 | out, err = capsys.readouterr() 13 | assert "Hello, World!" in out 14 | -------------------------------------------------------------------------------- /project_1_1/src/project_1_1.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | pyproject.toml 2 | src/acquire.py 3 | src/csv_extract.py 4 | src/model.py 5 | src/project_1_1.egg-info/PKG-INFO 6 | src/project_1_1.egg-info/SOURCES.txt 7 | src/project_1_1.egg-info/dependency_links.txt 8 | src/project_1_1.egg-info/requires.txt 9 | src/project_1_1.egg-info/top_level.txt 10 | tests/test_model.py -------------------------------------------------------------------------------- /project_1_3/tox.ini: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.3: Scrape data from a web page 2 | [tox] 3 | min_version = 4.0 4 | skipsdist = true 5 | 6 | [testenv] 7 | deps = pip-tools 8 | pytest 9 | behave 10 | commands_pre = pip-sync requirements.txt 11 | setenv = 12 | PYTHONPATH=src 13 | commands = 14 | pytest tests 15 | behave tests 16 | -------------------------------------------------------------------------------- /project_1_5/src/project_1_5.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | pyproject.toml 2 | src/acquire.py 3 | src/db_extract.py 4 | src/model.py 5 | src/project_1_5.egg-info/PKG-INFO 6 | src/project_1_5.egg-info/SOURCES.txt 7 | src/project_1_5.egg-info/dependency_links.txt 8 | src/project_1_5.egg-info/requires.txt 9 | src/project_1_5.egg-info/top_level.txt 10 | tests/test_db_extract.py -------------------------------------------------------------------------------- /project_1_2/tox.ini: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.2: Acquire data from web service 2 | [tox] 3 | min_version = 4.0 4 | skipsdist = true 5 | 6 | [testenv] 7 | deps = pip-tools 8 | pytest 9 | behave 10 | commands_pre = pip-sync requirements.txt 11 | setenv = 12 | PYTHONPATH=src 13 | commands = 14 | pytest tests 15 | behave tests 16 | -------------------------------------------------------------------------------- /project_1_5/tox.ini: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.5: Acquire data from a SQL extract 2 | [tox] 3 | min_version = 4.0 4 | skipsdist = true 5 | 6 | [testenv] 7 | deps = pip-tools 8 | pytest 9 | behave 10 | commands_pre = pip-sync requirements.txt 11 | setenv = 12 | PYTHONPATH=src 13 | commands = 14 | pytest tests 15 | behave tests 16 | -------------------------------------------------------------------------------- /project_1_1/tox.ini: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.1: Data Acquisition Base Application 2 | [tox] 3 | min_version = 4.0 4 | skipsdist = true 5 | 6 | [testenv] 7 | deps = pip-tools 8 | pytest 9 | behave 10 | commands_pre = pip-sync requirements.txt 11 | setenv = 12 | PYTHONPATH=src 13 | commands = 14 | pytest tests 15 | behave tests 16 | -------------------------------------------------------------------------------- /project_0/tox.ini: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project Zero: A template for other projects 3 | 4 | [tox] 5 | min_version = 4.0 6 | # skipsdist = true 7 | 8 | [testenv] 9 | deps = pip-tools 10 | pytest 11 | behave 12 | commands_pre = pip-sync requirements.txt 13 | setenv = 14 | PYTHONPATH=src 15 | commands = 16 | pytest tests 17 | behave tests 18 | -------------------------------------------------------------------------------- /project_1_4/src/project_1_4.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | pyproject.toml 2 | src/build_db.py 3 | src/query_examples.py 4 | src/sql_script_maker.py 5 | src/project_1_4.egg-info/PKG-INFO 6 | src/project_1_4.egg-info/SOURCES.txt 7 | src/project_1_4.egg-info/dependency_links.txt 8 | src/project_1_4.egg-info/requires.txt 9 | src/project_1_4.egg-info/top_level.txt 10 | tests/test_build_db.py -------------------------------------------------------------------------------- /project_1_2/src/project_1_2.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | pyproject.toml 2 | src/acquire.py 3 | src/kaggle_client.py 4 | src/model.py 5 | src/webpage_client.py 6 | src/project_1_2.egg-info/PKG-INFO 7 | src/project_1_2.egg-info/SOURCES.txt 8 | src/project_1_2.egg-info/dependency_links.txt 9 | src/project_1_2.egg-info/requires.txt 10 | src/project_1_2.egg-info/top_level.txt 11 | tests/test_kaggle_client.py -------------------------------------------------------------------------------- /project_0/tests/features/hello_world.feature: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project Zero: A template for other projects 2 | # Project 0. 3 | 4 | Feature: The Cheerful Greeting CLI interface provides a greeting 5 | to a specific name. 6 | 7 | Scenario: The application writes the greeting message. 8 | When we run command "python src/hello_world.py" 9 | Then output has "Hello, World!" 10 | -------------------------------------------------------------------------------- /project_1_4/tox.ini: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.4: Local SQL Database 2 | [tox] 3 | min_version = 4.0 4 | skipsdist = true 5 | 6 | [testenv] 7 | deps = pip-tools 8 | pytest 9 | behave 10 | commands_pre = pip-sync requirements.txt 11 | setenv = 12 | PYTHONPATH=src 13 | commands = 14 | pytest tests 15 | # behave tests # No acceptance test for the database builder 16 | -------------------------------------------------------------------------------- /project_0/src/project_0.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | README.md 2 | pyproject.toml 3 | src/hello_world.py 4 | src/hw.py 5 | src/hw2.py 6 | src/project_0.egg-info/PKG-INFO 7 | src/project_0.egg-info/SOURCES.txt 8 | src/project_0.egg-info/dependency_links.txt 9 | src/project_0.egg-info/requires.txt 10 | src/project_0.egg-info/top_level.txt 11 | tests/test_hello_world.py 12 | tests/test_hw.py 13 | tests/test_hw2.py -------------------------------------------------------------------------------- /project_1_2/src/model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | from dataclasses import dataclass 6 | from typing import TypeAlias 7 | 8 | 9 | @dataclass 10 | class XYPair: 11 | x: str 12 | y: str 13 | 14 | 15 | @dataclass 16 | class SomeOtherStructure: 17 | x: list[str] 18 | y: list[str] 19 | 20 | 21 | RawData: TypeAlias = XYPair | SomeOtherStructure 22 | -------------------------------------------------------------------------------- /project_1_1/src/model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.1: Data Acquisition Base Application 4 | """ 5 | from dataclasses import dataclass 6 | from typing import TypeAlias 7 | 8 | 9 | @dataclass 10 | class XYPair: 11 | x: str 12 | y: str 13 | 14 | 15 | @dataclass 16 | class SomeOtherStructure: 17 | x: list[str] 18 | y: list[str] 19 | 20 | 21 | RawData: TypeAlias = XYPair | SomeOtherStructure 22 | -------------------------------------------------------------------------------- /project_1_1/tests/test_model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.1: Data Acquisition Base Application 4 | """ 5 | 6 | from unittest.mock import sentinel 7 | from dataclasses import asdict 8 | 9 | from model import XYPair 10 | 11 | def test_xypair(): 12 | pair = XYPair(x=sentinel.X, y=sentinel.Y) 13 | assert pair.x == sentinel.X 14 | assert pair.y == sentinel.Y 15 | assert asdict(pair) == {"x": sentinel.X, "y": sentinel.Y} 16 | -------------------------------------------------------------------------------- /project_1_1/src/project_1_1.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: project-1-1 3 | Version: 1.0.0 4 | Summary: Real-World Python Projects -- Project 1.1. 5 | Author-email: Author 6 | Classifier: Natural Language :: English 7 | Classifier: Operating System :: OS Independent 8 | Classifier: Programming Language :: Python 9 | Requires-Python: >=3.11 10 | Description-Content-Type: text/markdown 11 | Provides-Extra: dev 12 | Provides-Extra: test 13 | -------------------------------------------------------------------------------- /project_1_2/src/project_1_2.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: project-1-2 3 | Version: 1.0.0 4 | Summary: Real-World Python Projects -- Project 1.2. 5 | Author-email: Author 6 | Classifier: Natural Language :: English 7 | Classifier: Operating System :: OS Independent 8 | Classifier: Programming Language :: Python 9 | Requires-Python: >=3.11 10 | Description-Content-Type: text/markdown 11 | Provides-Extra: dev 12 | Provides-Extra: test 13 | -------------------------------------------------------------------------------- /project_1_3/src/project_1_3.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: project-1-3 3 | Version: 1.0.0 4 | Summary: Real-World Python Projects -- Project 1.3. 5 | Author-email: Author 6 | Classifier: Natural Language :: English 7 | Classifier: Operating System :: OS Independent 8 | Classifier: Programming Language :: Python 9 | Requires-Python: >=3.11 10 | Description-Content-Type: text/markdown 11 | Provides-Extra: dev 12 | Provides-Extra: test 13 | -------------------------------------------------------------------------------- /project_1_4/src/project_1_4.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: project-1-4 3 | Version: 1.0.0 4 | Summary: Real-World Python Projects -- Project 1.4. 5 | Author-email: Author 6 | Classifier: Natural Language :: English 7 | Classifier: Operating System :: OS Independent 8 | Classifier: Programming Language :: Python 9 | Requires-Python: >=3.11 10 | Description-Content-Type: text/markdown 11 | Provides-Extra: dev 12 | Provides-Extra: test 13 | -------------------------------------------------------------------------------- /project_1_5/src/project_1_5.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: project-1-5 3 | Version: 1.0.0 4 | Summary: Real-World Python Projects -- Project 1.5. 5 | Author-email: Author 6 | Classifier: Natural Language :: English 7 | Classifier: Operating System :: OS Independent 8 | Classifier: Programming Language :: Python 9 | Requires-Python: >=3.11 10 | Description-Content-Type: text/markdown 11 | Provides-Extra: dev 12 | Provides-Extra: test 13 | -------------------------------------------------------------------------------- /project_1_4/src/sql_script_maker.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.4: Local SQL Database 4 | """ 5 | 6 | def write_insert(series: int, sequence: int, x: str, y: str) -> str: 7 | print( 8 | f"INSERT INTO SSAMPLES(SERIES, SEQUENCE, X, Y)" 9 | f"VALUES({series}, {sequence}, '{x}', '{y}')" 10 | ) 11 | 12 | if __name__ == "__main__": 13 | write_insert(1, 2, '3.0', '4.0') 14 | write_insert(1, 2, "'); DROP TABLE USERS;", "'); ROLLBACK;") 15 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '1.0', 4 | LANGUAGE: 'None', 5 | COLLAPSE_INDEX: false, 6 | BUILDER: 'html', 7 | FILE_SUFFIX: '.html', 8 | LINK_SUFFIX: '.html', 9 | HAS_SOURCE: true, 10 | SOURCELINK_SUFFIX: '.txt', 11 | NAVIGATION_WITH_KEYS: false, 12 | SHOW_SEARCH_SUMMARY: true, 13 | ENABLE_SEARCH_SHORTCUTS: true, 14 | }; -------------------------------------------------------------------------------- /project_1_5/tests/features/db_extract.feature: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.5: Acquire data from a SQL extract 2 | Feature: 3 | Database Extraction. 4 | 5 | @fixture.sqlite 6 | Scenario: Extract data from the enterprise database 7 | 8 | Given a series named "test1" 9 | And sample values "[(11, 13), (17, 19)]" 10 | When we run the database extract command with the test fixture database 11 | Then log has INFO line with "series: test1" 12 | And log has INFO line with "count: 2" 13 | And output directory has file named "quartet/test1.csv" 14 | -------------------------------------------------------------------------------- /project_0/src/hw2.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | 6 | """ 7 | A skeleton to show our initial thoughts on a class design. 8 | """ 9 | import sys 10 | 11 | class Greeting: 12 | """ 13 | Created with a greeting text. 14 | Writes the text to stdout. 15 | 16 | .. todo:: Finish this 17 | """ 18 | pass 19 | 20 | 21 | def main(argv: list[str] = sys.argv[1:]) -> None: 22 | """Get options and create Greeting object.""" 23 | pass 24 | 25 | 26 | if __name__ == "__main__": 27 | main() 28 | -------------------------------------------------------------------------------- /project_0/docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | sphinx-quickstart on Mon Dec 12 14:49:10 2022. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Project Zero's documentation! 7 | ======================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | overview 14 | design 15 | api 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | sphinx-quickstart on Mon Dec 12 14:49:10 2022. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Project Zero's documentation! 7 | ======================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | overview 14 | design 15 | api 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /project_0/src/hw.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | 6 | """ 7 | A skeleton to show our initial thoughts on a functional design. 8 | """ 9 | 10 | import argparse 11 | import sys 12 | 13 | 14 | def get_options(argv: list[str] = sys.argv[1:]) -> argparse.Namespace: 15 | """Parse command-line""" 16 | pass 17 | 18 | def greeting(who: str = "World") -> None: 19 | """Write greeting.""" 20 | print(f"Hello, {who}!") 21 | 22 | def main(argv: list[str] = sys.argv[1:]) -> None: 23 | """Get options and write greeting.""" 24 | pass 25 | 26 | if __name__ == "__main__": 27 | main() 28 | -------------------------------------------------------------------------------- /project_1_2/tests/features/dataset_download.feature: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.2: Acquire data from web service 2 | Feature: 3 | The application will download a dataset from Kaggle.com 4 | 5 | @fixture.kaggle_server 6 | Scenario: Request for carlmcbrideellis/data-anscombes-quartet extracts file from ZIP archive. 7 | The download command is 8 | "python src/acquire.py -k kaggle.json -o quartet \ 9 | --zip carlmcbrideellis/data-anscombes-quartet" 10 | 11 | Given proper keys are in "kaggle.json" 12 | When we run the kaggle download command 13 | Then log has INFO line with "header: ['mock', 'data']" 14 | And log has INFO line with "count: 1" 15 | -------------------------------------------------------------------------------- /project_0/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /project_0/src/hello_world.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | 6 | import argparse 7 | import sys 8 | 9 | 10 | def get_options(argv: list[str]) -> argparse.Namespace: 11 | """Parse command-line""" 12 | parser = argparse.ArgumentParser() 13 | parser.add_argument("--who", "-w", type=str, default="World") 14 | return parser.parse_args(argv) 15 | 16 | def greeting(who: str = "World") -> None: 17 | """Write greeting.""" 18 | print(f"Hello, {who}!") 19 | 20 | def main(argv: list[str] = sys.argv[1:]) -> None: 21 | """Get options and write greeting.""" 22 | options = get_options(argv) 23 | greeting(options.who) 24 | 25 | if __name__ == "__main__": 26 | main() 27 | -------------------------------------------------------------------------------- /project_0/docs/design.rst: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | 3 | ###### 4 | Design 5 | ###### 6 | 7 | The application has the following 8 | structure: 9 | 10 | .. uml:: 11 | 12 | class hello_world.greeting << (F, white) >> { 13 | } 14 | hide hello_world.greeting members 15 | 16 | class hello_world.main << (F, white) >> { 17 | } 18 | hide hello_world.main members 19 | 20 | class hello_world.get_options << (F, white) >> { 21 | } 22 | hide hello_world.get_options members 23 | 24 | hello_world.main -> hello_world.get_options : "calls" 25 | hello_world.main -> hello_world.greeting : "calls" 26 | 27 | 28 | There are three functions that interact to produce 29 | the greeting. 30 | -------------------------------------------------------------------------------- /project_1_2/tests/test_kaggle_client.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | from kaggle_client import RestAccess 6 | 7 | from unittest.mock import Mock, sentinel, call 8 | 9 | def test_rest_access(monkeypatch): 10 | mock_auth_class = Mock( 11 | name="Mocked HTTPBasicAuth class", 12 | return_value=sentinel.AUTH 13 | ) 14 | monkeypatch.setattr('requests.auth.HTTPBasicAuth', mock_auth_class) 15 | mock_kaggle_json = {"username": sentinel.USERNAME, "key": sentinel.KEY} 16 | access = RestAccess(mock_kaggle_json) 17 | assert access.credentials == sentinel.AUTH 18 | assert mock_auth_class.mock_calls == [ 19 | call(sentinel.USERNAME, sentinel.KEY) 20 | ] 21 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_sources/design.rst.txt: -------------------------------------------------------------------------------- 1 | .. Python Real-World Projects -- Project Zero: A template for other projects 2 | 3 | ###### 4 | Design 5 | ###### 6 | 7 | The application has the following 8 | structure: 9 | 10 | .. uml:: 11 | 12 | class hello_world.greeting << (F, white) >> { 13 | } 14 | hide hello_world.greeting members 15 | 16 | class hello_world.main << (F, white) >> { 17 | } 18 | hide hello_world.main members 19 | 20 | class hello_world.get_options << (F, white) >> { 21 | } 22 | hide hello_world.get_options members 23 | 24 | hello_world.main -> hello_world.get_options : "calls" 25 | hello_world.main -> hello_world.greeting : "calls" 26 | 27 | 28 | There are three functions that interact to produce 29 | the greeting. 30 | -------------------------------------------------------------------------------- /project_0/tests/steps/hw_cli.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | import subprocess 6 | import shlex 7 | from pathlib import Path 8 | 9 | @when(u'we run command "{command}"') 10 | def step_impl(context, command): 11 | output_path = Path("output.log") 12 | with output_path.open('w') as target: 13 | status = subprocess.run( 14 | shlex.split(command), 15 | check=True, text=True, stdout=target, stderr=subprocess.STDOUT) 16 | context.status = status 17 | context.output = output_path.read_text() 18 | output_path.unlink() 19 | # print(f"{context=} {context.status=} {context.output=}") 20 | 21 | @then(u'output has "{expected_output}"') 22 | def step_impl(context, expected_output): 23 | # print(f"{context=} {context.status=} {context.output=}") 24 | assert context.status.returncode == 0 25 | assert expected_output in context.output 26 | -------------------------------------------------------------------------------- /project_0/docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /project_1_1/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project 1.1: Data Acquisition Base Application 3 | 4 | [build-system] 5 | requires = ["setuptools", "wheel"] # PEP 508 specifications. 6 | 7 | [project] 8 | name = "project_1_1" 9 | version = "1.0.0" 10 | authors = [ 11 | {name = "Author", email = "author@email.com"}, 12 | ] 13 | description = "Real-World Python Projects -- Project 1.1." 14 | readme = "README.md" 15 | requires-python = ">=3.11" 16 | classifiers = [ 17 | "Natural Language :: English", 18 | "Operating System :: OS Independent", 19 | "Programming Language :: Python", 20 | ] 21 | dependencies = [ 22 | # Packages this project requires 23 | ] 24 | [project.optional-dependencies] 25 | dev = [ 26 | # Development tools to work on this project 27 | "sphinx==7.2.2", 28 | "sphinxcontrib-plantuml==0.25", 29 | "pip-tools==7.3.0" 30 | ] 31 | test = [ 32 | # Testing tools to test this project 33 | "pytest==7.4.0", 34 | "tox==4.9.0", 35 | "behave==1.2.6" 36 | ] 37 | -------------------------------------------------------------------------------- /project_1_4/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project 1.4: Local SQL Database 3 | 4 | [build-system] 5 | requires = ["setuptools", "wheel"] # PEP 508 specifications. 6 | 7 | [project] 8 | name = "project_1_4" 9 | version = "1.0.0" 10 | authors = [ 11 | {name = "Author", email = "author@email.com"}, 12 | ] 13 | description = "Real-World Python Projects -- Project 1.4." 14 | readme = "README.md" 15 | requires-python = ">=3.11" 16 | classifiers = [ 17 | "Natural Language :: English", 18 | "Operating System :: OS Independent", 19 | "Programming Language :: Python", 20 | ] 21 | dependencies = [ 22 | # Packages this project requires 23 | "toml==0.10.2" 24 | ] 25 | [project.optional-dependencies] 26 | dev = [ 27 | # Development tools to work on this project 28 | "sphinx==7.2.2", 29 | "sphinxcontrib-plantuml==0.25", 30 | "pip-tools==7.3.0" 31 | ] 32 | test = [ 33 | # Testing tools to test this project 34 | "pytest==7.4.0", 35 | "tox==4.9.0", 36 | "behave==1.2.6" 37 | ] 38 | -------------------------------------------------------------------------------- /project_1_5/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project 1.5: Acquire data from a SQL extract 3 | 4 | [build-system] 5 | requires = ["setuptools", "wheel"] # PEP 508 specifications. 6 | 7 | [project] 8 | name = "project_1_5" 9 | version = "1.0.0" 10 | authors = [ 11 | {name = "Author", email = "author@email.com"}, 12 | ] 13 | description = "Real-World Python Projects -- Project 1.5." 14 | readme = "README.md" 15 | requires-python = ">=3.11" 16 | classifiers = [ 17 | "Natural Language :: English", 18 | "Operating System :: OS Independent", 19 | "Programming Language :: Python", 20 | ] 21 | dependencies = [ 22 | # Packages this project requires 23 | "toml==0.10.2" 24 | ] 25 | [project.optional-dependencies] 26 | dev = [ 27 | # Development tools to work on this project 28 | "sphinx==7.2.2", 29 | "sphinxcontrib-plantuml==0.25", 30 | "pip-tools==7.3.0" 31 | ] 32 | test = [ 33 | # Testing tools to test this project 34 | "pytest==7.4.0", 35 | "tox==4.9.0", 36 | "behave==1.2.6" 37 | ] 38 | -------------------------------------------------------------------------------- /project_1_3/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project 1.3: Scrape data from a web page 3 | 4 | [build-system] 5 | requires = ["setuptools", "wheel"] # PEP 508 specifications. 6 | 7 | [project] 8 | name = "project_1_3" 9 | version = "1.0.0" 10 | authors = [ 11 | {name = "Author", email = "author@email.com"}, 12 | ] 13 | description = "Real-World Python Projects -- Project 1.3." 14 | readme = "README.md" 15 | requires-python = ">=3.11" 16 | classifiers = [ 17 | "Natural Language :: English", 18 | "Operating System :: OS Independent", 19 | "Programming Language :: Python", 20 | ] 21 | dependencies = [ 22 | # Packages this project requires 23 | "beautifulsoup4==4.11.1" 24 | ] 25 | [project.optional-dependencies] 26 | dev = [ 27 | # Development tools to work on this project 28 | "sphinx==7.2.2", 29 | "sphinxcontrib-plantuml==0.25", 30 | "pip-tools==7.3.0" 31 | ] 32 | test = [ 33 | # Testing tools to test this project 34 | "pytest==7.4.0", 35 | "tox==4.9.0", 36 | "behave==1.2.6" 37 | ] 38 | -------------------------------------------------------------------------------- /project_0/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project Zero: A template for other projects 3 | 4 | [build-system] 5 | requires = ["setuptools", "wheel"] # PEP 508 specifications. 6 | build-backend = "setuptools.build_meta" 7 | 8 | [project] 9 | name = "project_0" 10 | version = "1.0.0" 11 | authors = [ 12 | {name = "Author", email = "author@email.com"}, 13 | ] 14 | description = "Real-World Python Projects -- Project 0." 15 | readme = "README.md" 16 | requires-python = ">=3.11" 17 | classifiers = [ 18 | "Natural Language :: English", 19 | "Operating System :: OS Independent", 20 | "Programming Language :: Python", 21 | ] 22 | dependencies = [ 23 | # Packages this project requires 24 | ] 25 | [project.optional-dependencies] 26 | dev = [ 27 | # Development tools to work on this project 28 | "sphinx==7.2.2", 29 | "sphinxcontrib-plantuml==0.25", 30 | "pip-tools==7.3.0" 31 | ] 32 | test = [ 33 | # Testing tools to test this project 34 | "pytest==7.4.0", 35 | "tox==4.9.0", 36 | "behave==1.2.6" 37 | ] 38 | -------------------------------------------------------------------------------- /project_1_2/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project 1.2: Acquire data from web service 3 | 4 | [build-system] 5 | requires = ["setuptools", "wheel"] # PEP 508 specifications. 6 | 7 | [project] 8 | name = "project_1_2" 9 | version = "1.0.0" 10 | authors = [ 11 | {name = "Author", email = "author@email.com"}, 12 | ] 13 | description = "Real-World Python Projects -- Project 1.2." 14 | readme = "README.md" 15 | requires-python = ">=3.11" 16 | classifiers = [ 17 | "Natural Language :: English", 18 | "Operating System :: OS Independent", 19 | "Programming Language :: Python", 20 | ] 21 | dependencies = [ 22 | # Packages this project requires 23 | "requests==2.28.1", 24 | "beautifulsoup4==4.11.1" 25 | ] 26 | [project.optional-dependencies] 27 | dev = [ 28 | # Development tools to work on this project 29 | "sphinx==7.2.2", 30 | "sphinxcontrib-plantuml==0.25", 31 | "pip-tools==7.3.0" 32 | ] 33 | test = [ 34 | # Testing tools to test this project 35 | "pytest==7.4.0", 36 | "tox==4.9.0", 37 | "behave==1.2.6", 38 | "bottle==0.12.23" 39 | ] 40 | -------------------------------------------------------------------------------- /project_1_1/tests/features/acquire.feature: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | # Project 1.1: Data Acquisition Base Application 3 | 4 | Feature: Extract four data series from a file with the peculiar Anscombe Quartet format. 5 | 6 | Scenario: When requested, the application extracts all four series. 7 | Given the "Anscombe_quartet_data.csv" source file exists 8 | And the "quartet" directory exists 9 | When we run command "python src/acquire.py -o quartet Anscombe_quartet_data.csv" 10 | Then the "quartet/series_1.json" file exists 11 | And the "quartet/series_2.json" file exists 12 | And the "quartet/series_3.json" file exists 13 | And the "quartet/series_3.json" file exists 14 | And the "quartet/series_1.json" file starts with '{"x": "10.0", "y": "8.04"}' 15 | 16 | 17 | Scenario: When the file does not exist, the log has the expected error message. 18 | Given the "Anscombe_quartet_data.csv" source file does not exist 19 | And the "quartet" directory exists 20 | When we run command "python src/acquire.py -o quartet Anscombe_quartet_data.csv" 21 | Then the log contains "File not found: Anscombe_quartet_data.csv" 22 | 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Packt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /project_1_1/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --extra=test 6 | # 7 | behave==1.2.6 8 | # via project-1-1 (pyproject.toml) 9 | cachetools==5.3.1 10 | # via tox 11 | chardet==5.2.0 12 | # via tox 13 | colorama==0.4.6 14 | # via tox 15 | distlib==0.3.7 16 | # via virtualenv 17 | filelock==3.12.2 18 | # via 19 | # tox 20 | # virtualenv 21 | iniconfig==2.0.0 22 | # via pytest 23 | packaging==23.1 24 | # via 25 | # pyproject-api 26 | # pytest 27 | # tox 28 | parse==1.19.0 29 | # via 30 | # behave 31 | # parse-type 32 | parse-type==0.6.0 33 | # via behave 34 | platformdirs==3.10.0 35 | # via 36 | # tox 37 | # virtualenv 38 | pluggy==1.2.0 39 | # via 40 | # pytest 41 | # tox 42 | pyproject-api==1.5.4 43 | # via tox 44 | pytest==7.4.0 45 | # via project-1-1 (pyproject.toml) 46 | six==1.16.0 47 | # via 48 | # behave 49 | # parse-type 50 | tox==4.9.0 51 | # via project-1-1 (pyproject.toml) 52 | virtualenv==20.24.3 53 | # via tox 54 | -------------------------------------------------------------------------------- /project_0/requirements-test.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --extra=test --output-file=requirements-test.txt 6 | # 7 | behave==1.2.6 8 | # via project-0 (pyproject.toml) 9 | cachetools==5.3.1 10 | # via tox 11 | chardet==5.2.0 12 | # via tox 13 | colorama==0.4.6 14 | # via tox 15 | distlib==0.3.7 16 | # via virtualenv 17 | filelock==3.12.2 18 | # via 19 | # tox 20 | # virtualenv 21 | iniconfig==2.0.0 22 | # via pytest 23 | packaging==23.1 24 | # via 25 | # pyproject-api 26 | # pytest 27 | # tox 28 | parse==1.19.0 29 | # via 30 | # behave 31 | # parse-type 32 | parse-type==0.6.0 33 | # via behave 34 | platformdirs==3.10.0 35 | # via 36 | # tox 37 | # virtualenv 38 | pluggy==1.2.0 39 | # via 40 | # pytest 41 | # tox 42 | pyproject-api==1.5.4 43 | # via tox 44 | pytest==7.4.0 45 | # via project-0 (pyproject.toml) 46 | six==1.16.0 47 | # via 48 | # behave 49 | # parse-type 50 | tox==4.9.0 51 | # via project-0 (pyproject.toml) 52 | virtualenv==20.24.3 53 | # via tox 54 | -------------------------------------------------------------------------------- /project_1_4/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --extra=test 6 | # 7 | behave==1.2.6 8 | # via project-1-4 (pyproject.toml) 9 | cachetools==5.3.1 10 | # via tox 11 | chardet==5.2.0 12 | # via tox 13 | colorama==0.4.6 14 | # via tox 15 | distlib==0.3.7 16 | # via virtualenv 17 | filelock==3.12.2 18 | # via 19 | # tox 20 | # virtualenv 21 | iniconfig==2.0.0 22 | # via pytest 23 | packaging==23.1 24 | # via 25 | # pyproject-api 26 | # pytest 27 | # tox 28 | parse==1.19.0 29 | # via 30 | # behave 31 | # parse-type 32 | parse-type==0.6.0 33 | # via behave 34 | platformdirs==3.10.0 35 | # via 36 | # tox 37 | # virtualenv 38 | pluggy==1.2.0 39 | # via 40 | # pytest 41 | # tox 42 | pyproject-api==1.5.4 43 | # via tox 44 | pytest==7.4.0 45 | # via project-1-4 (pyproject.toml) 46 | six==1.16.0 47 | # via 48 | # behave 49 | # parse-type 50 | toml==0.10.2 51 | # via project-1-4 (pyproject.toml) 52 | tox==4.9.0 53 | # via project-1-4 (pyproject.toml) 54 | virtualenv==20.24.3 55 | # via tox 56 | -------------------------------------------------------------------------------- /project_1_5/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --extra=test 6 | # 7 | behave==1.2.6 8 | # via project-1-5 (pyproject.toml) 9 | cachetools==5.3.1 10 | # via tox 11 | chardet==5.2.0 12 | # via tox 13 | colorama==0.4.6 14 | # via tox 15 | distlib==0.3.7 16 | # via virtualenv 17 | filelock==3.12.2 18 | # via 19 | # tox 20 | # virtualenv 21 | iniconfig==2.0.0 22 | # via pytest 23 | packaging==23.1 24 | # via 25 | # pyproject-api 26 | # pytest 27 | # tox 28 | parse==1.19.0 29 | # via 30 | # behave 31 | # parse-type 32 | parse-type==0.6.0 33 | # via behave 34 | platformdirs==3.10.0 35 | # via 36 | # tox 37 | # virtualenv 38 | pluggy==1.2.0 39 | # via 40 | # pytest 41 | # tox 42 | pyproject-api==1.5.4 43 | # via tox 44 | pytest==7.4.0 45 | # via project-1-5 (pyproject.toml) 46 | six==1.16.0 47 | # via 48 | # behave 49 | # parse-type 50 | toml==0.10.2 51 | # via project-1-5 (pyproject.toml) 52 | tox==4.9.0 53 | # via project-1-5 (pyproject.toml) 54 | virtualenv==20.24.3 55 | # via tox 56 | -------------------------------------------------------------------------------- /project_1_3/tests/features/html_extract.feature: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.3: Scrape data from a web page 2 | Feature: Extracts table from Wikipedia Page 3 | 4 | Scenario: Finds captioned table and extracts data. 5 | The command is 6 | python src/acquire.py -o quartet --page "$GIVEN_FILENAME" --caption "Anscombe's quartet" 7 | 8 | Given an HTML page "example_1.html" 9 | """ 10 | 11 | 12 | 13 | 14 |

Some Text

15 | 16 | 17 | 18 | 19 | 20 |
Wrong Table
Wrong Table
21 | 22 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | """ 32 | When we run the html extract command 33 | Then log has INFO line with "header: ['Keep this', 'Data']" 34 | And log has INFO line with "count: 1" 35 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({docnames:["api","design","index","overview"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":5,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":3,"sphinx.domains.rst":2,"sphinx.domains.std":2,sphinx:56},filenames:["api.rst","design.rst","index.rst","overview.rst"],objects:{"":[[0,0,0,"-","hello_world"]],hello_world:[[0,1,1,"","get_options"],[0,1,1,"","greeting"],[0,1,1,"","main"]]},objnames:{"0":["py","module","Python module"],"1":["py","function","Python function"]},objtypes:{"0":"py:module","1":"py:function"},terms:{"function":1,A:0,The:1,There:1,_build:0,api:2,applic:[1,3],ar:1,argpars:0,argv:0,cheer:3,command:0,design:2,follow:1,get:0,get_opt:0,greet:[0,1,3],ha:1,hello_world:0,html:0,index:2,interact:1,line:0,list:0,m:0,main:0,modul:2,namespac:0,none:0,option:0,other:0,overview:2,page:2,pars:0,produc:1,project:0,python:0,real:0,search:2,str:0,structur:1,templat:0,thi:3,three:1,who:0,world:0,write:[0,3],zero:0},titles:["API","Design","Welcome to Project Zero\u2019s documentation!","Overview"],titleterms:{api:0,content:2,design:1,document:2,indic:2,overview:3,project:2,s:2,tabl:2,welcom:2,zero:2}}) -------------------------------------------------------------------------------- /project_1_3/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --extra=test 6 | # 7 | beautifulsoup4==4.11.1 8 | # via project-1-3 (pyproject.toml) 9 | behave==1.2.6 10 | # via project-1-3 (pyproject.toml) 11 | cachetools==5.3.1 12 | # via tox 13 | chardet==5.2.0 14 | # via tox 15 | colorama==0.4.6 16 | # via tox 17 | distlib==0.3.7 18 | # via virtualenv 19 | filelock==3.12.2 20 | # via 21 | # tox 22 | # virtualenv 23 | iniconfig==2.0.0 24 | # via pytest 25 | packaging==23.1 26 | # via 27 | # pyproject-api 28 | # pytest 29 | # tox 30 | parse==1.19.0 31 | # via 32 | # behave 33 | # parse-type 34 | parse-type==0.6.0 35 | # via behave 36 | platformdirs==3.10.0 37 | # via 38 | # tox 39 | # virtualenv 40 | pluggy==1.2.0 41 | # via 42 | # pytest 43 | # tox 44 | pyproject-api==1.5.4 45 | # via tox 46 | pytest==7.4.0 47 | # via project-1-3 (pyproject.toml) 48 | six==1.16.0 49 | # via 50 | # behave 51 | # parse-type 52 | soupsieve==2.4.1 53 | # via beautifulsoup4 54 | tox==4.9.0 55 | # via project-1-3 (pyproject.toml) 56 | virtualenv==20.24.3 57 | # via tox 58 | -------------------------------------------------------------------------------- /project_1_3/src/html_extract.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.3: Scrape data from a web page 4 | """ 5 | from urllib.request import urlopen 6 | from bs4 import BeautifulSoup, Tag 7 | from collections.abc import Iterator 8 | 9 | def get_page(url: str) -> BeautifulSoup: 10 | return BeautifulSoup( 11 | urlopen(url), "html.parser" 12 | ) 13 | 14 | def find_table_caption(soup: BeautifulSoup, caption_text: str = "Anscombe's quartet") -> Tag: 15 | for table in soup.find_all('table'): 16 | if table.caption: 17 | if table.caption.text.strip() == caption_text.strip(): 18 | return table 19 | raise RuntimeError(f"
Anscombe's quartet 23 |
Skip titlesIn th tags
Keep thisData
And thisData
with not found") 20 | 21 | def table_row_data_iter(table: Tag) -> Iterator[list[str]]: 22 | for row in table.tbody.find_all('tr'): 23 | flat = [td.text.strip() for td in row.find_all('td')] 24 | yield flat 25 | 26 | def dump_table(url: str, caption: str): 27 | soup = get_page(url) 28 | table_tag = find_table_caption(soup, caption) 29 | for row in table_row_data_iter(table_tag): 30 | print(row) 31 | 32 | if __name__ == "__main__": 33 | dump_table("https://en.wikipedia.org/wiki/Anscombe's_quartet", "Anscombe's quartet") 34 | -------------------------------------------------------------------------------- /project_1_2/src/webpage_client.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | from collections.abc import Iterator 6 | from urllib.request import urlopen 7 | 8 | from bs4 import BeautifulSoup, Tag 9 | 10 | 11 | def get_page(url: str) -> BeautifulSoup: 12 | return BeautifulSoup( 13 | urlopen(url), "html.parser" 14 | ) 15 | 16 | def find_table_caption( 17 | soup: BeautifulSoup, 18 | caption_text: str = "Anscombe's quartet" 19 | ) -> Tag: 20 | for table in soup.find_all('table'): 21 | if table.caption: 22 | if table.caption.text.strip() == caption_text.strip(): 23 | return table 24 | raise RuntimeError(f"
{caption_text}
with caption {caption_text!r} not found") 25 | 26 | def extract_rows(table: Tag) -> Iterator[list[str]]: 27 | for tr in table.tbody.find_all('tr'): 28 | values = [td.text.strip() for td in tr.find_all('td')] 29 | yield values 30 | 31 | def from_wikipedia(): 32 | url = "https://en.wikipedia.org/wiki/Anscombe%27s_quartet" 33 | page_soup = get_page(url) 34 | table = find_table_caption(page_soup, "Anscombe's quartet") 35 | for row in extract_rows(table): 36 | print(row) 37 | 38 | if __name__ == "__main__": 39 | from_wikipedia() 40 | -------------------------------------------------------------------------------- /project_1_5/src/db_extract.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.5: Acquire data from a SQL extract 4 | """ 5 | import model 6 | import sqlite3 7 | from typing import Any 8 | from collections.abc import Iterator 9 | 10 | class Extract: 11 | def build_samples( 12 | self, 13 | connection: sqlite3.Connection, 14 | config: dict[str, Any], 15 | name: str 16 | ) -> list[model.SeriesSample]: 17 | print(config['query']['samples'], {"name": name}) 18 | samples_cursor = connection.execute(config['query']['samples'], {"name": name}) 19 | samples = [ 20 | model.SeriesSample( 21 | x=row[0], 22 | y=row[1]) 23 | for row in samples_cursor 24 | ] 25 | return samples 26 | 27 | def series_iter( 28 | self, 29 | connection: sqlite3.Connection, 30 | config: dict[str, Any] 31 | ) -> Iterator[model.Series]: 32 | print(config['query']['names']) 33 | names_cursor = connection.execute(config['query']['names']) 34 | for row in names_cursor: 35 | name=row[0] 36 | yield model.Series( 37 | name=name, 38 | samples=self.build_samples(connection, config, name) 39 | ) 40 | -------------------------------------------------------------------------------- /project_1_4/tests/test_build_db.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.4: Local SQL Database 4 | """ 5 | from unittest.mock import MagicMock, Mock, call, sentinel 6 | from pytest import fixture 7 | import build_db 8 | 9 | @fixture 10 | def mock_connection(): 11 | mock_cursor = [] 12 | mock_connection = Mock( 13 | name='connection', 14 | execute=Mock(return_value=mock_cursor) 15 | ) 16 | return mock_connection 17 | 18 | def test_execute_statements(mock_connection): 19 | build_db.execute_statements(mock_connection, [sentinel.S1, sentinel.S2]) 20 | assert mock_connection.execute.mock_calls == [ 21 | call(sentinel.S1), 22 | call(sentinel.S2) 23 | ] 24 | 25 | @fixture 26 | def mock_query_connection(): 27 | mock_cursor = [sentinel.ROW] 28 | mock_connection = Mock( 29 | name='connection', 30 | execute=Mock(return_value=mock_cursor) 31 | ) 32 | return mock_connection 33 | 34 | def test_execute_statements_query(mock_query_connection, capsys): 35 | build_db.execute_statements(mock_query_connection, sentinel.QUERY) 36 | assert mock_query_connection.execute.mock_calls == [ 37 | call(sentinel.QUERY), 38 | ] 39 | output, error = capsys.readouterr() 40 | assert output.splitlines() == [ 41 | 'sentinel.ROW' 42 | ] 43 | -------------------------------------------------------------------------------- /project_0/docs/conf.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project Zero: A template for other projects 4 | """ 5 | # Configuration file for the Sphinx documentation builder. 6 | # 7 | # For the full list of built-in configuration values, see the documentation: 8 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 9 | 10 | # -- Project information ----------------------------------------------------- 11 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 12 | 13 | project = 'Project Zero' 14 | copyright = '2022, S.Lott' 15 | author = 'S.Lott' 16 | release = '1.0' 17 | 18 | # -- General configuration --------------------------------------------------- 19 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 20 | 21 | extensions = [ 22 | 'sphinx.ext.autodoc', 23 | 'sphinxcontrib.plantuml', 24 | ] 25 | 26 | templates_path = ['_templates'] 27 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 28 | 29 | 30 | 31 | # -- Options for HTML output ------------------------------------------------- 32 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 33 | 34 | html_theme = 'alabaster' 35 | html_static_path = ['_static'] 36 | 37 | import sys 38 | from pathlib import Path 39 | sys.path.append(str(Path.cwd().parent/"src")) 40 | -------------------------------------------------------------------------------- /project_1_3/src/acquire.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.3: Scrape data from a web page 4 | """ 5 | 6 | import argparse 7 | import logging 8 | from pathlib import Path 9 | import sys 10 | import html_extract 11 | 12 | logger = logging.getLogger("acquire") 13 | 14 | def dump_table(url: str, caption: str): 15 | soup = html_extract.get_page(url) 16 | table_tag = html_extract.find_table_caption(soup, caption) 17 | source = iter( 18 | html_extract.table_row_data_iter(table_tag) 19 | ) 20 | empty = next(source) 21 | header = next(source) 22 | logging.info("header: %r", header) 23 | count = 0 24 | for row in source: 25 | count += 1 26 | logging.info(row) 27 | logging.info("count: %d", count) 28 | 29 | def get_options(argv: list[str]) -> argparse.Namespace: 30 | parser = argparse.ArgumentParser() 31 | parser.add_argument("-o", "--output", type=Path) 32 | parser.add_argument("-p", "--page", type=str) 33 | parser.add_argument("-c", "--caption", default="Anscombe's quartet") 34 | return parser.parse_args(argv) 35 | 36 | def main(argv: list[str] | None = None) -> None: 37 | options = get_options(argv or sys.argv[1:]) 38 | dump_table(options.page, options.caption) 39 | 40 | if __name__ == "__main__": 41 | logging.basicConfig(level=logging.INFO) 42 | main() 43 | -------------------------------------------------------------------------------- /project_1_4/src/query_examples.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.4: Local SQL Database 4 | """ 5 | import sqlite3 6 | from textwrap import dedent 7 | 8 | count_join_query = dedent("""\ 9 | SELECT s.name, COUNT(*) 10 | FROM series s JOIN series_sample sv 11 | ON s.series_id = sv.series_id 12 | GROUP BY s.series_id 13 | """) 14 | 15 | detail_join_query = dedent("""\ 16 | SELECT s.name, sv.x, sv.y 17 | FROM series s JOIN series_sample sv ON s.series_id = sv.series_id 18 | """) 19 | 20 | series_query = dedent("""\ 21 | SELECT s.name, s.series_id 22 | FROM series s 23 | """) 24 | 25 | detail_query = dedent("""\ 26 | SELECT sv.x, sv.y 27 | FROM series_sample sv 28 | WHERE sv.series_id = :series_id 29 | ORDER BY sv.sequence 30 | """) 31 | 32 | def main(): 33 | with sqlite3.connect("file:../example.db", uri=True) as connection: 34 | cursor = connection.execute(count_join_query) 35 | for row in cursor: 36 | print(row) 37 | cursor = connection.execute(detail_join_query) 38 | for row in cursor: 39 | print(row) 40 | 41 | outer = connection.execute(series_query) 42 | for series in outer: 43 | print(series) 44 | inner = connection.execute(detail_query, {"series_id": series[1]}) 45 | for row in inner: 46 | print(row) 47 | if __name__ == "__main__": 48 | main() 49 | -------------------------------------------------------------------------------- /project_1_3/tests/steps/cli_interface.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.3: Scrape data from a web page 4 | """ 5 | from behave import given, when, then 6 | from pathlib import Path 7 | import subprocess 8 | import shlex 9 | from textwrap import dedent 10 | 11 | @given(u'an HTML page "{filename}"') 12 | def step_impl(context, filename): 13 | # print(filename) 14 | context.path = Path(filename) 15 | context.path.write_text(dedent(context.text)) 16 | context.add_cleanup(context.path.unlink) 17 | 18 | @when(u'we run the html extract command') 19 | def step_impl(context): 20 | command = [ 21 | 'python', 'src/acquire.py', '-o', 'quartet', '--page', '$URL', '--caption', "Anscombe's quartet" 22 | ] 23 | url = f"file://{str(context.path.absolute())}" 24 | command[command.index('$URL')] = url 25 | print(shlex.join(command)) 26 | output_path = Path("output.log") 27 | with output_path.open('w') as target: 28 | status = subprocess.run( 29 | command, 30 | check=True, text=True, stdout=target, stderr=subprocess.STDOUT) 31 | context.status = status 32 | context.output = output_path.read_text() 33 | output_path.unlink() 34 | # print(f"{context=} {context.status=} {context.output=}") 35 | 36 | 37 | @then(u'log has INFO line with "{log_line}"') 38 | def step_impl(context, log_line): 39 | print(context.output) 40 | assert log_line in context.output, f"No {log_line!r} in output" 41 | -------------------------------------------------------------------------------- /project_1_4/schema.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.4: Local SQL Database 2 | # Project 1.4 3 | 4 | [definition] 5 | 6 | create = [ 7 | """ 8 | CREATE TABLE IF NOT EXISTS series( 9 | series_id INTEGER, 10 | name TEXT, 11 | 12 | PRIMARY KEY (series_id) 13 | ) 14 | """, 15 | 16 | """ 17 | CREATE TABLE IF NOT EXISTS series_sample( 18 | series_id INTEGER, 19 | sequence INTEGER, 20 | x TEXT, 21 | y TEXT, 22 | 23 | PRIMARY KEY (series_id, sequence), 24 | FOREIGN KEY (series_id) REFERENCES series(series_id) 25 | ) 26 | """ 27 | ] 28 | 29 | drop = [ 30 | """ 31 | DROP TABLE IF EXISTS series_sample 32 | """, 33 | """ 34 | DROP TABLE IF EXISTS series 35 | """ 36 | ] 37 | 38 | [manipulation] 39 | 40 | insert_series = """ 41 | INSERT INTO series(series_id, name) VALUES(:series_id, :name) 42 | """ 43 | 44 | insert_values = """ 45 | INSERT INTO series_sample(series_id, sequence, x, y) 46 | VALUES(:series_id, :sequence, :x, :y) 47 | """ 48 | 49 | [query] 50 | summary = """ 51 | SELECT s.name, COUNT(*) 52 | FROM series s JOIN series_sample sv ON s.series_id = sv.series_id 53 | GROUP BY s.series_id 54 | """ 55 | 56 | detail = """ 57 | SELECT s.name, s.series_id, sv.sequence, sv.x, sv.y 58 | FROM series s JOIN series_value sv ON s.series_id = sv.series_id 59 | """ 60 | 61 | names = """ 62 | SELECT s.name FROM series s 63 | """ 64 | 65 | samples = """ 66 | SELECT sv.x, sv.y 67 | FROM series_sample sv JOIN series s ON s.series_id = sv.series_id 68 | WHERE s.name = :name 69 | ORDER BY sv.sequence 70 | """ 71 | -------------------------------------------------------------------------------- /project_1_2/tests/environment.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | from collections.abc import Iterator 6 | from typing import Any 7 | import subprocess 8 | import time 9 | import os 10 | import sys 11 | from behave import fixture, use_fixture 12 | from behave.runner import Context 13 | 14 | @fixture 15 | def kaggle_server(context: Context) -> Iterator[Any]: 16 | """ 17 | Used by a Scenario (or Feature) to start the server running. 18 | """ 19 | if "environment" not in context: 20 | context.environment = os.environ 21 | context.environment["ACQUIRE_BASE_URL"] = "http://127.0.0.1:8080" 22 | # Save server-side log for debugging 23 | server = subprocess.Popen( 24 | [sys.executable, "tests/mock_kaggle_bottle.py"], 25 | ) 26 | time.sleep(0.5) # 500 ms delay to allow service to open socket 27 | yield server 28 | server.kill() 29 | 30 | def before_tag(context: Context, tag: str) -> None: 31 | """ 32 | Expands a @fixture to invoke the appropriate generator. 33 | """ 34 | if tag == "fixture.kaggle_server": 35 | # This will invoke the definition generator. 36 | # It consumes a value before and after the tagged scenario. 37 | use_fixture(kaggle_server, context) 38 | 39 | def before_scenario(context, scenario): 40 | if "environment" not in context: 41 | context.environment = os.environ 42 | 43 | def after_scenario(context, scenario): 44 | if "temp_path" in context: 45 | context.temp_path.unlink() 46 | -------------------------------------------------------------------------------- /project_1_5/schema.toml: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects -- Project 1.5: Acquire data from a SQL extract 2 | # Project 1.5: Acquire data from a SQL extract 3 | 4 | [definition] 5 | 6 | create = [ 7 | """ 8 | CREATE TABLE IF NOT EXISTS series( 9 | series_id INTEGER, 10 | name TEXT, 11 | 12 | PRIMARY KEY (series_id) 13 | ) 14 | """, 15 | 16 | """ 17 | CREATE TABLE IF NOT EXISTS series_sample( 18 | series_id INTEGER, 19 | sequence INTEGER, 20 | x TEXT, 21 | y TEXT, 22 | 23 | PRIMARY KEY (series_id, sequence), 24 | FOREIGN KEY (series_id) REFERENCES series(series_id) 25 | ) 26 | """ 27 | ] 28 | 29 | drop = [ 30 | """ 31 | DROP TABLE IF EXISTS series_sample 32 | """, 33 | """ 34 | DROP TABLE IF EXISTS series 35 | """ 36 | ] 37 | 38 | [manipulation] 39 | 40 | insert_series = """ 41 | INSERT INTO series(series_id, name) VALUES(:series_id, :name) 42 | """ 43 | 44 | insert_values = """ 45 | INSERT INTO series_sample(series_id, sequence, x, y) 46 | VALUES(:series_id, :sequence, :x, :y) 47 | """ 48 | 49 | [query] 50 | summary = """ 51 | SELECT s.name, COUNT(*) 52 | FROM series s JOIN series_sample sv ON s.series_id = sv.series_id 53 | GROUP BY s.series_id 54 | """ 55 | 56 | detail = """ 57 | SELECT s.name, s.series_id, sv.sequence, sv.x, sv.y 58 | FROM series s JOIN series_value sv ON s.series_id = sv.series_id 59 | """ 60 | 61 | names = """ 62 | SELECT s.name FROM series s 63 | """ 64 | 65 | samples = """ 66 | SELECT sv.x, sv.y 67 | FROM series_sample sv JOIN series s ON s.series_id = sv.series_id 68 | WHERE s.name = :name 69 | ORDER BY sv.sequence 70 | """ 71 | -------------------------------------------------------------------------------- /project_1_2/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --extra=test 6 | # 7 | beautifulsoup4==4.11.1 8 | # via project-1-2 (pyproject.toml) 9 | behave==1.2.6 10 | # via project-1-2 (pyproject.toml) 11 | bottle==0.12.23 12 | # via project-1-2 (pyproject.toml) 13 | cachetools==5.3.1 14 | # via tox 15 | certifi==2023.5.7 16 | # via requests 17 | chardet==5.2.0 18 | # via tox 19 | charset-normalizer==2.1.1 20 | # via requests 21 | colorama==0.4.6 22 | # via tox 23 | distlib==0.3.7 24 | # via virtualenv 25 | filelock==3.12.2 26 | # via 27 | # tox 28 | # virtualenv 29 | idna==3.4 30 | # via requests 31 | iniconfig==2.0.0 32 | # via pytest 33 | packaging==23.1 34 | # via 35 | # pyproject-api 36 | # pytest 37 | # tox 38 | parse==1.19.0 39 | # via 40 | # behave 41 | # parse-type 42 | parse-type==0.6.0 43 | # via behave 44 | platformdirs==3.10.0 45 | # via 46 | # tox 47 | # virtualenv 48 | pluggy==1.2.0 49 | # via 50 | # pytest 51 | # tox 52 | pyproject-api==1.5.4 53 | # via tox 54 | pytest==7.4.0 55 | # via project-1-2 (pyproject.toml) 56 | requests==2.28.1 57 | # via project-1-2 (pyproject.toml) 58 | six==1.16.0 59 | # via 60 | # behave 61 | # parse-type 62 | soupsieve==2.4.1 63 | # via beautifulsoup4 64 | tox==4.9.0 65 | # via project-1-2 (pyproject.toml) 66 | urllib3==1.26.18 67 | # via requests 68 | virtualenv==20.24.3 69 | # via tox 70 | -------------------------------------------------------------------------------- /project_1_3/tests/test_html_extract.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.3: Scrape data from a web page 4 | """ 5 | 6 | import html_extract 7 | from pytest import fixture 8 | from textwrap import dedent 9 | 10 | @fixture 11 | def example_1(tmp_path): 12 | html_page = tmp_path / "works.html" 13 | html_page.write_text( 14 | dedent(""" 15 | 16 | 17 | 18 | 19 |

Some Text

20 |
21 | 22 | 23 | 24 | 25 |
Wrong Table
Wrong Table
26 | 27 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | """ 37 | ) 38 | ) 39 | yield f"file://{str(html_page)}" 40 | html_page.unlink() 41 | 42 | def test_dump(example_1, capsys): 43 | html_extract.dump_table(example_1, "Anscombe's quartet") 44 | out, err = capsys.readouterr() 45 | assert out == "[]\n['Keep this', 'Data']\n['And this', 'Data']\n" 46 | 47 | def test_steps(example_1): 48 | soup = html_extract.get_page(example_1) 49 | table_tag = html_extract.find_table_caption(soup, "Anscombe's quartet") 50 | rows = list(html_extract.table_row_data_iter(table_tag)) 51 | assert rows == [ 52 | [], 53 | ['Keep this', 'Data'], 54 | ['And this', 'Data'], 55 | ] 56 | -------------------------------------------------------------------------------- /project_1_1/src/acquire.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.1: Data Acquisition Base Application 4 | """ 5 | import argparse 6 | from dataclasses import asdict 7 | import json 8 | from pathlib import Path 9 | import sys 10 | from csv_extract import * 11 | 12 | def get_options(argv: list[str]) -> argparse.Namespace: 13 | defaults = argparse.Namespace( 14 | extract_class=Extract, 15 | series_classes=[Series1Pair, Series2Pair, Series3Pair, Series4Pair], 16 | ) 17 | 18 | parser = argparse.ArgumentParser() 19 | parser.add_argument('-o', '--output', type=Path, default="data") 20 | parser.add_argument('source', type=Path, nargs='*') 21 | return parser.parse_args(argv, defaults) 22 | 23 | 24 | EXTRACT_CLASS: type[Extract] = Extract 25 | BUILDER_CLASSES: list[type[PairBuilder]] = [Series1Pair, Series2Pair, Series3Pair, Series4Pair] 26 | 27 | def main(argv: list[str]) -> None: 28 | builders = [cls() for vls in BUILDER_CLASSES] 29 | extractor = EXTRACT_CLASS(builders) 30 | # etc. 31 | 32 | options = get_options(argv) 33 | 34 | targets = [ 35 | options.output / "Series_1.ndjson", 36 | options.output / "Series_2.ndjson", 37 | options.output / "Series_3.ndjson", 38 | options.output / "Series_4.ndjson", 39 | ] 40 | target_files = [ 41 | target.open('w') for target in targets 42 | ] 43 | for source in options.source: 44 | with source.open() as source: 45 | rdr = csv.reader(source) 46 | for row in rdr: 47 | for row, wtr in zip(extractor.build_pairs(row), target_files): 48 | wtr.write(json.dumps(asdict(row)) + '\n') 49 | for target in target_files: 50 | target.close() 51 | 52 | if __name__ == "__main__": 53 | main() 54 | -------------------------------------------------------------------------------- /project_1_5/tests/test_db_extract.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.5: Acquire data from a SQL extract 4 | """ 5 | import sqlite3 6 | from typing import Any, cast 7 | from unittest.mock import Mock, call, sentinel 8 | from pytest import fixture 9 | import db_extract 10 | import model 11 | 12 | @fixture 13 | def mock_connection() -> sqlite3.Connection: 14 | names_cursor: list[tuple[Any, ...]] = [ 15 | (sentinel.Name,) 16 | ] 17 | samples_cursor: list[tuple[Any, ...]] = [ 18 | (sentinel.X, sentinel.Y) 19 | ] 20 | query_to_cursor: dict[sentinel, list[tuple[Any, ...]]] = { 21 | sentinel.Names_Query: names_cursor, 22 | sentinel.Samples_Query: samples_cursor 23 | } 24 | 25 | connection = Mock( 26 | execute=Mock( 27 | side_effect=lambda query, param=None: query_to_cursor[query] 28 | ) 29 | ) 30 | return cast(sqlite3.Connection, connection) 31 | 32 | @fixture 33 | def mock_config(): 34 | return { 35 | 'query': { 36 | 'names': sentinel.Names_Query, 37 | 'samples': sentinel.Samples_Query, 38 | } 39 | } 40 | 41 | def test_build_sample( 42 | mock_connection: sqlite3.Connection, 43 | mock_config: dict[str, Any] 44 | ): 45 | extract = db_extract.Extract() 46 | results = list( 47 | extract.series_iter(mock_connection, mock_config) 48 | ) 49 | assert results == [ 50 | model.Series( 51 | name=sentinel.Name, 52 | samples=[ 53 | model.SeriesSample(sentinel.X, sentinel.Y) 54 | ] 55 | ) 56 | ] 57 | assert cast(Mock, mock_connection).execute.mock_calls == [ 58 | call(sentinel.Names_Query), 59 | call(sentinel.Samples_Query, {'name': sentinel.Name}) 60 | ] 61 | -------------------------------------------------------------------------------- /project_1_5/src/acquire.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.5: Acquire data from a SQL extract 4 | """ 5 | import argparse 6 | import csv 7 | import sqlite3 8 | try: 9 | import tomllib # type: ignore[import] 10 | except ImportError: 11 | # Python < 3.11 requires an install of toml==0.10.2 12 | import toml as tomllib # type: ignore[import] 13 | from pathlib import Path 14 | from typing import Any 15 | import sys 16 | 17 | from db_extract import Extract 18 | 19 | def get_options(argv: list[str]) -> argparse.Namespace: 20 | parser = argparse.ArgumentParser() 21 | parser.add_argument("-o", "--output", type=Path) 22 | parser.add_argument("-d", "--db_uri", default="file:example.db") 23 | parser.add_argument("-s", "--schema", type=Path, default="schema.toml") 24 | return parser.parse_args(argv) 25 | 26 | def main(argv: list[str] = sys.argv[1:]) -> None: 27 | options = get_options(argv) 28 | with options.schema.open('rb') as config_file: # 'rb' for tomllib and 3.11 29 | config = tomllib.load(config_file) 30 | print("config", config) 31 | print("db_uri", options.db_uri) 32 | 33 | extract = Extract() 34 | with sqlite3.connect(options.db_uri, uri=True) as connection: 35 | for s in extract.series_iter(connection, config): 36 | print(s) 37 | print(f"series: {s.name}\ncount: {len(s.samples)}") 38 | target = (options.output / s.name).with_suffix(".csv") 39 | print(f"Create {target}") 40 | with target.open('w', newline='') as output_file: 41 | writer = csv.DictWriter(output_file, ['x', 'y']) 42 | writer.writeheader() 43 | writer.writerows( 44 | [{'x': sample.x, 'y': sample.y} for sample in s.samples] 45 | ) 46 | 47 | if __name__ == "__main__": 48 | main() 49 | -------------------------------------------------------------------------------- /project_1_2/tests/steps/cli_interface.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | from behave import * 6 | from urllib.parse import urlparse 7 | from pathlib import Path 8 | import json 9 | import subprocess 10 | import shlex 11 | 12 | @given(u'proper keys are in "{local_path_str}"') 13 | def step_impl(context, local_path_str): 14 | proper_keys = {'username': 'test', 'key': 'some-hex-string'} 15 | temp_path = Path(local_path_str) 16 | with temp_path.open('w') as output: 17 | json.dump(proper_keys, output) 18 | context.temp_path = temp_path 19 | 20 | 21 | @when(u'we run the kaggle download command') 22 | def step_impl(context): 23 | command = "python src/acquire.py -k kaggle.json -o quartet --zip carlmcbrideellis/data-anscombes-quartet" 24 | # print(f"{context.environment=}") 25 | output_path = Path("output.log") 26 | with output_path.open('w') as target: 27 | status = subprocess.run( 28 | shlex.split(command), 29 | env=context.environment, 30 | check=True, text=True, stdout=target, stderr=subprocess.STDOUT) 31 | context.status = status 32 | context.output = output_path.read_text() 33 | output_path.unlink() 34 | # print(f"{context=} {context.status=} {context.output=}") 35 | 36 | 37 | @then(u'log has INFO line with "{log_line}"') 38 | def step_impl(context, log_line): 39 | print(context.output) 40 | assert log_line in context.output, f"No {log_line!r} in output" 41 | 42 | @when(u'we run the html extract command') 43 | def step_impl(context): 44 | command = [ 45 | 'python', 'src/acquire.py', 46 | '-o', 'quartet', 47 | '--page', '$URL', 48 | '--caption', "Anscombe's quartet" 49 | ] 50 | url = f"file://{str(context.path.absolute())}" 51 | command[command.index('$URL')] = url 52 | print(shlex.join(command)) 53 | # etc. with subprocess.run() to execute the command 54 | -------------------------------------------------------------------------------- /project_1_1/src/csv_extract.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.1: Data Acquisition Base Application 4 | """ 5 | from model import RawData, XYPair 6 | from abc import ABC, abstractmethod 7 | import csv 8 | from pathlib import Path 9 | 10 | 11 | class PairBuilder(ABC): 12 | target_class: type[RawData] 13 | 14 | @abstractmethod 15 | def from_row(self, row: list[str]) -> RawData: 16 | ... 17 | 18 | 19 | class Series1Pair(PairBuilder): 20 | target_class = XYPair 21 | 22 | def from_row(self, row: list[str]) -> RawData: 23 | cls = self.target_class 24 | # the rest of the implementation... 25 | return cls(row[0], row[1]) 26 | 27 | class Series2Pair(PairBuilder): 28 | target_class = XYPair 29 | 30 | def from_row(self, row: list[str]) -> RawData: 31 | cls = self.target_class 32 | return cls(row[0], row[2]) 33 | 34 | class Series3Pair(PairBuilder): 35 | target_class = XYPair 36 | 37 | def from_row(self, row: list[str]) -> RawData: 38 | cls = self.target_class 39 | return cls(row[0], row[3]) 40 | 41 | class Series4Pair(PairBuilder): 42 | target_class = XYPair 43 | 44 | def from_row(self, row: list[str]) -> RawData: 45 | cls = self.target_class 46 | return cls(row[4], row[5]) 47 | 48 | class Extract: 49 | def __init__(self, builders: list[PairBuilder]) -> None: 50 | self.builders = builders 51 | 52 | def build_pairs(self, row: list[str]) -> list[RawData]: 53 | return [bldr.from_row(row) for bldr in self.builders] 54 | 55 | EXTRACT_CLASS: type[Extract] = Extract 56 | BUILDER_CLASSES: list[type[PairBuilder]] = [Series1Pair,] 57 | 58 | def test_series1pair() -> None: 59 | from unittest.mock import Mock, sentinel, call 60 | mock_raw_class = Mock() 61 | p1 = Series1Pair() 62 | p1.target_class = mock_raw_class 63 | xypair = p1.from_row([sentinel.X, sentinel.Y]) 64 | assert mock_raw_class.mock_calls == [ 65 | call(sentinel.X, sentinel.Y) 66 | ] 67 | -------------------------------------------------------------------------------- /project_1_5/tests/environment.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.5: Acquire data from a SQL extract 4 | """ 5 | from collections.abc import Iterator 6 | from pathlib import Path 7 | import shutil 8 | import sqlite3 9 | from tempfile import mkdtemp 10 | try: 11 | import tomllib # type: ignore[import] 12 | except ImportError: 13 | # Python < 3.11 requires an install of toml==0.10.2 14 | import toml as tomllib # type: ignore[import] 15 | 16 | from behave import fixture, use_fixture 17 | from behave.runner import Context 18 | 19 | @fixture 20 | def sqlite_database(context: Context) -> Iterator[str]: 21 | # Get Config with SQL to build schema. 22 | config_path = Path.cwd() / "schema.toml" 23 | with config_path.open('rb') as config_file: # 'rb' for tomllib and 3.11 24 | config = tomllib.load(config_file) 25 | create_sql = config['definition']['create'] 26 | context.manipulation_sql = config['manipulation'] 27 | # Build database file. 28 | context.working_path = Path(mkdtemp()) 29 | context.db_path = context.working_path / "test_example.db" 30 | context.db_uri = f"file:{context.db_path}" 31 | context.connection = sqlite3.connect(context.db_uri, uri=True) 32 | for stmt in create_sql: 33 | context.connection.execute(stmt) 34 | context.connection.commit() 35 | 36 | # Yield to allow scenario to run. 37 | yield context.db_uri 38 | 39 | # Delete DB (and output files) after scenario is completed. 40 | context.connection.close() 41 | shutil.rmtree(context.working_path) 42 | assert not context.working_path.exists(), f"Files in {context.working_path} not removed" 43 | 44 | def before_tag(context: Context, tag: str) -> None: 45 | """ 46 | Expands a @fixture to invoke the appropriate generator. 47 | """ 48 | if tag == "fixture.sqlite": 49 | # This will invoke the definition generator. 50 | # It consumes a value before and after the tagged scenario. 51 | use_fixture(sqlite_database, context) 52 | -------------------------------------------------------------------------------- /project_0/README.md: -------------------------------------------------------------------------------- 1 | # Project Zero 2 | 3 | A demonstration of a (relatively) complete 4 | solution with test cases, and documentation. 5 | 6 | ## Installation 7 | 8 | This isn't installed with PIP. Instead, 9 | checkout the Github repository. 10 | 11 | After checkout, use the ``requirements-dev.txt`` 12 | to install the needed development components. 13 | 14 | ```bash 15 | python -m pip install -r requirements-dev.txt 16 | ``` 17 | 18 | The documentation uses PlantUML. 19 | See https://pypi.org/project/sphinxcontrib-plantuml/ 20 | and https://plantuml.com/running for more information. 21 | 22 | ## Demonstration 23 | 24 | To run the application, using the following command: 25 | 26 | ```bash 27 | python src/hello_world.py 28 | ``` 29 | 30 | ## Testing 31 | 32 | To show that it works, run the `tox` command. 33 | 34 | ```bash 35 | tox 36 | ``` 37 | 38 | ## Documentation 39 | 40 | To rebuild the documentation use Sphinx. 41 | 42 | ```bash 43 | cd docs 44 | make html 45 | ``` 46 | 47 | ## Building an Initial Python Environment 48 | 49 | One way to get started is to use Conda to build the environment. 50 | Conda can be downloaded via the Miniconda installer. 51 | See https://docs.conda.io/en/latest/miniconda.html 52 | 53 | Then the following conda commands will populate enough 54 | Python (and tools) to build an environment 55 | 56 | ```bash 57 | conda create -n projectbook --channel=conda-forge python=3.11 58 | conda activate projectbook 59 | conda install --channel=conda-forge pip-tools 60 | ``` 61 | 62 | When the conda environment is active, the name will be 63 | at the start of the prompt. 64 | It might look like this: 65 | 66 | ``` 67 | (projectbook) % 68 | ``` 69 | 70 | It might be more involved, depending on how much information is included 71 | in your prompt. 72 | 73 | With the `pip-compile` command, the list of required packages 74 | in the `pyproject.toml` can be turned into a complete list 75 | of packages to install. 76 | 77 | ```bash 78 | pip-compile --all-extras -o requirements-dev.txt 79 | pip install -r requirements-dev.txt 80 | ``` 81 | 82 | 83 | -------------------------------------------------------------------------------- /project_1_1/tests/steps/fake_cli_steps.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.1: Data Acquisition Base Application 4 | """ 5 | 6 | # These step definitions will "pass" a test run. 7 | # These serve to confirm the Feature file syntax. 8 | 9 | @given(u'the "Anscombe_quartet_data.csv" source file exists') 10 | def step_impl(context): 11 | pass # raise NotImplementedError(u'STEP: Given the "Anscombe_quartet_data.csv" source file exists') 12 | 13 | 14 | @given(u'the "quartet" directory exists') 15 | def step_impl(context): 16 | pass # raise NotImplementedError(u'STEP: Given the "quartet" directory exists') 17 | 18 | 19 | @when(u'we run command "python src/acquire.py -o quartet Anscombe_quartet_data.csv"') 20 | def step_impl(context): 21 | pass # raise NotImplementedError(u'STEP: When we run command "python src/acquire.py -o quartet Anscombe_quartet_data.csv"') 22 | 23 | 24 | @then(u'the "quartet/series_1.json" file exists') 25 | def step_impl(context): 26 | pass # raise NotImplementedError(u'STEP: Then the "quartet/series_1.json" file exists') 27 | 28 | 29 | @then(u'the "quartet/series_2.json" file exists') 30 | def step_impl(context): 31 | pass # raise NotImplementedError(u'STEP: Then the "quartet/series_2.json" file exists') 32 | 33 | 34 | @then(u'the "quartet/series_3.json" file exists') 35 | def step_impl(context): 36 | pass # raise NotImplementedError(u'STEP: Then the "quartet/series_3.json" file exists') 37 | 38 | 39 | @then(u'the "quartet/series_1.json" file starts with \'{"x": "10.0", "y": "8.04"}\'') 40 | def step_impl(context): 41 | pass # raise NotImplementedError(u'STEP: Then the "quartet/series_1.json" file starts with \'{"x": "10.0", "y": "8.04"}\'') 42 | 43 | 44 | @given(u'the "Anscombe_quartet_data.csv" source file does not exist') 45 | def step_impl(context): 46 | pass # raise NotImplementedError(u'STEP: Given the "Anscombe_quartet_data.csv" source file does not exist') 47 | 48 | 49 | @then(u'the log contains "File not found: Anscombe_quartet_data.csv"') 50 | def step_impl(context): 51 | pass # raise NotImplementedError(u'STEP: Then the log contains "File not found: Anscombe_quartet_data.csv"') 52 | 53 | -------------------------------------------------------------------------------- /project_1_2/src/acquire.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | import argparse 6 | import csv 7 | import json 8 | import logging 9 | import os 10 | from pathlib import Path 11 | import sys 12 | import zipfile 13 | from model import * 14 | from kaggle_client import RestAccess 15 | 16 | logger = logging.getLogger("acquire") 17 | 18 | def get_options(argv: list[str]) -> argparse.Namespace: 19 | base_url = os.environ.get("ACQUIRE_BASE_URL", "https://www.kaggle.com") 20 | parser = argparse.ArgumentParser() 21 | parser.add_argument("-o", "--output", type=Path) 22 | parser.add_argument("-k", "--key", type=Path) 23 | parser.add_argument("-z", "--zip", default="carlmcbrideellis/data-anscombes-quartet") 24 | parser.add_argument("-b", "--baseurl", default=base_url) 25 | return parser.parse_args(argv) 26 | 27 | def main(argv: list[str] | None = None) -> None: 28 | options = get_options(argv or sys.argv[1:]) 29 | if options.key: 30 | with options.key.open() as key_file: 31 | credentials = json.load(key_file) 32 | else: 33 | logger.error("No credentials file provided on command line.") 34 | sys.exit(2) 35 | 36 | access = RestAccess(credentials) 37 | base_url = options.baseurl 38 | ref = options.zip 39 | download_url = f"{base_url}/api/v1/datasets/download/{ref}" 40 | logger.info("Downloading %s", download_url) 41 | member_name = "Anscombe_quartet_data.csv" 42 | logger.info("Opening %s", member_name) 43 | 44 | zip_data = access.get_zip(download_url) 45 | if member_name not in zip_data.namelist(): 46 | logger.error("Could not find %s in %s", member_name, zip_data.infolist()) 47 | count = 0 48 | zp = zipfile.Path(zip_data, member_name) 49 | with zp.open('r') as quartet_data: 50 | reader = csv.DictReader(quartet_data) 51 | logger.info("header: %s", reader.fieldnames) 52 | for line in reader: 53 | logger.debug(line) 54 | count += 1 55 | logger.info("count: %d", count) 56 | 57 | if __name__ == "__main__": 58 | logging.basicConfig(level=logging.INFO) 59 | main() 60 | -------------------------------------------------------------------------------- /project_1_5/tests/steps/db_interface.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.5: Acquire data from a SQL extract 4 | """ 5 | from behave import given, when, then 6 | from ast import literal_eval 7 | import subprocess 8 | import shlex 9 | from pathlib import Path 10 | import textwrap 11 | 12 | @given(u'a series named "{name}"') 13 | def step_impl(context, name): 14 | insert_series = context.manipulation_sql['insert_series'] 15 | cursor = context.connection.execute( 16 | insert_series, 17 | {'series_id': 99, 'name': name} 18 | ) 19 | # DEBUG: print(f"Loaded series {name}: {cursor.rowcount}") 20 | context.connection.commit() 21 | 22 | 23 | @given(u'sample values "{list_of_pairs}"') 24 | def step_impl(context, list_of_pairs): 25 | pairs = literal_eval(list_of_pairs) 26 | insert_values = context.manipulation_sql['insert_values'] 27 | for seq, row in enumerate(pairs): 28 | cursor = context.connection.execute( 29 | insert_values, 30 | {'series_id': 99, 'sequence': seq, 'x': row[0], 'y': row[1]} 31 | ) 32 | # DEBUG: print(f"Loaded pairs {row}", cursor.rowcount) 33 | context.connection.commit() 34 | 35 | 36 | @when(u'we run the database extract command with the test fixture database') 37 | def step_impl(context): 38 | output_path = context.working_path / "quartet" 39 | output_path.mkdir() 40 | command = f"python src/acquire.py -o '{output_path!s}' --db_uri '{context.db_uri!s}' --schema schema.toml" 41 | output_path = Path("output.log") 42 | with output_path.open('w') as target: 43 | status = subprocess.run( 44 | shlex.split(command), 45 | # check=True, # Makes debugging awkward 46 | text=True, stdout=target, stderr=subprocess.STDOUT) 47 | context.status = status 48 | context.output = output_path.read_text() 49 | output_path.unlink() 50 | # print(f"{context=} {context.status=} {context.output=}") 51 | 52 | 53 | @then(u'log has INFO line with "{log_line}"') 54 | def step_impl(context, log_line): 55 | print("Log Output:") 56 | print(textwrap.indent(context.output, '| ')) 57 | assert log_line in context.output, f"No {log_line!r} in output" 58 | 59 | 60 | @then(u'output directory has file named "{output_name}"') 61 | def step_impl(context, output_name): 62 | output_path = context.working_path / output_name 63 | assert output_path.exists() and output_path.is_file(), f"No {output_name} file found" 64 | -------------------------------------------------------------------------------- /project_0/src/project_0.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: project-0 3 | Version: 1.0.0 4 | Summary: Real-World Python Projects -- Project 0. 5 | Author-email: Author 6 | Classifier: Natural Language :: English 7 | Classifier: Operating System :: OS Independent 8 | Classifier: Programming Language :: Python 9 | Requires-Python: >=3.11 10 | Description-Content-Type: text/markdown 11 | Provides-Extra: dev 12 | Provides-Extra: test 13 | 14 | # Project Zero 15 | 16 | A demonstration of a (relatively) complete 17 | solution with test cases, and documentation. 18 | 19 | ## Installation 20 | 21 | This isn't installed with PIP. Instead, 22 | checkout the Github repository. 23 | 24 | After checkout, use the ``requirements-dev.txt`` 25 | to install the needed development components. 26 | 27 | ```bash 28 | python -m pip install -r requirements-dev.txt 29 | ``` 30 | 31 | The documentation uses PlantUML. 32 | See https://pypi.org/project/sphinxcontrib-plantuml/ 33 | and https://plantuml.com/running for more information. 34 | 35 | ## Demonstration 36 | 37 | To run the application, using the following command: 38 | 39 | ```bash 40 | python src/hello_world.py 41 | ``` 42 | 43 | ## Testing 44 | 45 | To show that it works, run the `tox` command. 46 | 47 | ```bash 48 | tox 49 | ``` 50 | 51 | ## Documentation 52 | 53 | To rebuild the documentation use Sphinx. 54 | 55 | ```bash 56 | cd docs 57 | make html 58 | ``` 59 | 60 | ## Building an Initial Python Environment 61 | 62 | One way to get started is to use Conda to build the environment. 63 | Conda can be downloaded via the Miniconda installer. 64 | See https://docs.conda.io/en/latest/miniconda.html 65 | 66 | Then the following conda commands will populate enough 67 | Python (and tools) to build an environment 68 | 69 | ```bash 70 | conda create -n projectbook --channel=conda-forge python=3.11 71 | conda activate projectbook 72 | conda install --channel=conda-forge pip-tools 73 | ``` 74 | 75 | When the conda environment is active, the name will be 76 | at the start of the prompt. 77 | It might look like this: 78 | 79 | ``` 80 | (projectbook) % 81 | ``` 82 | 83 | It might be more involved, depending on how much information is included 84 | in your prompt. 85 | 86 | With the `pip-compile` command, the list of required packages 87 | in the `pyproject.toml` can be turned into a complete list 88 | of packages to install. 89 | 90 | ```bash 91 | pip-compile --all-extras -o requirements-dev.txt 92 | pip install -r requirements-dev.txt 93 | ``` 94 | 95 | 96 | -------------------------------------------------------------------------------- /project_0/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --all-extras --output-file=requirements-dev.txt 6 | # 7 | alabaster==0.7.13 8 | # via sphinx 9 | babel==2.12.1 10 | # via sphinx 11 | behave==1.2.6 12 | # via project-0 (pyproject.toml) 13 | build==0.10.0 14 | # via pip-tools 15 | cachetools==5.3.1 16 | # via tox 17 | certifi==2023.7.22 18 | # via requests 19 | chardet==5.2.0 20 | # via tox 21 | charset-normalizer==3.1.0 22 | # via requests 23 | click==8.1.3 24 | # via pip-tools 25 | colorama==0.4.6 26 | # via tox 27 | distlib==0.3.7 28 | # via virtualenv 29 | docutils==0.20.1 30 | # via sphinx 31 | filelock==3.12.2 32 | # via 33 | # tox 34 | # virtualenv 35 | idna==3.4 36 | # via requests 37 | imagesize==1.4.1 38 | # via sphinx 39 | iniconfig==2.0.0 40 | # via pytest 41 | jinja2==3.1.2 42 | # via sphinx 43 | markupsafe==2.1.3 44 | # via jinja2 45 | packaging==23.1 46 | # via 47 | # build 48 | # pyproject-api 49 | # pytest 50 | # sphinx 51 | # tox 52 | parse==1.19.0 53 | # via 54 | # behave 55 | # parse-type 56 | parse-type==0.6.0 57 | # via behave 58 | pip-tools==7.3.0 59 | # via project-0 (pyproject.toml) 60 | platformdirs==3.10.0 61 | # via 62 | # tox 63 | # virtualenv 64 | pluggy==1.2.0 65 | # via 66 | # pytest 67 | # tox 68 | pygments==2.15.1 69 | # via sphinx 70 | pyproject-api==1.5.4 71 | # via tox 72 | pyproject-hooks==1.0.0 73 | # via build 74 | pytest==7.4.0 75 | # via project-0 (pyproject.toml) 76 | requests==2.31.0 77 | # via sphinx 78 | six==1.16.0 79 | # via 80 | # behave 81 | # parse-type 82 | snowballstemmer==2.2.0 83 | # via sphinx 84 | sphinx==7.2.2 85 | # via 86 | # project-0 (pyproject.toml) 87 | # sphinxcontrib-plantuml 88 | sphinxcontrib-applehelp==1.0.4 89 | # via sphinx 90 | sphinxcontrib-devhelp==1.0.2 91 | # via sphinx 92 | sphinxcontrib-htmlhelp==2.0.1 93 | # via sphinx 94 | sphinxcontrib-jsmath==1.0.1 95 | # via sphinx 96 | sphinxcontrib-plantuml==0.25 97 | # via project-0 (pyproject.toml) 98 | sphinxcontrib-qthelp==1.0.3 99 | # via sphinx 100 | sphinxcontrib-serializinghtml==1.1.5 101 | # via sphinx 102 | tox==4.9.0 103 | # via project-0 (pyproject.toml) 104 | urllib3==2.0.7 105 | # via requests 106 | virtualenv==20.24.3 107 | # via tox 108 | wheel==0.40.0 109 | # via pip-tools 110 | 111 | # The following packages are considered to be unsafe in a requirements file: 112 | # pip 113 | # setuptools 114 | -------------------------------------------------------------------------------- /project_1_2/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --all-extras --output-file=requirements-dev.txt 6 | # 7 | alabaster==0.7.13 8 | # via sphinx 9 | babel==2.12.1 10 | # via sphinx 11 | beautifulsoup4==4.11.1 12 | # via project-1-2 (pyproject.toml) 13 | behave==1.2.6 14 | # via project-1-2 (pyproject.toml) 15 | bottle==0.12.23 16 | # via project-1-2 (pyproject.toml) 17 | build==0.10.0 18 | # via pip-tools 19 | cachetools==5.3.1 20 | # via tox 21 | certifi==2023.7.22 22 | # via requests 23 | chardet==5.2.0 24 | # via tox 25 | charset-normalizer==2.1.1 26 | # via requests 27 | click==8.1.7 28 | # via pip-tools 29 | colorama==0.4.6 30 | # via tox 31 | distlib==0.3.7 32 | # via virtualenv 33 | docutils==0.20.1 34 | # via sphinx 35 | filelock==3.12.2 36 | # via 37 | # tox 38 | # virtualenv 39 | idna==3.4 40 | # via requests 41 | imagesize==1.4.1 42 | # via sphinx 43 | iniconfig==2.0.0 44 | # via pytest 45 | jinja2==3.1.2 46 | # via sphinx 47 | markupsafe==2.1.3 48 | # via jinja2 49 | packaging==23.1 50 | # via 51 | # build 52 | # pyproject-api 53 | # pytest 54 | # sphinx 55 | # tox 56 | parse==1.19.1 57 | # via 58 | # behave 59 | # parse-type 60 | parse-type==0.6.2 61 | # via behave 62 | pip-tools==7.3.0 63 | # via project-1-2 (pyproject.toml) 64 | platformdirs==3.10.0 65 | # via 66 | # tox 67 | # virtualenv 68 | pluggy==1.2.0 69 | # via 70 | # pytest 71 | # tox 72 | pygments==2.16.1 73 | # via sphinx 74 | pyproject-api==1.5.4 75 | # via tox 76 | pyproject-hooks==1.0.0 77 | # via build 78 | pytest==7.4.0 79 | # via project-1-2 (pyproject.toml) 80 | requests==2.28.1 81 | # via 82 | # project-1-2 (pyproject.toml) 83 | # sphinx 84 | six==1.16.0 85 | # via 86 | # behave 87 | # parse-type 88 | snowballstemmer==2.2.0 89 | # via sphinx 90 | soupsieve==2.4.1 91 | # via beautifulsoup4 92 | sphinx==7.2.2 93 | # via 94 | # project-1-2 (pyproject.toml) 95 | # sphinxcontrib-applehelp 96 | # sphinxcontrib-devhelp 97 | # sphinxcontrib-htmlhelp 98 | # sphinxcontrib-plantuml 99 | # sphinxcontrib-qthelp 100 | # sphinxcontrib-serializinghtml 101 | sphinxcontrib-applehelp==1.0.7 102 | # via sphinx 103 | sphinxcontrib-devhelp==1.0.5 104 | # via sphinx 105 | sphinxcontrib-htmlhelp==2.0.4 106 | # via sphinx 107 | sphinxcontrib-jsmath==1.0.1 108 | # via sphinx 109 | sphinxcontrib-plantuml==0.25 110 | # via project-1-2 (pyproject.toml) 111 | sphinxcontrib-qthelp==1.0.6 112 | # via sphinx 113 | sphinxcontrib-serializinghtml==1.1.8 114 | # via sphinx 115 | tox==4.9.0 116 | # via project-1-2 (pyproject.toml) 117 | urllib3==1.26.18 118 | # via requests 119 | virtualenv==20.24.3 120 | # via tox 121 | wheel==0.41.1 122 | # via pip-tools 123 | 124 | # The following packages are considered to be unsafe in a requirements file: 125 | # pip 126 | # setuptools 127 | -------------------------------------------------------------------------------- /project_1_2/tests/mock_kaggle_bottle.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | """ 6 | Example headers from datasets/list request 7 | 8 | Query: GET https://www.kaggle.com/api/v1/datasets/list?user=carlmcbrideellis&page=1 9 | 'Content-Type': 'application/json' 10 | 'Date': 'Thu, 26 Jan 2023 16:05:40 GMT' 11 | 'Access-Control-Allow-Credentials': 'true' 12 | 'Content-Encoding': 'br' 13 | 'Set-Cookie': 'ka_sessionid=12b3101f27c9e87d7199fcc570ae6bda; max-age=2626560; path=/, GCLB=CLzXifGh49P1QQ; path=/; HttpOnly' 14 | 'Transfer-Encoding': 'chunked' 15 | 'Vary': 'Accept-Encoding' 16 | 'Turbolinks-Location': 'https://www.kaggle.com/api/v1/datasets/list?user=carlmcbrideellis&page=1' 17 | 'X-Kaggle-MillisecondsElapsed': '364' 18 | 'X-Kaggle-RequestId': '6779bd23d387c911743e8ba9f4b657dd' 19 | 'X-Kaggle-ApiVersion': '1.5.12' 20 | 'X-Frame-Options': 'SAMEORIGIN' 21 | 'Strict-Transport-Security': 'max-age=63072000; includeSubDomains; preload' 22 | 'Content-Security-Policy': "object-src 'none'; script-src 'nonce-5ExpnWEdmsjwQc3OnS2Wug==' 'report-sample' 'unsafe-inline' 'unsafe-eval' 'strict- [...]" 23 | 'X-Content-Type-Options': 'nosniff' 24 | 'Referrer-Policy': 'strict-origin-when-cross-origin' 25 | 'Via': '1.1 google' 26 | 'Alt-Svc': 'h3=":443"; ma=2592000,h3-29=":443"; ma=2592000' 27 | 28 | """ 29 | import csv 30 | import io 31 | import json 32 | import zipfile 33 | from bottle import route, run, request, HTTPResponse 34 | 35 | @route('/api/v1/datasets/list') 36 | def datasets_list(name): 37 | page = request.query.page or '1' 38 | if page == '1': 39 | mock_body = [ 40 | {'title': 'example1'}, 41 | {'title': 'example2'} 42 | ] 43 | response = HTTPResponse( 44 | body=json.dumps(mock_body), 45 | status=200, 46 | headers={'Content-Type': 'application/json'} 47 | ) 48 | elif page == '2': 49 | response = HTTPResponse( 50 | status=429, 51 | headers={'Retry-After': '30'} 52 | ) 53 | else: 54 | response = HTTPResponse( 55 | body=json.dumps([]), 56 | status=200, 57 | headers={'Content-Type': 'application/json'} 58 | ) 59 | return response 60 | 61 | @route('/api/v1/datasets/download//') 62 | def datasets_download(ownerSlug, datasetSlug): 63 | if ownerSlug == "carlmcbrideellis" and datasetSlug == "data-anscombes-quartet": 64 | zip_content = io.BytesIO() 65 | with zipfile.ZipFile(zip_content, 'w') as archive: 66 | target_path = zipfile.Path(archive, 'Anscombe_quartet_data.csv') 67 | with target_path.open('w') as member_file: 68 | writer = csv.writer(member_file) 69 | writer.writerow(['mock', 'data']) 70 | writer.writerow(['line', 'two']) 71 | response = HTTPResponse( 72 | body=zip_content.getvalue(), 73 | status=200, 74 | headers={"Content-Type": "application/zip"} 75 | ) 76 | return response 77 | # All other requests... 78 | response = HTTPResponse( 79 | status=404 80 | ) 81 | return response 82 | 83 | if __name__ == "__main__": 84 | run(host='127.0.0.1', port=8080) 85 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Search — Project Zero 1.0 documentation 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 |
33 |
34 |
35 | 36 | 37 |
38 | 39 |

Search

40 | 41 | 49 | 50 | 51 |

52 | Searching for multiple words only shows matches that contain 53 | all words. 54 |

55 | 56 | 57 |
58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 |
66 | 67 |
68 | 69 | 70 |
71 | 72 |
73 |
74 | 110 |
111 |
112 | 120 | 121 | 122 | 123 | 124 | 125 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/py-modindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Python Module Index — Project Zero 1.0 documentation 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 28 | 29 | 30 | 31 | 32 | 33 |
34 |
35 |
36 | 37 | 38 |
39 | 40 | 41 |

Python Module Index

42 | 43 |
44 | h 45 |
46 | 47 |
Anscombe's quartet 28 |
Skip titlesIn th tags
Keep thisData
And thisData
48 | 49 | 51 | 52 | 53 | 56 |
 
50 | h
54 | hello_world 55 |
57 | 58 | 59 | 60 | 61 | 62 | 63 | 109 |
110 | 111 | 119 | 120 | 121 | 122 | 123 | 124 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/overview.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Overview — Project Zero 1.0 documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |
30 |
31 |
32 | 33 | 34 |
35 | 36 |
37 |

Overview

38 |

This application writes a cheerful greeting.

39 |
40 | 41 | 42 |
43 | 44 |
45 |
46 | 94 |
95 |
96 | 107 | 108 | 109 | 110 | 111 | 112 | -------------------------------------------------------------------------------- /project_1_4/src/build_db.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.4: Local SQL Database 4 | """ 5 | import sqlite3 6 | try: 7 | import tomllib # type: ignore[import] 8 | except ImportError: 9 | # Python < 3.11 requires an install of toml==0.10.2 10 | import toml as tomllib # type: ignore[import] 11 | from pathlib import Path 12 | import csv 13 | from typing import Any 14 | from dataclasses import dataclass, asdict 15 | 16 | def execute_statements( 17 | connection: sqlite3.Connection, 18 | statements: list[str] | str) -> None: 19 | match statements: 20 | case list(str): 21 | pass 22 | case str: 23 | statements = [statements] 24 | 25 | for statement in statements: 26 | try: 27 | cursor = connection.execute(statement) 28 | # Queries have rows... 29 | for row in cursor: 30 | print(row) 31 | except sqlite3.OperationalError: 32 | print("FAILURE") 33 | print(statement) 34 | raise 35 | connection.commit() 36 | 37 | SERIES_ROWS = [ 38 | {"series_id": 1, "name": "Series I"}, 39 | {"series_id": 2, "name": "Series II"}, 40 | {"series_id": 3, "name": "Series III"}, 41 | {"series_id": 4, "name": "Series IV"}, 42 | ] 43 | 44 | @dataclass 45 | class XYPair: 46 | x: str 47 | y: str 48 | 49 | def series_1(row: dict[str, str]) -> XYPair: 50 | return XYPair( 51 | x=row['x123'], 52 | y=row['y1'], 53 | ) 54 | 55 | def series_2(row: dict[str, str]) -> XYPair: 56 | return XYPair( 57 | x=row['x123'], 58 | y=row['y2'], 59 | ) 60 | 61 | def series_3(row: dict[str, str]) -> XYPair: 62 | return XYPair( 63 | x=row['x123'], 64 | y=row['y3'], 65 | ) 66 | 67 | def series_4(row: dict[str, str]) -> XYPair: 68 | return XYPair( 69 | x=row['x4'], 70 | y=row['y4'] 71 | ) 72 | 73 | SERIES_BUILDERS = [ 74 | (1, series_1), 75 | (2, series_2), 76 | (3, series_3), 77 | (4, series_4) 78 | ] 79 | 80 | def load_values( 81 | connection: sqlite3.Connection, 82 | insert_values_SQL: str, 83 | reader: csv.DictReader) -> None: 84 | 85 | for sequence, row in enumerate(reader): 86 | for series_id, extractor in SERIES_BUILDERS: 87 | param_values = ( 88 | asdict(extractor(row)) | {"series_id": series_id, "sequence": sequence} 89 | ) 90 | connection.execute(insert_values_SQL, param_values) 91 | connection.commit() 92 | 93 | def schema_build_load( 94 | connection: sqlite3.Connection, 95 | config: dict[str, Any], 96 | data_source: Path 97 | ) -> None: 98 | execute_statements(connection, config['definition']['drop']) 99 | execute_statements(connection, config['definition']['create']) 100 | 101 | insert_series_SQL = config['manipulation']['insert_series'] 102 | for series in SERIES_ROWS: 103 | connection.execute(insert_series_SQL, series) 104 | connection.commit() 105 | 106 | insert_values_SQL = config['manipulation']['insert_values'] 107 | with data_source.open() as data_file: 108 | reader = csv.DictReader(data_file) 109 | load_values(connection, insert_values_SQL, reader) 110 | 111 | execute_statements(connection, config['query']['summary']) 112 | # execute_statements(connection, config['query']['detail']) 113 | 114 | def main(): 115 | config_path = Path.cwd() / "schema.toml" 116 | with config_path.open() as config_file: 117 | config = tomllib.load(config_file) 118 | 119 | data_path = Path.cwd().parent / "data" / "Anscombe_quartet_data.csv" 120 | with sqlite3.connect("file:example.db", uri=True) as connection: 121 | schema_build_load(connection, config, data_path) 122 | 123 | 124 | if __name__ == "__main__": 125 | main() 126 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Python Real-World Projects 2 | 3 | Python Real-World Projects 4 | 5 | This is the code repository for [Python Real-World Projects](https://www.packtpub.com/product/python-real-world-projects/9781803246765?utm_source=github&utm_medium=repository&utm_campaign=), published by Packt. 6 | 7 | **Craft your Python portfolio with deployable applications** 8 | 9 | ## What is this book about? 10 | Amplify your developer journey by curating a dynamic project portfolio that outshines traditional resumes. Delve into the Python realm through immersive projects, mastering core concepts while constructing comprehensive modules and applications. From data acquisition prowess to impactful data visualization, Python Real-World Projects arms you with essential skills to beat the competition. 11 | 12 | This book covers the following exciting features: 13 | * Explore core deliverables for an application including documentation and test cases 14 | * Discover approaches to data acquisition such as file processing, RESTful APIs, and SQL queries 15 | * Create a data inspection notebook to establish properties of source data 16 | * Write applications to validate, clean, convert, and normalize source data 17 | * Use foundational graphical analysis techniques to visualize data 18 | * Build basic univariate and multivariate statistical analysis tools 19 | * Create reports from raw data using JupyterLab publication tools 20 | 21 | If you feel this book is for you, get your [copy](https://www.amazon.com/dp/) today! 22 | 23 | https://www.packtpub.com/ 25 | 26 | ## Instructions and Navigations 27 | All of the code is organized into folders. For example, Chapter02. 28 | 29 | The code will look like the following: 30 | ``` 31 | print("Hello, World!") 32 | ``` 33 | 34 | **Following is what you need for this book:** 35 | This book is for beginner-to-intermediate level Python programmers looking to enhance their resume by adding a portfolio of 12 practical projects. A basic understanding of the Python language and its aligned technologies is a must. The book helps you polish your Python skills and project-building prowess without delving into basic Python fundamentals. 36 | 37 | With the following software and hardware list you can run all code files present in the book (Chapter 1-17). 38 | ### Software and Hardware List 39 | | Chapter | Software required | OS required | 40 | | -------- | ------------------------------------ | ----------------------------------- | 41 | | 1-17 | Python 3 | Windows, Mac OS X, and Linux (Any) | 42 | 43 | 44 | 45 | 46 | ### Related products 47 | * Causal Inference and Discovery in Python [[Packt]](https://www.packtpub.com/product/causal-inference-and-discovery-in-python/9781804612989?utm_source=github&utm_medium=repository&utm_campaign=) [[Amazon]](https://www.amazon.com/dp/1804612987) 48 | 49 | * Python for Geeks [[Packt]](https://www.packtpub.com/product/python-for-geeks/9781801070119?utm_source=github&utm_medium=repository&utm_campaign=) [[Amazon]](https://www.amazon.com/dp/1801070113) 50 | 51 | * Python Data Analysis - Third Edition [[Packt]](https://www.packtpub.com/product/python-data-analysis-third-edition/9781789955248?utm_source=github&utm_medium=repository&utm_campaign=) [[Amazon]](https://www.amazon.com/dp/1789955246) 52 | 53 | 54 | ## Get to Know the Author 55 | **Steven Lott** 56 | has been programming since computers were large, expensive, and rare. Working for decades in high tech has given him exposure to a lot of ideas and techniques, some bad, but most are helpful to others. Since the 1990s, Steven has been engaged with Python, crafting an array of indispensable tools and applications. His profound expertise has led him to contribute significantly to Packt Publishing, penning notable titles like "Mastering Object-Oriented," "The Modern Python Cookbook," and "Functional Python Programming." A self-proclaimed technomad, Steven's unconventional lifestyle sees him residing on a boat, often anchored along the vibrant east coast of the US. He tries to live by the words “Don’t come home until you have a story.” 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/design.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Design — Project Zero 1.0 documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |
30 |
31 |
32 | 33 | 34 |
35 | 36 |
37 |

Design

38 |

The application has the following 39 | structure:

40 |

41 | class hello_world.greeting << (F, white) >> {
 42 | }
 43 | hide hello_world.greeting members
 44 | 
 45 | class hello_world.main << (F, white) >> {
 46 | }
 47 | hide hello_world.main members
 48 | 
 49 | class hello_world.get_options << (F, white) >> {
 50 | }
 51 | hide hello_world.get_options members
 52 | 
 53 | hello_world.main -> hello_world.get_options : "calls"
 54 | hello_world.main -> hello_world.greeting : "calls" 55 |

56 |

There are three functions that interact to produce 57 | the greeting.

58 |
59 | 60 | 61 |
62 | 63 |
64 |
65 | 113 |
114 |
115 | 126 | 127 | 128 | 129 | 130 | 131 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Index — Project Zero 1.0 documentation 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |
27 |
28 |
29 | 30 | 31 |
32 | 33 | 34 |

Index

35 | 36 |
37 | G 38 | | H 39 | | M 40 | 41 |
42 |

G

43 | 44 | 48 | 52 |
53 | 54 |

H

55 | 56 | 65 |
    57 |
  • 58 | hello_world 59 | 60 |
  • 64 |
66 | 67 |

M

68 | 69 | 73 | 82 |
    74 |
  • 75 | module 76 | 77 |
  • 81 |
83 | 84 | 85 | 86 |
87 | 88 |
89 |
90 | 136 |
137 |
138 | 146 | 147 | 148 | 149 | 150 | 151 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Welcome to Project Zero’s documentation! — Project Zero 1.0 documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 |
29 |
30 |
31 | 32 | 33 |
34 | 35 |
36 |

Welcome to Project Zero’s documentation!

37 |
38 |

Contents:

39 | 44 |
45 |
46 |
47 |

Indices and tables

48 | 53 |
54 | 55 | 56 |
57 | 58 |
59 |
60 | 107 |
108 |
109 | 120 | 121 | 122 | 123 | 124 | 125 | -------------------------------------------------------------------------------- /project_1_2/tests/mock_kaggle_server.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | """ 6 | Example headers from datasets/list request 7 | 8 | Query: GET https://www.kaggle.com/api/v1/datasets/list?user=carlmcbrideellis&page=1 9 | 'Content-Type': 'application/json' 10 | 'Date': 'Thu, 26 Jan 2023 16:05:40 GMT' 11 | 'Access-Control-Allow-Credentials': 'true' 12 | 'Content-Encoding': 'br' 13 | 'Set-Cookie': 'ka_sessionid=12b3101f27c9e87d7199fcc570ae6bda; max-age=2626560; path=/, GCLB=CLzXifGh49P1QQ; path=/; HttpOnly' 14 | 'Transfer-Encoding': 'chunked' 15 | 'Vary': 'Accept-Encoding' 16 | 'Turbolinks-Location': 'https://www.kaggle.com/api/v1/datasets/list?user=carlmcbrideellis&page=1' 17 | 'X-Kaggle-MillisecondsElapsed': '364' 18 | 'X-Kaggle-RequestId': '6779bd23d387c911743e8ba9f4b657dd' 19 | 'X-Kaggle-ApiVersion': '1.5.12' 20 | 'X-Frame-Options': 'SAMEORIGIN' 21 | 'Strict-Transport-Security': 'max-age=63072000; includeSubDomains; preload' 22 | 'Content-Security-Policy': "object-src 'none'; script-src 'nonce-5ExpnWEdmsjwQc3OnS2Wug==' 'report-sample' 'unsafe-inline' 'unsafe-eval' 'strict- [...]" 23 | 'X-Content-Type-Options': 'nosniff' 24 | 'Referrer-Policy': 'strict-origin-when-cross-origin' 25 | 'Via': '1.1 google' 26 | 'Alt-Svc': 'h3=":443"; ma=2592000,h3-29=":443"; ma=2592000' 27 | 28 | """ 29 | import csv 30 | from http.server import HTTPServer, BaseHTTPRequestHandler 31 | import io 32 | import json 33 | from pathlib import Path 34 | import urllib.parse 35 | import zipfile 36 | 37 | class KaggleServer(BaseHTTPRequestHandler): 38 | """ 39 | See https://github.com/Kaggle/kaggle-api/blob/master/KaggleSwagger.yaml 40 | 41 | JSON datasets look at only a few attributes: title, ref, url, totalBytes 42 | """ 43 | def do_GET(self): 44 | if self.path.startswith("/api/v1/datasets/list"): 45 | parsed = urllib.parse.urlparse(self.path) 46 | query = urllib.parse.parse_qs(parsed.query) 47 | page = int(query.get('page', ['1'])[0]) 48 | if page == 1: 49 | self.send_response(200,'OK') 50 | self.send_header("Content-Type", "application/json") 51 | self.end_headers() 52 | str_content = json.dumps([{'title': 'example1'}, {'title': 'example2'}]) 53 | self.wfile.write(str_content.encode('utf-8')) 54 | elif page == 2: 55 | self.send_response(429, "Too Many Requests") 56 | self.send_header("Retry-After", "30") 57 | self.end_headers() 58 | else: 59 | # Final 60 | self.send_response(200, 'OK') 61 | self.send_header("Content-Type", "application/json") 62 | self.end_headers() 63 | self.wfile.write(b'[]') 64 | elif self.path.startswith("/api/v1/datasets/download"): 65 | parsed = urllib.parse.urlparse(self.path) 66 | path = parsed.path.split('/') 67 | if path[-2:] == ["carlmcbrideellis", "data-anscombes-quartet"]: 68 | self.send_response(200,'OK') 69 | self.send_header("Content-Type", "application/zip") 70 | self.end_headers() 71 | zip_content = io.BytesIO() 72 | with zipfile.ZipFile(zip_content, 'w') as archive: 73 | target_path = zipfile.Path(archive, 'Anscombe_quartet_data.csv') 74 | with target_path.open('w') as member_file: 75 | writer = csv.writer(member_file) 76 | writer.writerow(['mock', 'data']) 77 | writer.writerow(['line', 'two']) 78 | # write ZIP Archive to wfile 79 | self.wfile.write(zip_content.getvalue()) 80 | self.wfile.flush() 81 | self.close_connection = True 82 | else: 83 | self.send_error(404, f"Unknown dataset {path}") 84 | elif self.path.startswith("/api/v1/datasets/metadata"): 85 | parsed = urllib.parse.urlparse(self.path) 86 | path = parsed.path.split('/') 87 | if path[-2:] == ["carlmcbrideellis", "data-anscombes-quartet"]: 88 | self.send_header("Content-Type", "application/json") 89 | self.end_headers() 90 | str_content = json.dumps({"name": "Data: Anscombe's quartet"}) 91 | self.wfile.write(str_content.encode('utf-8')) 92 | else: 93 | self.send_error(404, f"Unknown dataset {path}") 94 | else: 95 | self.log_error("Unknown path: %s", self.path) 96 | self.send_error(404, f"Unknown path {self.path}") 97 | 98 | if __name__ == "__main__": 99 | server = HTTPServer(("127.0.0.1", 8080), KaggleServer) 100 | server.serve_forever() 101 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/_sphinx_javascript_frameworks_compat.js: -------------------------------------------------------------------------------- 1 | /* 2 | * _sphinx_javascript_frameworks_compat.js 3 | * ~~~~~~~~~~ 4 | * 5 | * Compatability shim for jQuery and underscores.js. 6 | * 7 | * WILL BE REMOVED IN Sphinx 6.0 8 | * xref RemovedInSphinx60Warning 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | 18 | /** 19 | * small helper function to urldecode strings 20 | * 21 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL 22 | */ 23 | jQuery.urldecode = function(x) { 24 | if (!x) { 25 | return x 26 | } 27 | return decodeURIComponent(x.replace(/\+/g, ' ')); 28 | }; 29 | 30 | /** 31 | * small helper function to urlencode strings 32 | */ 33 | jQuery.urlencode = encodeURIComponent; 34 | 35 | /** 36 | * This function returns the parsed url parameters of the 37 | * current request. Multiple values per key are supported, 38 | * it will always return arrays of strings for the value parts. 39 | */ 40 | jQuery.getQueryParameters = function(s) { 41 | if (typeof s === 'undefined') 42 | s = document.location.search; 43 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 44 | var result = {}; 45 | for (var i = 0; i < parts.length; i++) { 46 | var tmp = parts[i].split('=', 2); 47 | var key = jQuery.urldecode(tmp[0]); 48 | var value = jQuery.urldecode(tmp[1]); 49 | if (key in result) 50 | result[key].push(value); 51 | else 52 | result[key] = [value]; 53 | } 54 | return result; 55 | }; 56 | 57 | /** 58 | * highlight a given string on a jquery object by wrapping it in 59 | * span elements with the given class name. 60 | */ 61 | jQuery.fn.highlightText = function(text, className) { 62 | function highlight(node, addItems) { 63 | if (node.nodeType === 3) { 64 | var val = node.nodeValue; 65 | var pos = val.toLowerCase().indexOf(text); 66 | if (pos >= 0 && 67 | !jQuery(node.parentNode).hasClass(className) && 68 | !jQuery(node.parentNode).hasClass("nohighlight")) { 69 | var span; 70 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 71 | if (isInSVG) { 72 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 73 | } else { 74 | span = document.createElement("span"); 75 | span.className = className; 76 | } 77 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 78 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 79 | document.createTextNode(val.substr(pos + text.length)), 80 | node.nextSibling)); 81 | node.nodeValue = val.substr(0, pos); 82 | if (isInSVG) { 83 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 84 | var bbox = node.parentElement.getBBox(); 85 | rect.x.baseVal.value = bbox.x; 86 | rect.y.baseVal.value = bbox.y; 87 | rect.width.baseVal.value = bbox.width; 88 | rect.height.baseVal.value = bbox.height; 89 | rect.setAttribute('class', className); 90 | addItems.push({ 91 | "parent": node.parentNode, 92 | "target": rect}); 93 | } 94 | } 95 | } 96 | else if (!jQuery(node).is("button, select, textarea")) { 97 | jQuery.each(node.childNodes, function() { 98 | highlight(this, addItems); 99 | }); 100 | } 101 | } 102 | var addItems = []; 103 | var result = this.each(function() { 104 | highlight(this, addItems); 105 | }); 106 | for (var i = 0; i < addItems.length; ++i) { 107 | jQuery(addItems[i].parent).before(addItems[i].target); 108 | } 109 | return result; 110 | }; 111 | 112 | /* 113 | * backward compatibility for jQuery.browser 114 | * This will be supported until firefox bug is fixed. 115 | */ 116 | if (!jQuery.browser) { 117 | jQuery.uaMatch = function(ua) { 118 | ua = ua.toLowerCase(); 119 | 120 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 121 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 122 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 123 | /(msie) ([\w.]+)/.exec(ua) || 124 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 125 | []; 126 | 127 | return { 128 | browser: match[ 1 ] || "", 129 | version: match[ 2 ] || "0" 130 | }; 131 | }; 132 | jQuery.browser = {}; 133 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 134 | } 135 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/sphinx_highlight.js: -------------------------------------------------------------------------------- 1 | /* Highlighting utilities for Sphinx HTML documentation. */ 2 | "use strict"; 3 | 4 | const SPHINX_HIGHLIGHT_ENABLED = true 5 | 6 | /** 7 | * highlight a given string on a node by wrapping it in 8 | * span elements with the given class name. 9 | */ 10 | const _highlight = (node, addItems, text, className) => { 11 | if (node.nodeType === Node.TEXT_NODE) { 12 | const val = node.nodeValue; 13 | const parent = node.parentNode; 14 | const pos = val.toLowerCase().indexOf(text); 15 | if ( 16 | pos >= 0 && 17 | !parent.classList.contains(className) && 18 | !parent.classList.contains("nohighlight") 19 | ) { 20 | let span; 21 | 22 | const closestNode = parent.closest("body, svg, foreignObject"); 23 | const isInSVG = closestNode && closestNode.matches("svg"); 24 | if (isInSVG) { 25 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 26 | } else { 27 | span = document.createElement("span"); 28 | span.classList.add(className); 29 | } 30 | 31 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 32 | parent.insertBefore( 33 | span, 34 | parent.insertBefore( 35 | document.createTextNode(val.substr(pos + text.length)), 36 | node.nextSibling 37 | ) 38 | ); 39 | node.nodeValue = val.substr(0, pos); 40 | 41 | if (isInSVG) { 42 | const rect = document.createElementNS( 43 | "http://www.w3.org/2000/svg", 44 | "rect" 45 | ); 46 | const bbox = parent.getBBox(); 47 | rect.x.baseVal.value = bbox.x; 48 | rect.y.baseVal.value = bbox.y; 49 | rect.width.baseVal.value = bbox.width; 50 | rect.height.baseVal.value = bbox.height; 51 | rect.setAttribute("class", className); 52 | addItems.push({ parent: parent, target: rect }); 53 | } 54 | } 55 | } else if (node.matches && !node.matches("button, select, textarea")) { 56 | node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); 57 | } 58 | }; 59 | const _highlightText = (thisNode, text, className) => { 60 | let addItems = []; 61 | _highlight(thisNode, addItems, text, className); 62 | addItems.forEach((obj) => 63 | obj.parent.insertAdjacentElement("beforebegin", obj.target) 64 | ); 65 | }; 66 | 67 | /** 68 | * Small JavaScript module for the documentation. 69 | */ 70 | const SphinxHighlight = { 71 | 72 | /** 73 | * highlight the search words provided in localstorage in the text 74 | */ 75 | highlightSearchWords: () => { 76 | if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight 77 | 78 | // get and clear terms from localstorage 79 | const url = new URL(window.location); 80 | const highlight = 81 | localStorage.getItem("sphinx_highlight_terms") 82 | || url.searchParams.get("highlight") 83 | || ""; 84 | localStorage.removeItem("sphinx_highlight_terms") 85 | url.searchParams.delete("highlight"); 86 | window.history.replaceState({}, "", url); 87 | 88 | // get individual terms from highlight string 89 | const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); 90 | if (terms.length === 0) return; // nothing to do 91 | 92 | // There should never be more than one element matching "div.body" 93 | const divBody = document.querySelectorAll("div.body"); 94 | const body = divBody.length ? divBody[0] : document.querySelector("body"); 95 | window.setTimeout(() => { 96 | terms.forEach((term) => _highlightText(body, term, "highlighted")); 97 | }, 10); 98 | 99 | const searchBox = document.getElementById("searchbox"); 100 | if (searchBox === null) return; 101 | searchBox.appendChild( 102 | document 103 | .createRange() 104 | .createContextualFragment( 105 | '" 109 | ) 110 | ); 111 | }, 112 | 113 | /** 114 | * helper function to hide the search marks again 115 | */ 116 | hideSearchWords: () => { 117 | document 118 | .querySelectorAll("#searchbox .highlight-link") 119 | .forEach((el) => el.remove()); 120 | document 121 | .querySelectorAll("span.highlighted") 122 | .forEach((el) => el.classList.remove("highlighted")); 123 | localStorage.removeItem("sphinx_highlight_terms") 124 | }, 125 | 126 | initEscapeListener: () => { 127 | // only install a listener if it is really needed 128 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; 129 | 130 | document.addEventListener("keydown", (event) => { 131 | // bail for input elements 132 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 133 | // bail with special keys 134 | if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; 135 | if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { 136 | SphinxHighlight.hideSearchWords(); 137 | event.preventDefault(); 138 | } 139 | }); 140 | }, 141 | }; 142 | 143 | _ready(SphinxHighlight.highlightSearchWords); 144 | _ready(SphinxHighlight.initEscapeListener); 145 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | pre { line-height: 125%; } 2 | td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 3 | span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 4 | td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 6 | .highlight .hll { background-color: #ffffcc } 7 | .highlight { background: #f8f8f8; } 8 | .highlight .c { color: #8f5902; font-style: italic } /* Comment */ 9 | .highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */ 10 | .highlight .g { color: #000000 } /* Generic */ 11 | .highlight .k { color: #004461; font-weight: bold } /* Keyword */ 12 | .highlight .l { color: #000000 } /* Literal */ 13 | .highlight .n { color: #000000 } /* Name */ 14 | .highlight .o { color: #582800 } /* Operator */ 15 | .highlight .x { color: #000000 } /* Other */ 16 | .highlight .p { color: #000000; font-weight: bold } /* Punctuation */ 17 | .highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */ 18 | .highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */ 19 | .highlight .cp { color: #8f5902 } /* Comment.Preproc */ 20 | .highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */ 21 | .highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */ 22 | .highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */ 23 | .highlight .gd { color: #a40000 } /* Generic.Deleted */ 24 | .highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */ 25 | .highlight .gr { color: #ef2929 } /* Generic.Error */ 26 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 27 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 28 | .highlight .go { color: #888888 } /* Generic.Output */ 29 | .highlight .gp { color: #745334 } /* Generic.Prompt */ 30 | .highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */ 31 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 32 | .highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */ 33 | .highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */ 34 | .highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */ 35 | .highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */ 36 | .highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */ 37 | .highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */ 38 | .highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */ 39 | .highlight .ld { color: #000000 } /* Literal.Date */ 40 | .highlight .m { color: #990000 } /* Literal.Number */ 41 | .highlight .s { color: #4e9a06 } /* Literal.String */ 42 | .highlight .na { color: #c4a000 } /* Name.Attribute */ 43 | .highlight .nb { color: #004461 } /* Name.Builtin */ 44 | .highlight .nc { color: #000000 } /* Name.Class */ 45 | .highlight .no { color: #000000 } /* Name.Constant */ 46 | .highlight .nd { color: #888888 } /* Name.Decorator */ 47 | .highlight .ni { color: #ce5c00 } /* Name.Entity */ 48 | .highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */ 49 | .highlight .nf { color: #000000 } /* Name.Function */ 50 | .highlight .nl { color: #f57900 } /* Name.Label */ 51 | .highlight .nn { color: #000000 } /* Name.Namespace */ 52 | .highlight .nx { color: #000000 } /* Name.Other */ 53 | .highlight .py { color: #000000 } /* Name.Property */ 54 | .highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */ 55 | .highlight .nv { color: #000000 } /* Name.Variable */ 56 | .highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */ 57 | .highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */ 58 | .highlight .w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */ 59 | .highlight .mb { color: #990000 } /* Literal.Number.Bin */ 60 | .highlight .mf { color: #990000 } /* Literal.Number.Float */ 61 | .highlight .mh { color: #990000 } /* Literal.Number.Hex */ 62 | .highlight .mi { color: #990000 } /* Literal.Number.Integer */ 63 | .highlight .mo { color: #990000 } /* Literal.Number.Oct */ 64 | .highlight .sa { color: #4e9a06 } /* Literal.String.Affix */ 65 | .highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */ 66 | .highlight .sc { color: #4e9a06 } /* Literal.String.Char */ 67 | .highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */ 68 | .highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */ 69 | .highlight .s2 { color: #4e9a06 } /* Literal.String.Double */ 70 | .highlight .se { color: #4e9a06 } /* Literal.String.Escape */ 71 | .highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */ 72 | .highlight .si { color: #4e9a06 } /* Literal.String.Interpol */ 73 | .highlight .sx { color: #4e9a06 } /* Literal.String.Other */ 74 | .highlight .sr { color: #4e9a06 } /* Literal.String.Regex */ 75 | .highlight .s1 { color: #4e9a06 } /* Literal.String.Single */ 76 | .highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */ 77 | .highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */ 78 | .highlight .fm { color: #000000 } /* Name.Function.Magic */ 79 | .highlight .vc { color: #000000 } /* Name.Variable.Class */ 80 | .highlight .vg { color: #000000 } /* Name.Variable.Global */ 81 | .highlight .vi { color: #000000 } /* Name.Variable.Instance */ 82 | .highlight .vm { color: #000000 } /* Name.Variable.Magic */ 83 | .highlight .il { color: #990000 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /project_0/docs/_build/html/api.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | API — Project Zero 1.0 documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 |
29 |
30 |
31 | 32 | 33 |
34 | 35 |
36 |

API

37 |

Python Real-World Projects 38 | Project Zero: A template for other projects

39 |
40 |
41 | hello_world.get_options(argv: list[str]) argparse.Namespace
42 |

Parse command-line

43 |
44 | 45 |
46 |
47 | hello_world.greeting(who: str = 'World') None
48 |

Write greeting.

49 |
50 | 51 |
52 |
53 | hello_world.main(argv: list[str] = ['-M', 'html', '.', '_build']) None
54 |

Get options and write greeting.

55 |
56 | 57 |
58 | 59 | 60 |
61 | 62 |
63 |
64 | 111 |
112 |
113 | 124 | 125 | 126 | 127 | 128 | 129 | -------------------------------------------------------------------------------- /project_1_2/src/kaggle_client.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python Real-World Projects 3 | Project 1.2: Acquire data from web service 4 | """ 5 | from abc import ABC, abstractmethod 6 | from collections.abc import Iterator 7 | from dataclasses import dataclass 8 | import io 9 | import json 10 | import logging 11 | from pathlib import Path 12 | from pprint import pprint 13 | from textwrap import shorten 14 | import zipfile 15 | 16 | from bs4 import BeautifulSoup 17 | from model import RawData, XYPair 18 | 19 | # Option 1: Static Dependency 20 | # import requests 21 | # import requests.auth 22 | 23 | # Option 2: Dependency Injection 24 | import importlib 25 | # Can be changed for testing 26 | requests_name = "requests" 27 | requests = importlib.import_module(requests_name) 28 | 29 | 30 | class PairBuilder(ABC): 31 | target_class: type[RawData] 32 | 33 | @abstractmethod 34 | def from_row(self, row: list[str]) -> RawData: 35 | ... 36 | 37 | @dataclass 38 | class Dataset: 39 | title: str 40 | ref: str 41 | url: str 42 | totalBytes: int 43 | viewCount: int 44 | voteCount: int 45 | downloadCount: int 46 | usabilityRating: float 47 | 48 | class DSBuilder: 49 | target_class: type[Dataset] = Dataset 50 | 51 | def from_json(self, obj: dict[str, str]) -> "Dataset": 52 | return self.target_class( 53 | title=obj['title'], 54 | ref=obj['ref'], 55 | url=obj['url'], 56 | totalBytes=int(obj['totalBytes']), 57 | viewCount=int(obj['viewCount']), 58 | voteCount=int(obj['voteCount']), 59 | downloadCount=int(obj['downloadCount']), 60 | usabilityRating=int(obj['usabilityRating']), 61 | ) 62 | 63 | 64 | class RestAccess: 65 | logger: logging.Logger = logging.getLogger("RestAccess") 66 | 67 | def __init__(self, kaggle_doc: dict[str, str]) -> None: 68 | self.credentials = requests.auth.HTTPBasicAuth( 69 | kaggle_doc['username'], kaggle_doc['key'] 70 | ) 71 | 72 | @staticmethod 73 | def error_dump(prefix: str, response: requests.Response) -> None: 74 | RestAccess.logger.debug("%s %r", prefix, response) 75 | RestAccess.logger.debug("Query: %s %s", response.request.method, response.request.url) 76 | for k, v in response.headers.items(): 77 | RestAccess.logger.debug(" %r: %r", k, shorten(v, 128)) 78 | RestAccess.logger.debug(response.content) 79 | 80 | def get_paged_json( 81 | self, 82 | url: str, 83 | query: dict[str, str] | None = None 84 | ) -> Iterator[dict[str, str]]: 85 | """Paginated requests, e.g., datasets/list""" 86 | query = {} if query is None else query 87 | page = 1 88 | while True: 89 | response = requests.get( 90 | url, 91 | auth=self.credentials, 92 | params=query | {'page': str(page)}, 93 | headers={"Accept": "application/json"}, 94 | ) 95 | self.error_dump(f"{url}", response) 96 | if response.status_code == 200: 97 | details = response.json() 98 | if details: 99 | yield from iter(details) 100 | page += 1 101 | else: 102 | RestAccess.logger.info("Final Content: %r", response.content) 103 | break # No more data. 104 | else: 105 | # Unexpected response, e.g., a 429 Too Many Requests 106 | RestAccess.logger.error("Page %d", page) 107 | self.error_dump(f"UNEXPECTED", response) 108 | break 109 | 110 | def get_json(self, url: str, params: dict[str, str] | None = None) -> dict[str, any]: 111 | mime_type = "application/json" 112 | response = requests.get(url, auth=self.credentials, params=params, headers={"Accept": mime_type}) 113 | if response.status_code == 200: 114 | if response.headers.get('Content-Type', 'plain/text').startswith(mime_type): 115 | return response.json() 116 | else: 117 | self.error_dump(f"NOT {mime_type}", response) 118 | else: 119 | self.error_dump(f"UNEXPECTED", response) 120 | 121 | def get_html(self, url: str, params: dict[str, str] | None = None) -> bytes: 122 | mime_type = "text/html" 123 | response = requests.get(url, auth=self.credentials, params=params, headers={"Accept": mime_type}) 124 | if response.status_code == 200: 125 | if response.headers.get('Content-Type', 'plain/text').startswith(mime_type): 126 | return response.content 127 | self.error_dump(f"UNEXPECTED", response) 128 | 129 | def get_zip(self, url: str, params: dict[str, str] | None = None) -> zipfile.ZipFile: 130 | mime_type = "application/zip" 131 | response = requests.get(url, auth=self.credentials, params=params, headers={"Accept": mime_type}) 132 | if response.status_code == 200: 133 | self.error_dump(f"{url}", response) 134 | if response.headers.get('Content-Type', 'plain/text').startswith(mime_type): 135 | content = io.BytesIO(response.content) 136 | zip_file = zipfile.ZipFile(content) 137 | return zip_file 138 | else: 139 | self.error_dump(f"NOT {mime_type}", response) 140 | else: 141 | self.error_dump(f"UNEXPECTED", response) 142 | 143 | def close(self): 144 | pass 145 | 146 | class RestExtract: 147 | def __init__(self, builders: list[PairBuilder]) -> None: 148 | self.builders = builders 149 | 150 | def build_pairs(self, row: list[str]) -> list[RawData]: 151 | return [bldr.from_row(row) for bldr in self.builders] 152 | 153 | ### SPIKE SOLUTIONS 154 | 155 | def find_json() -> None: 156 | keypath = Path.home()/"Downloads"/"kaggle.json" 157 | with keypath.open() as keyfile: 158 | credentials = json.load(keyfile) 159 | reader = RestAccess(credentials) 160 | 161 | builder = DSBuilder() 162 | list_url = "https://www.kaggle.com/api/v1/datasets/list" 163 | query = {"filetype": "json", "maxSize": 1_000_000, "group": "public"} 164 | count = 0 165 | for row in reader.get_paged_json(list_url, query): 166 | count += 1 167 | ds = builder.from_json(row) 168 | if ds.usabilityRating > 0.5: 169 | print(ds) 170 | print(f"Found {count} datasets") 171 | 172 | def main(argv: list[str] | None = None) -> None: 173 | keypath = Path.home()/"Downloads"/"kaggle.json" 174 | with keypath.open() as keyfile: 175 | credentials = json.load(keyfile) 176 | reader = RestAccess(credentials) 177 | 178 | data_url = None 179 | data_ref = None 180 | list_url = "https://www.kaggle.com/api/v1/datasets/list" 181 | for row in reader.get_paged_json(list_url, {"user": "carlmcbrideellis"}): 182 | print(row['title'], row['ref'], row['url'], row['totalBytes']) 183 | if "Anscombe" in row['title']: 184 | data_ref = row['ref'] 185 | data_url = row['url'] 186 | currentVersionNumber = row['currentVersionNumber'] 187 | pprint(row) 188 | 189 | # An HTML page about the dataaset. 190 | # url = 'https://www.kaggle.com/datasets/carlmcbrideellis/data-anscombes-quartet' 191 | print() 192 | print(f"DATASET PAGE: {data_url}") 193 | soup = BeautifulSoup(reader.get_html(data_url), "html.parser") 194 | for script in soup.head.find_all("script"): 195 | if script.attrs.get("type") == "application/ld+json": 196 | print(script.attrs) 197 | content_object = json.loads(script.text) 198 | pprint(content_object) 199 | print() 200 | 201 | # Metadata 202 | # url = "https://www.kaggle.com/datasets/metadata/{ownerSlug}/{datasetSlug}" 203 | metadata_url = f'https://www.kaggle.com/api/v1/datasets/metadata/{data_ref}' 204 | print() 205 | print(f"METADATA PAGE: {data_url}") 206 | metadata_page = reader.get_json(metadata_url) 207 | pprint(metadata_page) 208 | print() 209 | 210 | # The downloadable data 211 | # From the metadata page... 212 | # HTML url = "https://www.kaggle.com/datasets/carlmcbrideellis/data-anscombes-quartet/download?datasetVersionNumber=1" 213 | # url = "https://www.kaggle.com/api/v1/datasets/download/{ownerSlug}/{datasetSlug} 214 | download_url = f'https://www.kaggle.com/api/v1/datasets/download/{data_ref}' 215 | 216 | series_1 = None # PairBuilder() 217 | extractor = RestExtract([series_1]) 218 | print() 219 | print(f"DATA: {download_url!r}") 220 | zip_data = reader.get_zip(download_url, params={"datasetVersionNumber": currentVersionNumber}) 221 | print(zip_data) 222 | print(zip_data.infolist()) 223 | with zip_data.open("Anscombe_quartet_data.csv") as quartet_data: 224 | for line in quartet_data: 225 | print(line) 226 | 227 | 228 | if __name__ == "__main__": 229 | logging.basicConfig(level=logging.INFO) 230 | main() 231 | # find_json() 232 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/language_data.js: -------------------------------------------------------------------------------- 1 | /* 2 | * language_data.js 3 | * ~~~~~~~~~~~~~~~~ 4 | * 5 | * This script contains the language-specific data used by searchtools.js, 6 | * namely the list of stopwords, stemmer, scorer and splitter. 7 | * 8 | * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. 9 | * :license: BSD, see LICENSE for details. 10 | * 11 | */ 12 | 13 | var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"]; 14 | 15 | 16 | /* Non-minified version is copied as a separate JS file, is available */ 17 | 18 | /** 19 | * Porter Stemmer 20 | */ 21 | var Stemmer = function() { 22 | 23 | var step2list = { 24 | ational: 'ate', 25 | tional: 'tion', 26 | enci: 'ence', 27 | anci: 'ance', 28 | izer: 'ize', 29 | bli: 'ble', 30 | alli: 'al', 31 | entli: 'ent', 32 | eli: 'e', 33 | ousli: 'ous', 34 | ization: 'ize', 35 | ation: 'ate', 36 | ator: 'ate', 37 | alism: 'al', 38 | iveness: 'ive', 39 | fulness: 'ful', 40 | ousness: 'ous', 41 | aliti: 'al', 42 | iviti: 'ive', 43 | biliti: 'ble', 44 | logi: 'log' 45 | }; 46 | 47 | var step3list = { 48 | icate: 'ic', 49 | ative: '', 50 | alize: 'al', 51 | iciti: 'ic', 52 | ical: 'ic', 53 | ful: '', 54 | ness: '' 55 | }; 56 | 57 | var c = "[^aeiou]"; // consonant 58 | var v = "[aeiouy]"; // vowel 59 | var C = c + "[^aeiouy]*"; // consonant sequence 60 | var V = v + "[aeiou]*"; // vowel sequence 61 | 62 | var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 63 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 64 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 65 | var s_v = "^(" + C + ")?" + v; // vowel in stem 66 | 67 | this.stemWord = function (w) { 68 | var stem; 69 | var suffix; 70 | var firstch; 71 | var origword = w; 72 | 73 | if (w.length < 3) 74 | return w; 75 | 76 | var re; 77 | var re2; 78 | var re3; 79 | var re4; 80 | 81 | firstch = w.substr(0,1); 82 | if (firstch == "y") 83 | w = firstch.toUpperCase() + w.substr(1); 84 | 85 | // Step 1a 86 | re = /^(.+?)(ss|i)es$/; 87 | re2 = /^(.+?)([^s])s$/; 88 | 89 | if (re.test(w)) 90 | w = w.replace(re,"$1$2"); 91 | else if (re2.test(w)) 92 | w = w.replace(re2,"$1$2"); 93 | 94 | // Step 1b 95 | re = /^(.+?)eed$/; 96 | re2 = /^(.+?)(ed|ing)$/; 97 | if (re.test(w)) { 98 | var fp = re.exec(w); 99 | re = new RegExp(mgr0); 100 | if (re.test(fp[1])) { 101 | re = /.$/; 102 | w = w.replace(re,""); 103 | } 104 | } 105 | else if (re2.test(w)) { 106 | var fp = re2.exec(w); 107 | stem = fp[1]; 108 | re2 = new RegExp(s_v); 109 | if (re2.test(stem)) { 110 | w = stem; 111 | re2 = /(at|bl|iz)$/; 112 | re3 = new RegExp("([^aeiouylsz])\\1$"); 113 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 114 | if (re2.test(w)) 115 | w = w + "e"; 116 | else if (re3.test(w)) { 117 | re = /.$/; 118 | w = w.replace(re,""); 119 | } 120 | else if (re4.test(w)) 121 | w = w + "e"; 122 | } 123 | } 124 | 125 | // Step 1c 126 | re = /^(.+?)y$/; 127 | if (re.test(w)) { 128 | var fp = re.exec(w); 129 | stem = fp[1]; 130 | re = new RegExp(s_v); 131 | if (re.test(stem)) 132 | w = stem + "i"; 133 | } 134 | 135 | // Step 2 136 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 137 | if (re.test(w)) { 138 | var fp = re.exec(w); 139 | stem = fp[1]; 140 | suffix = fp[2]; 141 | re = new RegExp(mgr0); 142 | if (re.test(stem)) 143 | w = stem + step2list[suffix]; 144 | } 145 | 146 | // Step 3 147 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 148 | if (re.test(w)) { 149 | var fp = re.exec(w); 150 | stem = fp[1]; 151 | suffix = fp[2]; 152 | re = new RegExp(mgr0); 153 | if (re.test(stem)) 154 | w = stem + step3list[suffix]; 155 | } 156 | 157 | // Step 4 158 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 159 | re2 = /^(.+?)(s|t)(ion)$/; 160 | if (re.test(w)) { 161 | var fp = re.exec(w); 162 | stem = fp[1]; 163 | re = new RegExp(mgr1); 164 | if (re.test(stem)) 165 | w = stem; 166 | } 167 | else if (re2.test(w)) { 168 | var fp = re2.exec(w); 169 | stem = fp[1] + fp[2]; 170 | re2 = new RegExp(mgr1); 171 | if (re2.test(stem)) 172 | w = stem; 173 | } 174 | 175 | // Step 5 176 | re = /^(.+?)e$/; 177 | if (re.test(w)) { 178 | var fp = re.exec(w); 179 | stem = fp[1]; 180 | re = new RegExp(mgr1); 181 | re2 = new RegExp(meq1); 182 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 183 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 184 | w = stem; 185 | } 186 | re = /ll$/; 187 | re2 = new RegExp(mgr1); 188 | if (re.test(w) && re2.test(w)) { 189 | re = /.$/; 190 | w = w.replace(re,""); 191 | } 192 | 193 | // and turn initial Y back to y 194 | if (firstch == "y") 195 | w = firstch.toLowerCase() + w.substr(1); 196 | return w; 197 | } 198 | } 199 | 200 | 201 | 202 | 203 | var splitChars = (function() { 204 | var result = {}; 205 | var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648, 206 | 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702, 207 | 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971, 208 | 2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345, 209 | 3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761, 210 | 3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823, 211 | 4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125, 212 | 8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695, 213 | 11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587, 214 | 43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141]; 215 | var i, j, start, end; 216 | for (i = 0; i < singles.length; i++) { 217 | result[singles[i]] = true; 218 | } 219 | var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709], 220 | [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161], 221 | [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568], 222 | [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807], 223 | [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047], 224 | [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383], 225 | [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450], 226 | [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547], 227 | [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673], 228 | [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820], 229 | [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946], 230 | [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023], 231 | [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173], 232 | [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332], 233 | [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481], 234 | [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718], 235 | [3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791], 236 | [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095], 237 | [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205], 238 | [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687], 239 | [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968], 240 | [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869], 241 | [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102], 242 | [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271], 243 | [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592], 244 | [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822], 245 | [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167], 246 | [7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959], 247 | [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143], 248 | [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318], 249 | [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483], 250 | [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101], 251 | [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567], 252 | [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292], 253 | [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444], 254 | [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783], 255 | [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311], 256 | [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511], 257 | [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774], 258 | [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071], 259 | [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263], 260 | [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519], 261 | [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647], 262 | [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967], 263 | [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295], 264 | [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274], 265 | [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007], 266 | [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381], 267 | [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]]; 268 | for (i = 0; i < ranges.length; i++) { 269 | start = ranges[i][0]; 270 | end = ranges[i][1]; 271 | for (j = start; j <= end; j++) { 272 | result[j] = true; 273 | } 274 | } 275 | return result; 276 | })(); 277 | 278 | function splitQuery(query) { 279 | var result = []; 280 | var start = -1; 281 | for (var i = 0; i < query.length; i++) { 282 | if (splitChars[query.charCodeAt(i)]) { 283 | if (start !== -1) { 284 | result.push(query.slice(start, i)); 285 | start = -1; 286 | } 287 | } else if (start === -1) { 288 | start = i; 289 | } 290 | } 291 | if (start !== -1) { 292 | result.push(query.slice(start)); 293 | } 294 | return result; 295 | } 296 | 297 | 298 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | * 33 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL 34 | */ 35 | jQuery.urldecode = function(x) { 36 | if (!x) { 37 | return x 38 | } 39 | return decodeURIComponent(x.replace(/\+/g, ' ')); 40 | }; 41 | 42 | /** 43 | * small helper function to urlencode strings 44 | */ 45 | jQuery.urlencode = encodeURIComponent; 46 | 47 | /** 48 | * This function returns the parsed url parameters of the 49 | * current request. Multiple values per key are supported, 50 | * it will always return arrays of strings for the value parts. 51 | */ 52 | jQuery.getQueryParameters = function(s) { 53 | if (typeof s === 'undefined') 54 | s = document.location.search; 55 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 56 | var result = {}; 57 | for (var i = 0; i < parts.length; i++) { 58 | var tmp = parts[i].split('=', 2); 59 | var key = jQuery.urldecode(tmp[0]); 60 | var value = jQuery.urldecode(tmp[1]); 61 | if (key in result) 62 | result[key].push(value); 63 | else 64 | result[key] = [value]; 65 | } 66 | return result; 67 | }; 68 | 69 | /** 70 | * highlight a given string on a jquery object by wrapping it in 71 | * span elements with the given class name. 72 | */ 73 | jQuery.fn.highlightText = function(text, className) { 74 | function highlight(node, addItems) { 75 | if (node.nodeType === 3) { 76 | var val = node.nodeValue; 77 | var pos = val.toLowerCase().indexOf(text); 78 | if (pos >= 0 && 79 | !jQuery(node.parentNode).hasClass(className) && 80 | !jQuery(node.parentNode).hasClass("nohighlight")) { 81 | var span; 82 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 83 | if (isInSVG) { 84 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 85 | } else { 86 | span = document.createElement("span"); 87 | span.className = className; 88 | } 89 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 90 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 91 | document.createTextNode(val.substr(pos + text.length)), 92 | node.nextSibling)); 93 | node.nodeValue = val.substr(0, pos); 94 | if (isInSVG) { 95 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 96 | var bbox = node.parentElement.getBBox(); 97 | rect.x.baseVal.value = bbox.x; 98 | rect.y.baseVal.value = bbox.y; 99 | rect.width.baseVal.value = bbox.width; 100 | rect.height.baseVal.value = bbox.height; 101 | rect.setAttribute('class', className); 102 | addItems.push({ 103 | "parent": node.parentNode, 104 | "target": rect}); 105 | } 106 | } 107 | } 108 | else if (!jQuery(node).is("button, select, textarea")) { 109 | jQuery.each(node.childNodes, function() { 110 | highlight(this, addItems); 111 | }); 112 | } 113 | } 114 | var addItems = []; 115 | var result = this.each(function() { 116 | highlight(this, addItems); 117 | }); 118 | for (var i = 0; i < addItems.length; ++i) { 119 | jQuery(addItems[i].parent).before(addItems[i].target); 120 | } 121 | return result; 122 | }; 123 | 124 | /* 125 | * backward compatibility for jQuery.browser 126 | * This will be supported until firefox bug is fixed. 127 | */ 128 | if (!jQuery.browser) { 129 | jQuery.uaMatch = function(ua) { 130 | ua = ua.toLowerCase(); 131 | 132 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 133 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 134 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 135 | /(msie) ([\w.]+)/.exec(ua) || 136 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 137 | []; 138 | 139 | return { 140 | browser: match[ 1 ] || "", 141 | version: match[ 2 ] || "0" 142 | }; 143 | }; 144 | jQuery.browser = {}; 145 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 146 | } 147 | 148 | /** 149 | * Small JavaScript module for the documentation. 150 | */ 151 | var Documentation = { 152 | 153 | init : function() { 154 | this.fixFirefoxAnchorBug(); 155 | this.highlightSearchWords(); 156 | this.initIndexTable(); 157 | this.initOnKeyListeners(); 158 | }, 159 | 160 | /** 161 | * i18n support 162 | */ 163 | TRANSLATIONS : {}, 164 | PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, 165 | LOCALE : 'unknown', 166 | 167 | // gettext and ngettext don't access this so that the functions 168 | // can safely bound to a different name (_ = Documentation.gettext) 169 | gettext : function(string) { 170 | var translated = Documentation.TRANSLATIONS[string]; 171 | if (typeof translated === 'undefined') 172 | return string; 173 | return (typeof translated === 'string') ? translated : translated[0]; 174 | }, 175 | 176 | ngettext : function(singular, plural, n) { 177 | var translated = Documentation.TRANSLATIONS[singular]; 178 | if (typeof translated === 'undefined') 179 | return (n == 1) ? singular : plural; 180 | return translated[Documentation.PLURALEXPR(n)]; 181 | }, 182 | 183 | addTranslations : function(catalog) { 184 | for (var key in catalog.messages) 185 | this.TRANSLATIONS[key] = catalog.messages[key]; 186 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 187 | this.LOCALE = catalog.locale; 188 | }, 189 | 190 | /** 191 | * add context elements like header anchor links 192 | */ 193 | addContextElements : function() { 194 | $('div[id] > :header:first').each(function() { 195 | $('\u00B6'). 196 | attr('href', '#' + this.id). 197 | attr('title', _('Permalink to this headline')). 198 | appendTo(this); 199 | }); 200 | $('dt[id]').each(function() { 201 | $('\u00B6'). 202 | attr('href', '#' + this.id). 203 | attr('title', _('Permalink to this definition')). 204 | appendTo(this); 205 | }); 206 | }, 207 | 208 | /** 209 | * workaround a firefox stupidity 210 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 211 | */ 212 | fixFirefoxAnchorBug : function() { 213 | if (document.location.hash && $.browser.mozilla) 214 | window.setTimeout(function() { 215 | document.location.href += ''; 216 | }, 10); 217 | }, 218 | 219 | /** 220 | * highlight the search words provided in the url in the text 221 | */ 222 | highlightSearchWords : function() { 223 | var params = $.getQueryParameters(); 224 | var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; 225 | if (terms.length) { 226 | var body = $('div.body'); 227 | if (!body.length) { 228 | body = $('body'); 229 | } 230 | window.setTimeout(function() { 231 | $.each(terms, function() { 232 | body.highlightText(this.toLowerCase(), 'highlighted'); 233 | }); 234 | }, 10); 235 | $('') 237 | .appendTo($('#searchbox')); 238 | } 239 | }, 240 | 241 | /** 242 | * init the domain index toggle buttons 243 | */ 244 | initIndexTable : function() { 245 | var togglers = $('img.toggler').click(function() { 246 | var src = $(this).attr('src'); 247 | var idnum = $(this).attr('id').substr(7); 248 | $('tr.cg-' + idnum).toggle(); 249 | if (src.substr(-9) === 'minus.png') 250 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 251 | else 252 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 253 | }).css('display', ''); 254 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 255 | togglers.click(); 256 | } 257 | }, 258 | 259 | /** 260 | * helper function to hide the search marks again 261 | */ 262 | hideSearchWords : function() { 263 | $('#searchbox .highlight-link').fadeOut(300); 264 | $('span.highlighted').removeClass('highlighted'); 265 | var url = new URL(window.location); 266 | url.searchParams.delete('highlight'); 267 | window.history.replaceState({}, '', url); 268 | }, 269 | 270 | /** 271 | * helper function to focus on search bar 272 | */ 273 | focusSearchBar : function() { 274 | $('input[name=q]').first().focus(); 275 | }, 276 | 277 | /** 278 | * make the url absolute 279 | */ 280 | makeURL : function(relativeURL) { 281 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 282 | }, 283 | 284 | /** 285 | * get the current relative url 286 | */ 287 | getCurrentURL : function() { 288 | var path = document.location.pathname; 289 | var parts = path.split(/\//); 290 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 291 | if (this === '..') 292 | parts.pop(); 293 | }); 294 | var url = parts.join('/'); 295 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 296 | }, 297 | 298 | initOnKeyListeners: function() { 299 | // only install a listener if it is really needed 300 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && 301 | !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) 302 | return; 303 | 304 | $(document).keydown(function(event) { 305 | var activeElementType = document.activeElement.tagName; 306 | // don't navigate when in search box, textarea, dropdown or button 307 | if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT' 308 | && activeElementType !== 'BUTTON') { 309 | if (event.altKey || event.ctrlKey || event.metaKey) 310 | return; 311 | 312 | if (!event.shiftKey) { 313 | switch (event.key) { 314 | case 'ArrowLeft': 315 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) 316 | break; 317 | var prevHref = $('link[rel="prev"]').prop('href'); 318 | if (prevHref) { 319 | window.location.href = prevHref; 320 | return false; 321 | } 322 | break; 323 | case 'ArrowRight': 324 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) 325 | break; 326 | var nextHref = $('link[rel="next"]').prop('href'); 327 | if (nextHref) { 328 | window.location.href = nextHref; 329 | return false; 330 | } 331 | break; 332 | case 'Escape': 333 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) 334 | break; 335 | Documentation.hideSearchWords(); 336 | return false; 337 | } 338 | } 339 | 340 | // some keyboard layouts may need Shift to get / 341 | switch (event.key) { 342 | case '/': 343 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) 344 | break; 345 | Documentation.focusSearchBar(); 346 | return false; 347 | } 348 | } 349 | }); 350 | } 351 | }; 352 | 353 | // quick alias for translations 354 | _ = Documentation.gettext; 355 | 356 | $(document).ready(function() { 357 | Documentation.init(); 358 | }); 359 | -------------------------------------------------------------------------------- /project_0/docs/_build/html/_static/alabaster.css: -------------------------------------------------------------------------------- 1 | @import url("basic.css"); 2 | 3 | /* -- page layout ----------------------------------------------------------- */ 4 | 5 | body { 6 | font-family: Georgia, serif; 7 | font-size: 17px; 8 | background-color: #fff; 9 | color: #000; 10 | margin: 0; 11 | padding: 0; 12 | } 13 | 14 | 15 | div.document { 16 | width: 940px; 17 | margin: 30px auto 0 auto; 18 | } 19 | 20 | div.documentwrapper { 21 | float: left; 22 | width: 100%; 23 | } 24 | 25 | div.bodywrapper { 26 | margin: 0 0 0 220px; 27 | } 28 | 29 | div.sphinxsidebar { 30 | width: 220px; 31 | font-size: 14px; 32 | line-height: 1.5; 33 | } 34 | 35 | hr { 36 | border: 1px solid #B1B4B6; 37 | } 38 | 39 | div.body { 40 | background-color: #fff; 41 | color: #3E4349; 42 | padding: 0 30px 0 30px; 43 | } 44 | 45 | div.body > .section { 46 | text-align: left; 47 | } 48 | 49 | div.footer { 50 | width: 940px; 51 | margin: 20px auto 30px auto; 52 | font-size: 14px; 53 | color: #888; 54 | text-align: right; 55 | } 56 | 57 | div.footer a { 58 | color: #888; 59 | } 60 | 61 | p.caption { 62 | font-family: inherit; 63 | font-size: inherit; 64 | } 65 | 66 | 67 | div.relations { 68 | display: none; 69 | } 70 | 71 | 72 | div.sphinxsidebar a { 73 | color: #444; 74 | text-decoration: none; 75 | border-bottom: 1px dotted #999; 76 | } 77 | 78 | div.sphinxsidebar a:hover { 79 | border-bottom: 1px solid #999; 80 | } 81 | 82 | div.sphinxsidebarwrapper { 83 | padding: 18px 10px; 84 | } 85 | 86 | div.sphinxsidebarwrapper p.logo { 87 | padding: 0; 88 | margin: -10px 0 0 0px; 89 | text-align: center; 90 | } 91 | 92 | div.sphinxsidebarwrapper h1.logo { 93 | margin-top: -10px; 94 | text-align: center; 95 | margin-bottom: 5px; 96 | text-align: left; 97 | } 98 | 99 | div.sphinxsidebarwrapper h1.logo-name { 100 | margin-top: 0px; 101 | } 102 | 103 | div.sphinxsidebarwrapper p.blurb { 104 | margin-top: 0; 105 | font-style: normal; 106 | } 107 | 108 | div.sphinxsidebar h3, 109 | div.sphinxsidebar h4 { 110 | font-family: Georgia, serif; 111 | color: #444; 112 | font-size: 24px; 113 | font-weight: normal; 114 | margin: 0 0 5px 0; 115 | padding: 0; 116 | } 117 | 118 | div.sphinxsidebar h4 { 119 | font-size: 20px; 120 | } 121 | 122 | div.sphinxsidebar h3 a { 123 | color: #444; 124 | } 125 | 126 | div.sphinxsidebar p.logo a, 127 | div.sphinxsidebar h3 a, 128 | div.sphinxsidebar p.logo a:hover, 129 | div.sphinxsidebar h3 a:hover { 130 | border: none; 131 | } 132 | 133 | div.sphinxsidebar p { 134 | color: #555; 135 | margin: 10px 0; 136 | } 137 | 138 | div.sphinxsidebar ul { 139 | margin: 10px 0; 140 | padding: 0; 141 | color: #000; 142 | } 143 | 144 | div.sphinxsidebar ul li.toctree-l1 > a { 145 | font-size: 120%; 146 | } 147 | 148 | div.sphinxsidebar ul li.toctree-l2 > a { 149 | font-size: 110%; 150 | } 151 | 152 | div.sphinxsidebar input { 153 | border: 1px solid #CCC; 154 | font-family: Georgia, serif; 155 | font-size: 1em; 156 | } 157 | 158 | div.sphinxsidebar hr { 159 | border: none; 160 | height: 1px; 161 | color: #AAA; 162 | background: #AAA; 163 | 164 | text-align: left; 165 | margin-left: 0; 166 | width: 50%; 167 | } 168 | 169 | div.sphinxsidebar .badge { 170 | border-bottom: none; 171 | } 172 | 173 | div.sphinxsidebar .badge:hover { 174 | border-bottom: none; 175 | } 176 | 177 | /* To address an issue with donation coming after search */ 178 | div.sphinxsidebar h3.donation { 179 | margin-top: 10px; 180 | } 181 | 182 | /* -- body styles ----------------------------------------------------------- */ 183 | 184 | a { 185 | color: #004B6B; 186 | text-decoration: underline; 187 | } 188 | 189 | a:hover { 190 | color: #6D4100; 191 | text-decoration: underline; 192 | } 193 | 194 | div.body h1, 195 | div.body h2, 196 | div.body h3, 197 | div.body h4, 198 | div.body h5, 199 | div.body h6 { 200 | font-family: Georgia, serif; 201 | font-weight: normal; 202 | margin: 30px 0px 10px 0px; 203 | padding: 0; 204 | } 205 | 206 | div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } 207 | div.body h2 { font-size: 180%; } 208 | div.body h3 { font-size: 150%; } 209 | div.body h4 { font-size: 130%; } 210 | div.body h5 { font-size: 100%; } 211 | div.body h6 { font-size: 100%; } 212 | 213 | a.headerlink { 214 | color: #DDD; 215 | padding: 0 4px; 216 | text-decoration: none; 217 | } 218 | 219 | a.headerlink:hover { 220 | color: #444; 221 | background: #EAEAEA; 222 | } 223 | 224 | div.body p, div.body dd, div.body li { 225 | line-height: 1.4em; 226 | } 227 | 228 | div.admonition { 229 | margin: 20px 0px; 230 | padding: 10px 30px; 231 | background-color: #EEE; 232 | border: 1px solid #CCC; 233 | } 234 | 235 | div.admonition tt.xref, div.admonition code.xref, div.admonition a tt { 236 | background-color: #FBFBFB; 237 | border-bottom: 1px solid #fafafa; 238 | } 239 | 240 | div.admonition p.admonition-title { 241 | font-family: Georgia, serif; 242 | font-weight: normal; 243 | font-size: 24px; 244 | margin: 0 0 10px 0; 245 | padding: 0; 246 | line-height: 1; 247 | } 248 | 249 | div.admonition p.last { 250 | margin-bottom: 0; 251 | } 252 | 253 | div.highlight { 254 | background-color: #fff; 255 | } 256 | 257 | dt:target, .highlight { 258 | background: #FAF3E8; 259 | } 260 | 261 | div.warning { 262 | background-color: #FCC; 263 | border: 1px solid #FAA; 264 | } 265 | 266 | div.danger { 267 | background-color: #FCC; 268 | border: 1px solid #FAA; 269 | -moz-box-shadow: 2px 2px 4px #D52C2C; 270 | -webkit-box-shadow: 2px 2px 4px #D52C2C; 271 | box-shadow: 2px 2px 4px #D52C2C; 272 | } 273 | 274 | div.error { 275 | background-color: #FCC; 276 | border: 1px solid #FAA; 277 | -moz-box-shadow: 2px 2px 4px #D52C2C; 278 | -webkit-box-shadow: 2px 2px 4px #D52C2C; 279 | box-shadow: 2px 2px 4px #D52C2C; 280 | } 281 | 282 | div.caution { 283 | background-color: #FCC; 284 | border: 1px solid #FAA; 285 | } 286 | 287 | div.attention { 288 | background-color: #FCC; 289 | border: 1px solid #FAA; 290 | } 291 | 292 | div.important { 293 | background-color: #EEE; 294 | border: 1px solid #CCC; 295 | } 296 | 297 | div.note { 298 | background-color: #EEE; 299 | border: 1px solid #CCC; 300 | } 301 | 302 | div.tip { 303 | background-color: #EEE; 304 | border: 1px solid #CCC; 305 | } 306 | 307 | div.hint { 308 | background-color: #EEE; 309 | border: 1px solid #CCC; 310 | } 311 | 312 | div.seealso { 313 | background-color: #EEE; 314 | border: 1px solid #CCC; 315 | } 316 | 317 | div.topic { 318 | background-color: #EEE; 319 | } 320 | 321 | p.admonition-title { 322 | display: inline; 323 | } 324 | 325 | p.admonition-title:after { 326 | content: ":"; 327 | } 328 | 329 | pre, tt, code { 330 | font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; 331 | font-size: 0.9em; 332 | } 333 | 334 | .hll { 335 | background-color: #FFC; 336 | margin: 0 -12px; 337 | padding: 0 12px; 338 | display: block; 339 | } 340 | 341 | img.screenshot { 342 | } 343 | 344 | tt.descname, tt.descclassname, code.descname, code.descclassname { 345 | font-size: 0.95em; 346 | } 347 | 348 | tt.descname, code.descname { 349 | padding-right: 0.08em; 350 | } 351 | 352 | img.screenshot { 353 | -moz-box-shadow: 2px 2px 4px #EEE; 354 | -webkit-box-shadow: 2px 2px 4px #EEE; 355 | box-shadow: 2px 2px 4px #EEE; 356 | } 357 | 358 | table.docutils { 359 | border: 1px solid #888; 360 | -moz-box-shadow: 2px 2px 4px #EEE; 361 | -webkit-box-shadow: 2px 2px 4px #EEE; 362 | box-shadow: 2px 2px 4px #EEE; 363 | } 364 | 365 | table.docutils td, table.docutils th { 366 | border: 1px solid #888; 367 | padding: 0.25em 0.7em; 368 | } 369 | 370 | table.field-list, table.footnote { 371 | border: none; 372 | -moz-box-shadow: none; 373 | -webkit-box-shadow: none; 374 | box-shadow: none; 375 | } 376 | 377 | table.footnote { 378 | margin: 15px 0; 379 | width: 100%; 380 | border: 1px solid #EEE; 381 | background: #FDFDFD; 382 | font-size: 0.9em; 383 | } 384 | 385 | table.footnote + table.footnote { 386 | margin-top: -15px; 387 | border-top: none; 388 | } 389 | 390 | table.field-list th { 391 | padding: 0 0.8em 0 0; 392 | } 393 | 394 | table.field-list td { 395 | padding: 0; 396 | } 397 | 398 | table.field-list p { 399 | margin-bottom: 0.8em; 400 | } 401 | 402 | /* Cloned from 403 | * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68 404 | */ 405 | .field-name { 406 | -moz-hyphens: manual; 407 | -ms-hyphens: manual; 408 | -webkit-hyphens: manual; 409 | hyphens: manual; 410 | } 411 | 412 | table.footnote td.label { 413 | width: .1px; 414 | padding: 0.3em 0 0.3em 0.5em; 415 | } 416 | 417 | table.footnote td { 418 | padding: 0.3em 0.5em; 419 | } 420 | 421 | dl { 422 | margin: 0; 423 | padding: 0; 424 | } 425 | 426 | dl dd { 427 | margin-left: 30px; 428 | } 429 | 430 | blockquote { 431 | margin: 0 0 0 30px; 432 | padding: 0; 433 | } 434 | 435 | ul, ol { 436 | /* Matches the 30px from the narrow-screen "li > ul" selector below */ 437 | margin: 10px 0 10px 30px; 438 | padding: 0; 439 | } 440 | 441 | pre { 442 | background: #EEE; 443 | padding: 7px 30px; 444 | margin: 15px 0px; 445 | line-height: 1.3em; 446 | } 447 | 448 | div.viewcode-block:target { 449 | background: #ffd; 450 | } 451 | 452 | dl pre, blockquote pre, li pre { 453 | margin-left: 0; 454 | padding-left: 30px; 455 | } 456 | 457 | tt, code { 458 | background-color: #ecf0f3; 459 | color: #222; 460 | /* padding: 1px 2px; */ 461 | } 462 | 463 | tt.xref, code.xref, a tt { 464 | background-color: #FBFBFB; 465 | border-bottom: 1px solid #fff; 466 | } 467 | 468 | a.reference { 469 | text-decoration: none; 470 | border-bottom: 1px dotted #004B6B; 471 | } 472 | 473 | /* Don't put an underline on images */ 474 | a.image-reference, a.image-reference:hover { 475 | border-bottom: none; 476 | } 477 | 478 | a.reference:hover { 479 | border-bottom: 1px solid #6D4100; 480 | } 481 | 482 | a.footnote-reference { 483 | text-decoration: none; 484 | font-size: 0.7em; 485 | vertical-align: top; 486 | border-bottom: 1px dotted #004B6B; 487 | } 488 | 489 | a.footnote-reference:hover { 490 | border-bottom: 1px solid #6D4100; 491 | } 492 | 493 | a:hover tt, a:hover code { 494 | background: #EEE; 495 | } 496 | 497 | 498 | @media screen and (max-width: 870px) { 499 | 500 | div.sphinxsidebar { 501 | display: none; 502 | } 503 | 504 | div.document { 505 | width: 100%; 506 | 507 | } 508 | 509 | div.documentwrapper { 510 | margin-left: 0; 511 | margin-top: 0; 512 | margin-right: 0; 513 | margin-bottom: 0; 514 | } 515 | 516 | div.bodywrapper { 517 | margin-top: 0; 518 | margin-right: 0; 519 | margin-bottom: 0; 520 | margin-left: 0; 521 | } 522 | 523 | ul { 524 | margin-left: 0; 525 | } 526 | 527 | li > ul { 528 | /* Matches the 30px from the "ul, ol" selector above */ 529 | margin-left: 30px; 530 | } 531 | 532 | .document { 533 | width: auto; 534 | } 535 | 536 | .footer { 537 | width: auto; 538 | } 539 | 540 | .bodywrapper { 541 | margin: 0; 542 | } 543 | 544 | .footer { 545 | width: auto; 546 | } 547 | 548 | .github { 549 | display: none; 550 | } 551 | 552 | 553 | 554 | } 555 | 556 | 557 | 558 | @media screen and (max-width: 875px) { 559 | 560 | body { 561 | margin: 0; 562 | padding: 20px 30px; 563 | } 564 | 565 | div.documentwrapper { 566 | float: none; 567 | background: #fff; 568 | } 569 | 570 | div.sphinxsidebar { 571 | display: block; 572 | float: none; 573 | width: 102.5%; 574 | margin: 50px -30px -20px -30px; 575 | padding: 10px 20px; 576 | background: #333; 577 | color: #FFF; 578 | } 579 | 580 | div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, 581 | div.sphinxsidebar h3 a { 582 | color: #fff; 583 | } 584 | 585 | div.sphinxsidebar a { 586 | color: #AAA; 587 | } 588 | 589 | div.sphinxsidebar p.logo { 590 | display: none; 591 | } 592 | 593 | div.document { 594 | width: 100%; 595 | margin: 0; 596 | } 597 | 598 | div.footer { 599 | display: none; 600 | } 601 | 602 | div.bodywrapper { 603 | margin: 0; 604 | } 605 | 606 | div.body { 607 | min-height: 0; 608 | padding: 0; 609 | } 610 | 611 | .rtd_doc_footer { 612 | display: none; 613 | } 614 | 615 | .document { 616 | width: auto; 617 | } 618 | 619 | .footer { 620 | width: auto; 621 | } 622 | 623 | .footer { 624 | width: auto; 625 | } 626 | 627 | .github { 628 | display: none; 629 | } 630 | } 631 | 632 | 633 | /* misc. */ 634 | 635 | .revsys-inline { 636 | display: none!important; 637 | } 638 | 639 | /* Make nested-list/multi-paragraph items look better in Releases changelog 640 | * pages. Without this, docutils' magical list fuckery causes inconsistent 641 | * formatting between different release sub-lists. 642 | */ 643 | div#changelog > div.section > ul > li > p:only-child { 644 | margin-bottom: 0; 645 | } 646 | 647 | /* Hide fugly table cell borders in ..bibliography:: directive output */ 648 | table.docutils.citation, table.docutils.citation td, table.docutils.citation th { 649 | border: none; 650 | /* Below needed in some edge cases; if not applied, bottom shadows appear */ 651 | -moz-box-shadow: none; 652 | -webkit-box-shadow: none; 653 | box-shadow: none; 654 | } 655 | 656 | 657 | /* relbar */ 658 | 659 | .related { 660 | line-height: 30px; 661 | width: 100%; 662 | font-size: 0.9rem; 663 | } 664 | 665 | .related.top { 666 | border-bottom: 1px solid #EEE; 667 | margin-bottom: 20px; 668 | } 669 | 670 | .related.bottom { 671 | border-top: 1px solid #EEE; 672 | } 673 | 674 | .related ul { 675 | padding: 0; 676 | margin: 0; 677 | list-style: none; 678 | } 679 | 680 | .related li { 681 | display: inline; 682 | } 683 | 684 | nav#rellinks { 685 | float: right; 686 | } 687 | 688 | nav#rellinks li+li:before { 689 | content: "|"; 690 | } 691 | 692 | nav#breadcrumbs li+li:before { 693 | content: "\00BB"; 694 | } 695 | 696 | /* Hide certain items when printing */ 697 | @media print { 698 | div.related { 699 | display: none; 700 | } 701 | } --------------------------------------------------------------------------------