├── Chapter 11
├── 01 - Anatomy of Python package
│ ├── LICENSE
│ ├── MANIFEST.in
│ ├── README.md
│ ├── CHANGELOG.md
│ ├── tests
│ │ ├── __init__.py
│ │ └── conftest.py
│ ├── package_name
│ │ └── __init__.py
│ ├── setup.py
│ └── setup.cfg
├── 03 - Package scripts and entry points
│ ├── scripts
│ │ └── findimports
│ ├── setup.py
│ └── findimports.py
├── 02 - Namespace packages
│ ├── setup.py
│ └── acme
│ │ └── templating
│ │ │ └── __init__.py
└── 04 - Convenient configuration handling
│ ├── plain_config.py
│ └── environ_config.py
├── Chapter 2
├── 01 - Writing your first Dockerfile
│ ├── requirements.txt
│ ├── Dockerfile
│ └── echo.py
├── 02 - Setting up complex environments
│ ├── requirements.txt
│ ├── Dockerfile
│ ├── echo.py
│ └── docker-compose.yml
├── 03 - Reducing the size of containers
│ ├── requirements.txt
│ ├── Dockerfile
│ └── echo.py
├── 05 - Communicating between Docker Compose environments
│ ├── requirements.txt
│ ├── Dockerfile
│ ├── echo.py
│ ├── docker-compose.other.yml
│ └── docker-compose.yml
├── 04 - Addressing services inside of a Docker Compose environment
│ ├── requirements.txt
│ ├── Dockerfile
│ ├── docker-compose.yml
│ └── echo.py
├── 06 - Delaying code start up until service ports are open
│ ├── requirements.txt
│ ├── Dockerfile
│ ├── echo.py
│ └── docker-compose.yml
├── 07 - Adding live reload for absolutely any code
│ ├── requirements.txt
│ ├── Dockerfile
│ ├── docker-compose.yml
│ └── echo.py
├── 09 - Custom Python shells
│ └── pythonstartup.py
└── 08 - Virtual development environments using Vagrant
│ └── Vagrantfile
├── Chapter 4
├── 03 - Class instance initialization
│ ├── aggregator_independent.py
│ └── aggregator_shared.py
├── 06 - Dunder methods
│ ├── stub.cpp
│ ├── matrices.py
│ ├── matrices_with_scalars.py
│ └── matrices_with_singledistpatch.py
├── 02 - Multiple iheritance and method resolution order
│ ├── mro.py
│ └── C3.txt
├── 08 - Dataclasses
│ ├── vector_dataclasses.py
│ └── vector.py
├── 05 - Real-life example - lazily evaluated attributes
│ ├── lazy_property.py
│ ├── lazily_evaluated.py
│ └── lazy_class_attribute.py
├── 07 - Single dispatch
│ └── dispatch.py
├── 04 - Descriptors
│ └── reveal_access.py
└── 01 - Accessing super classes
│ └── caseinsensitive.py
├── Chapter 10
├── 06 - Test coverage
│ ├── setup.cfg
│ ├── docker-compose.yml
│ ├── README
│ ├── interfaces.py
│ ├── backends.py
│ └── test_backends.py
├── 04 - Using fakes
│ ├── docker-compose.yml
│ ├── README
│ ├── interfaces.py
│ ├── acme_fakes.py
│ ├── acme_sdk.py
│ ├── backends.py
│ └── test_backends.py
├── 03 - Pytests fixtures
│ ├── docker-compose.yml
│ ├── README
│ ├── interfaces.py
│ ├── test_fixtures.py
│ ├── backends.py
│ └── test_backends.py
├── 07 - Mutation testing
│ ├── setup.cfg
│ ├── test_primes.py
│ ├── primes.py
│ └── test_primes_after_fixes.py
├── 02 - Test parametrization
│ ├── setup.cfg
│ ├── batch.py
│ └── test_batch.py
├── 01 - Writing tests with pytest
│ ├── batch.py
│ ├── batch_1st_iteration.py
│ └── test_batch.py
├── 08 - Faking realistic data values
│ ├── mailer.py
│ └── test_mailer.py
├── 05 - Mocks and unittest.mock module
│ ├── mailer.py
│ └── test_mailer.py
└── 09 - Faking time
│ └── test_time.py
├── Chapter 9
├── 04 - Cython as a source-to-source compiler
│ ├── .gitignore
│ ├── fibonacci.py
│ └── setup.py
├── 02 - Pure C extensions
│ ├── setup.py
│ └── fibonacci.c
├── 03 - Exception handling
│ ├── setup.py
│ └── fibonacci.c
├── 01 - Writing extensions
│ └── fibonacci.py
├── 05 - Cython as a language
│ ├── fibonacci.pyx
│ └── setup.py
└── 06 - Calling C functions using ctypes
│ └── qsort.py
├── Chapter 8
├── 06 - Hy
│ └── hyllo.hy
├── 02 - Intercepting class instance creation process
│ ├── xlist.py
│ └── instance_counting.py
├── 05 - Falcons compiled router
│ └── api.py
├── 03 - Metaclass usage
│ └── case_user.py
├── 01 - One step deeper: class decorators
│ ├── autorepr.py
│ └── autorepr_subclassed.py
└── 04 - Using __init__subclass__ method as alternative to metaclasses
│ ├── autorepr.py
│ └── autorepr_with_init_subclass.py
├── Chapter 3
├── 03 - Positional only parameters
│ ├── cat3.py
│ ├── cat2.py
│ └── cat.py
├── 06 - Development mode
│ └── crasher.py
├── 07 - Structural pattern matching
│ ├── fizzbuzz.py
│ ├── platforms.py
│ ├── points.py
│ └── positional_points.py
├── 02 - Assignment expressions
│ ├── user_literal2.py
│ ├── user_literal.py
│ ├── findimports2.py
│ └── findimports.py
├── 04 - graphlib module
│ └── migrations.py
├── 05 - Module-level __getattr__ and __dir__ functions
│ └── import_warnings.py
└── 01 - ChainMap from collections module
│ └── user_maps.py
├── .gitignore
├── Chapter 5
├── 04 - Inversion of control in applications
│ ├── index.html
│ └── tracking.py
├── 05 - Inversion of control in applications p.2
│ ├── Dockerfile
│ ├── docker-compose.yml
│ ├── interfaces.py
│ ├── backends.py
│ └── tracking.py
├── 06 - Using dependency injection frameworks
│ ├── docker-compose.yml
│ ├── Dockerfile
│ ├── interfaces.py
│ ├── di.py
│ ├── backends.py
│ └── tracking.py
├── 02 - Using function annotations and abstract base classes
│ ├── dummy_interface.py
│ ├── colliders_abc.py
│ └── colliders_subclasshooks.py
├── 01 - A bit of history: zope.interface
│ ├── colliders_simple.py
│ ├── colliders_interfaces.py
│ └── colliders_invariants.py
└── 03 - Interfaces though type annotations
│ └── colliders_protocol.py
├── Chapter 6
├── 01 - What is multithreading
│ ├── start_new_thread.py
│ ├── start_new_threads.py
│ ├── thread_visits.py
│ └── thread_safe_visits.py
├── 13 - A practical example of asynchronous programming
│ ├── asyncrates.py
│ └── async_aiohttp.py
├── 12 - Python async and await keywords
│ ├── waiters.py
│ ├── async_print.py
│ └── waiters_await.py
├── 09 - The built-in multiprocessing module
│ ├── sharedctypes.py
│ ├── basic_multiprocessing.py
│ └── pipes.py
├── 08 - Multiprocessing
│ └── forks.py
├── 02 - An example of a threaded application
│ └── synchronous.py
├── 10 - Using process pools
│ └── process_pools.py
├── 03 - Using one thread per item
│ └── one_thread_per_item.py
├── 11 - Using multiprocessing.dummy as the multithreading interface
│ └── multiprocessing_dummy.py
├── 14 - Integrating non-asynchronous code with async using futures
│ └── async_futures.py
├── 04 - Using a thread pool
│ └── thread_pool.py
├── 05 - Using two-way queues
│ └── two_way_queues.py
├── 06 - Dealing with errors in threads
│ └── error_handling.py
└── 07 - Throttling
│ └── throttling.py
├── Chapter 12
├── 01 - Python logging essentials
│ └── basic_logging.py
├── 05 - Using Prometheus
│ ├── Dockerfile
│ ├── interfaces.py
│ ├── prometheus.yml
│ ├── di.py
│ ├── docker-compose.yml
│ ├── backends.py
│ └── tracking.py
├── 03 - Logging configuration
│ ├── acme_logger.py
│ ├── logging.conf
│ ├── configuration_file.py
│ └── configuration_dict.py
├── 06 - Distributed tracing with Jaeger
│ ├── interfaces.py
│ ├── prometheus.yml
│ ├── Dockerfile
│ ├── di.py
│ ├── docker-compose.yml
│ ├── backends.py
│ └── tracking.py
├── 04 - Capturing errors for later review
│ └── sentry_example.py
└── 02 - Logging system components
│ └── logging_handlers.py
├── Chapter 13
├── 01 - Macro-profiling
│ └── myapp.py
├── 04 - Memoization
│ └── fibonacci.py
├── 03 - Using objgraph module
│ └── myapp.py
└── 02 - Micro-profiling
│ └── myapp.py
├── README.md
├── Chapter 7
├── 01 - Event-driven programming in GUIs
│ └── tk_zen.py
├── 03 - Callback-based style
│ └── tk_zen_binding.py
├── 02 - Event-driven communication
│ └── flask_zen.py
├── 05 - Topic-based style
│ └── topic_based_events.py
└── 04 - Subject-based style
│ └── observers.py
├── requirements.txt
└── LICENSE
/Chapter 11/01 - Anatomy of Python package/LICENSE:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/MANIFEST.in:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/README.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/CHANGELOG.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/tests/conftest.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/package_name/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/Chapter 2/01 - Writing your first Dockerfile/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
--------------------------------------------------------------------------------
/Chapter 2/02 - Setting up complex environments/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
--------------------------------------------------------------------------------
/Chapter 2/03 - Reducing the size of containers/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
--------------------------------------------------------------------------------
/Chapter 4/03 - Class instance initialization/aggregator_independent.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/Chapter 10/06 - Test coverage/setup.cfg:
--------------------------------------------------------------------------------
1 | [coverage:run]
2 | source =
3 | .
4 |
--------------------------------------------------------------------------------
/Chapter 9/04 - Cython as a source-to-source compiler/.gitignore:
--------------------------------------------------------------------------------
1 | fibonacci.c
2 |
--------------------------------------------------------------------------------
/Chapter 8/06 - Hy/hyllo.hy:
--------------------------------------------------------------------------------
1 | ;; hyllo.hy
2 | (defn hello [] (print "hello world!"))
3 |
--------------------------------------------------------------------------------
/Chapter 2/05 - Communicating between Docker Compose environments/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
--------------------------------------------------------------------------------
/Chapter 2/04 - Addressing services inside of a Docker Compose environment/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
--------------------------------------------------------------------------------
/Chapter 11/03 - Package scripts and entry points/scripts/findimports:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | python -m findimports
3 |
--------------------------------------------------------------------------------
/Chapter 2/06 - Delaying code start up until service ports are open/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
2 | wait-for-it==2.1.1
--------------------------------------------------------------------------------
/Chapter 2/07 - Adding live reload for absolutely any code/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==1.1.2
2 | watchdog[watchmedo]==0.10.3
--------------------------------------------------------------------------------
/Chapter 3/03 - Positional only parameters/cat3.py:
--------------------------------------------------------------------------------
1 | def cat(*items, delim: str):
2 | return delim.join(items)
3 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup(
4 | name="mypackage",
5 | )
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | .venv
3 | *.egg-info
4 | *.so
5 | build/
6 | .mutmut-cache
7 | .pytest_cache
8 | .coverage
9 | htmlcov
10 |
--------------------------------------------------------------------------------
/Chapter 11/01 - Anatomy of Python package/setup.cfg:
--------------------------------------------------------------------------------
1 | [global]
2 | quiet=1
3 |
4 | [sdist]
5 | formats=zip,tar
6 |
7 | [bdist_wheel]
8 | universal=1
9 |
--------------------------------------------------------------------------------
/Chapter 10/04 - Using fakes/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | redis:
5 | image: redis
6 | ports:
7 | - 6379:6379
8 |
--------------------------------------------------------------------------------
/Chapter 10/06 - Test coverage/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | redis:
5 | image: redis
6 | ports:
7 | - 6379:6379
8 |
--------------------------------------------------------------------------------
/Chapter 5/04 - Inversion of control in applications/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/Chapter 10/03 - Pytests fixtures/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | redis:
5 | image: redis
6 | ports:
7 | - 6379:6379
8 |
--------------------------------------------------------------------------------
/Chapter 11/02 - Namespace packages/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 |
4 | setup(
5 | name="acme.templating",
6 | packages=["acme.templating"],
7 | )
8 |
--------------------------------------------------------------------------------
/Chapter 10/04 - Using fakes/README:
--------------------------------------------------------------------------------
1 | To execute the tests in this directory, please run Redis locally.
2 | You can use the `docker-compose up` command to start Redis as a
3 | Docker container.
4 |
--------------------------------------------------------------------------------
/Chapter 10/06 - Test coverage/README:
--------------------------------------------------------------------------------
1 | To execute the tests in this directory, please run Redis locally.
2 | You can use the `docker-compose up` command to start Redis as a
3 | Docker container.
4 |
--------------------------------------------------------------------------------
/Chapter 10/07 - Mutation testing/setup.cfg:
--------------------------------------------------------------------------------
1 | [mutmut]
2 | paths_to_mutate=primes.py
3 | backup=False
4 | runner=python -m pytest -x --ignore test_primes_after_fixes.py
5 | tests_dir=.
6 |
--------------------------------------------------------------------------------
/Chapter 3/06 - Development mode/crasher.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | sys.setrecursionlimit(1 << 30)
4 |
5 |
6 | def crasher():
7 | return crasher()
8 |
9 |
10 | crasher()
11 |
--------------------------------------------------------------------------------
/Chapter 10/02 - Test parametrization/setup.cfg:
--------------------------------------------------------------------------------
1 | [mutmut]
2 | paths_to_mutate=batch.py
3 | backup=False
4 | runner=python -m pytest -x
5 | tests_dir=.
6 | dict_synonyms=Struct, NamedStruct
7 |
--------------------------------------------------------------------------------
/Chapter 10/03 - Pytests fixtures/README:
--------------------------------------------------------------------------------
1 | To execute the tests in this directory, please run Redis locally.
2 | You can use the `docker-compose up` command to start Redis as a
3 | Docker container.
4 |
--------------------------------------------------------------------------------
/Chapter 3/03 - Positional only parameters/cat2.py:
--------------------------------------------------------------------------------
1 | def cat(a: str, b: str, /, *, delim: str):
2 | return delim.join([a, b])
3 |
4 |
5 | if __name__ == "__main__":
6 | cat("John", "Doe", delim=" ")
7 |
--------------------------------------------------------------------------------
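A quick check of what the `/` and `*` markers in cat2.py above allow and forbid (illustrative, not one of the repository files; assumes cat() from cat2.py is in scope):

    cat("John", "Doe", delim=" ")        # OK: a and b positional, delim by keyword
    # cat(a="John", b="Doe", delim=" ")  # TypeError: a and b are positional-only
    # cat("John", "Doe", " ")            # TypeError: delim is keyword-only
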
/Chapter 9/02 - Pure C extensions/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, Extension
2 |
3 | setup(
4 | name="fibonacci",
5 | ext_modules=[
6 | Extension("fibonacci", ["fibonacci.c"]),
7 | ],
8 | )
9 |
--------------------------------------------------------------------------------
/Chapter 9/03 - Exception handling/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, Extension
2 |
3 | setup(
4 | name="fibonacci",
5 | ext_modules=[
6 | Extension("fibonacci", ["fibonacci.c"]),
7 | ],
8 | )
9 |
--------------------------------------------------------------------------------
/Chapter 11/02 - Namespace packages/acme/templating/__init__.py:
--------------------------------------------------------------------------------
1 | # version as tuple for simple comparisons
2 | VERSION = (0, 0, 1)
3 | # string created from tuple to avoid inconsistency
4 | __version__ = ".".join([str(x) for x in VERSION])
5 |
--------------------------------------------------------------------------------
/Chapter 5/05 - Inversion of control in applications p.2/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9-slim
2 | WORKDIR app
3 |
4 | RUN pip install \
5 | Flask==1.1.2 \
6 | redis==3.5.3
7 |
8 |
9 | ADD *.py ./
10 | CMD python3 tracking.py --reload
--------------------------------------------------------------------------------
/Chapter 3/07 - Structural pattern matching/fizzbuzz.py:
--------------------------------------------------------------------------------
1 | for i in range(100):
2 | match (i % 3, i % 5):
3 | case (0, 0): print("FizzBuzz")
4 | case (0, _): print("Fizz")
5 | case (_, 0): print("Buzz")
6 | case _: print(i)
7 |
--------------------------------------------------------------------------------
/Chapter 11/03 - Package scripts and entry points/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup(
4 | name="findimports",
5 | version="0.0.0",
6 | py_modules=["findimports"],
7 | entry_points={"console_scripts": ["findimports=findimports:main"]},
8 | )
9 |
--------------------------------------------------------------------------------
/Chapter 3/03 - Positional only parameters/cat.py:
--------------------------------------------------------------------------------
1 | def cat(a: str, b: str, delim: str):
2 | return delim.join([a, b])
3 |
4 |
5 | if __name__ == "__main__":
6 | cat("John", "Doe", " ")
7 | cat(a="John", b="Doe", delim=" ")
8 | cat("John", "Doe", delim=" ")
9 |
--------------------------------------------------------------------------------
/Chapter 4/03 - Class instance initialization/aggregator_shared.py:
--------------------------------------------------------------------------------
1 | class Aggregator:
2 | all_aggregated = []
3 | last_aggregated = None
4 |
5 | def aggregate(self, value):
6 | self.last_aggregated = value
7 | self.all_aggregated.append(value)
8 |
--------------------------------------------------------------------------------
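aggregator_independent.py appears empty in this listing; judging by its name and by the shared-state variant above, a per-instance ("independent") version would presumably look like this sketch:

    class Aggregator:
        def __init__(self):
            # per-instance containers: every Aggregator keeps its own history
            self.all_aggregated = []
            self.last_aggregated = None

        def aggregate(self, value):
            self.last_aggregated = value
            self.all_aggregated.append(value)
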
/Chapter 5/05 - Inversion of control in applications p.2/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | app:
5 | build:
6 | context: .
7 | ports:
8 | - 8000:8000
9 | volumes:
10 | - ".:/app/"
11 |
12 | redis:
13 | image: redis
--------------------------------------------------------------------------------
/Chapter 5/06 - Using dependency injection frameworks/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | app:
5 | build:
6 | context: .
7 | ports:
8 | - 8000:8000
9 | volumes:
10 | - ".:/app/"
11 |
12 | redis:
13 | image: redis
--------------------------------------------------------------------------------
/Chapter 10/07 - Mutation testing/test_primes.py:
--------------------------------------------------------------------------------
1 | from primes import is_prime
2 |
3 |
4 | def test_primes_true():
5 | assert is_prime(5)
6 | assert is_prime(7)
7 |
8 |
9 | def test_primes_false():
10 | assert not is_prime(4)
11 | assert not is_prime(8)
12 |
--------------------------------------------------------------------------------
/Chapter 5/06 - Using dependency injection frameworks/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9-slim
2 | WORKDIR app
3 |
4 | RUN pip install \
5 | Flask==1.1.2 \
6 | redis==3.5.3 \
7 | 'Werkzeug<2.0.0' \
8 | Flask_Injector==0.12.3
9 |
10 |
11 | ADD *.py ./
12 | CMD python3 tracking.py --reload
13 |
--------------------------------------------------------------------------------
/Chapter 6/01 - What is multithreading/start_new_thread.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 |
4 | def my_function():
5 | print("printing from thread")
6 |
7 |
8 | if __name__ == "__main__":
9 | thread = Thread(target=my_function)
10 | thread.start()
11 | thread.join()
12 |
--------------------------------------------------------------------------------
/Chapter 12/01 - Python logging essentials/basic_logging.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logger = logging.getLogger("my_logger")
4 | logging.basicConfig()
5 |
6 | logger.error("This is error message")
7 | logger.warning("This is warning message")
8 | logger.log(logging.CRITICAL, "This is critical message")
9 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9-slim
2 | WORKDIR app
3 |
4 | RUN pip install \
5 | Flask==1.1.2 \
6 | redis==3.5.3 \
7 | Flask_Injector==0.12.3 \
8 | 'Werkzeug<2.0.0' \
9 | prometheus-client==0.10.1
10 |
11 |
12 | ADD *.py ./
13 | CMD python3 tracking.py --reload
14 |
--------------------------------------------------------------------------------
/Chapter 4/06 - Dunder methods/stub.cpp:
--------------------------------------------------------------------------------
1 | Matrix operator+(const Matrix& lhs, const Matrix& rhs)
2 | Matrix operator+(const Matrix& lhs, const int& rhs)
3 | Matrix operator+(const Matrix& lhs, const float& rhs)
4 | Matrix operator+(const int& lhs, const Matrix& rhs)
5 | Matrix operator+(const float& lhs, const Matrix& rhs)
6 |
--------------------------------------------------------------------------------
/Chapter 2/01 - Writing your first Dockerfile/Dockerfile:
--------------------------------------------------------------------------------
1 | # The "slim" versions are sensible starting
2 | # points for other lightweight Python-based images
3 | FROM python:3.9-slim
4 | WORKDIR /app/
5 |
6 | COPY requirements.txt .
7 | RUN pip install -r requirements.txt
8 |
9 | COPY echo.py .
10 | CMD ["python", "echo.py"]
11 |
--------------------------------------------------------------------------------
/Chapter 2/02 - Setting up complex environments/Dockerfile:
--------------------------------------------------------------------------------
1 | # The "slim" versions are sensible starting
2 | # points for other lightweight Python-based images
3 | FROM python:3.9-slim
4 | WORKDIR /app/
5 |
6 | COPY requirements.txt .
7 | RUN pip install -r requirements.txt
8 |
9 | COPY echo.py .
10 | CMD ["python", "echo.py"]
11 |
--------------------------------------------------------------------------------
/Chapter 8/02 - Intercepting class instance creation process/xlist.py:
--------------------------------------------------------------------------------
1 | from collections import UserList
2 |
3 |
4 | class XList(UserList):
5 | @classmethod
6 | def double(cls, iterable):
7 | return cls(iterable) * 2
8 |
9 | @classmethod
10 | def tripple(cls, iterable):
11 | return cls(iterable) * 3
12 |
--------------------------------------------------------------------------------
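A quick sanity check of the alternative constructors above (illustrative, not from the repository):

    assert XList.double([1, 2]) == [1, 2, 1, 2]
    assert XList.tripple("ab") == ["a", "b", "a", "b", "a", "b"]
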
/Chapter 10/04 - Using fakes/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 10/06 - Test coverage/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 10/07 - Mutation testing/primes.py:
--------------------------------------------------------------------------------
1 | def is_prime(number):
2 | if not isinstance(number, int) or number < 0:
3 | return False
4 |
5 | if number in (0, 1):
6 | return False
7 |
8 | for element in range(2, number):
9 | if number % element == 0:
10 | return False
11 | return True
12 |
--------------------------------------------------------------------------------
/Chapter 2/03 - Reducing the size of containers/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM alpine:3.13
2 | WORKDIR /app/
3 |
4 | RUN apk add --no-cache python3
5 |
6 | COPY requirements.txt .
7 |
8 | RUN apk add --no-cache py3-pip && \
9 | pip3 install -r requirements.txt && \
10 | apk del py3-pip
11 |
12 | COPY echo.py .
13 | CMD ["python", "echo.py"]
14 |
--------------------------------------------------------------------------------
/Chapter 2/07 - Adding live reload for absolutely any code/Dockerfile:
--------------------------------------------------------------------------------
1 | # The "slim" versions are sensible starting
2 | # points for other lightweight Python-based images
3 | FROM python:3.9-slim
4 | WORKDIR /app/
5 |
6 | COPY requirements.txt .
7 | RUN pip install -r requirements.txt
8 |
9 | COPY echo.py .
10 | CMD ["python", "echo.py"]
11 |
--------------------------------------------------------------------------------
/Chapter 10/03 - Pytests fixtures/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 12/03 - Logging configuration/acme_logger.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | acme_logger = logging.getLogger("acme.utils")
4 | acme_logger.disabled = True  # option 1: disable the logger entirely
5 |
6 | acme_logger = logging.getLogger("acme.utils")
7 | acme_logger.handlers.clear()  # option 2: remove all attached handlers
8 |
9 | acme_logger = logging.getLogger("acme.utils")
10 | acme_logger.setLevel(logging.CRITICAL)  # option 3: let only CRITICAL records through
11 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 2/05 - Communicating between Docker Compose environments/Dockerfile:
--------------------------------------------------------------------------------
1 | # The "slim" versions are sensible starting
2 | # points for other lightweight Python-based images
3 | FROM python:3.9-slim
4 | WORKDIR /app/
5 |
6 | COPY requirements.txt .
7 | RUN pip install -r requirements.txt
8 |
9 | COPY echo.py .
10 | CMD ["python", "echo.py"]
11 |
--------------------------------------------------------------------------------
/Chapter 2/06 - Delaying code start up until service ports are open/Dockerfile:
--------------------------------------------------------------------------------
1 | # The "slim" versions are sensible starting
2 | # points for other lightweight Python-based images
3 | FROM python:3.9-slim
4 | WORKDIR /app/
5 |
6 | COPY requirements.txt .
7 | RUN pip install -r requirements.txt
8 |
9 | COPY echo.py .
10 | CMD ["python", "echo.py"]
11 |
--------------------------------------------------------------------------------
/Chapter 10/03 - Pytests fixtures/test_fixtures.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 |
4 | @pytest.fixture
5 | def dependency():
6 | return "fixture value"
7 |
8 |
9 | @pytest.fixture
10 | def dependency_as_generator():
11 | # setup code
12 | yield "fixture value"
13 | # teardown code
14 |
15 |
16 | def test_fixture(dependency):
17 | pass
18 |
--------------------------------------------------------------------------------
/Chapter 2/04 - Addressing services inside of a Docker Compose environment/Dockerfile:
--------------------------------------------------------------------------------
1 | # The "slim" versions are sensible starting
2 | # points for other lightweight Python-based images
3 | FROM python:3.9-slim
4 | WORKDIR /app/
5 |
6 | COPY requirements.txt .
7 | RUN pip install -r requirements.txt
8 |
9 | COPY echo.py .
10 | CMD ["python", "echo.py"]
11 |
--------------------------------------------------------------------------------
/Chapter 2/07 - Adding live reload for absolutely any code/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | services:
4 | echo-server:
5 | build: .
6 | ports:
7 | - "5000:5000"
8 | tty: true
9 | command:
10 | watchmedo auto-restart --patterns "*.py" --recursive --
11 | python echo.py
12 | volumes:
13 | - .:/app/
14 |
--------------------------------------------------------------------------------
/Chapter 12/06 - Distributed tracing with Jaeger/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 5/05 - Inversion of control in applications p.2/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 5/06 - Using dependency injection frameworks/interfaces.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Dict
3 |
4 |
5 | class ViewsStorageBackend(ABC):
6 | @abstractmethod
7 | def increment(self, key: str):
8 | ...
9 |
10 | @abstractmethod
11 | def most_common(self, n: int) -> Dict[str, int]:
12 | ...
13 |
--------------------------------------------------------------------------------
/Chapter 9/01 - Writing extensions/fibonacci.py:
--------------------------------------------------------------------------------
1 | """Python module that provides fibonacci sequence function"""
2 |
3 |
4 | def fibonacci(n):
5 | """Return nth Fibonacci sequence number computed recursively."""
6 | if n == 0:
7 | return 0
8 | if n == 1:
9 | return 1
10 | else:
11 | return fibonacci(n - 1) + fibonacci(n - 2)
12 |
--------------------------------------------------------------------------------
/Chapter 3/07 - Structural pattern matching/platforms.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | match sys.platform:
4 | case "windows":
5 | print("Running on Windows")
6 | case "darwin" :
7 | print("Running on macOS")
8 | case "linux":
9 | print("Running on Linux")
10 | case _:
11 | raise NotImplementedError(f"{sys.platform} not supported!")
12 |
--------------------------------------------------------------------------------
/Chapter 6/13 - A practical example of asynchronous programming/asyncrates.py:
--------------------------------------------------------------------------------
1 | import aiohttp
2 |
3 |
4 | async def get_rates(session: aiohttp.ClientSession, base: str):
5 | async with session.get(f"https://api.vatcomply.com/rates?base={base}") as response:
6 | rates = (await response.json())["rates"]
7 | rates[base] = 1.0
8 |
9 | return base, rates
10 |
--------------------------------------------------------------------------------
/Chapter 9/04 - Cython as a source-to-source compiler/fibonacci.py:
--------------------------------------------------------------------------------
1 | """Python module that provides fibonacci sequence function"""
2 |
3 |
4 | def fibonacci(n):
5 | """Return nth Fibonacci sequence number computed recursively."""
6 | if n == 0:
7 | return 0
8 | if n == 1:
9 | return 1
10 | else:
11 | return fibonacci(n - 1) + fibonacci(n - 2)
12 |
--------------------------------------------------------------------------------
/Chapter 6/01 - What is multithreading/start_new_threads.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 |
4 | def my_function():
5 | print("printing from thread")
6 |
7 |
8 | if __name__ == "__main__":
9 | threads = [Thread(target=my_function) for _ in range(10)]
10 | for thread in threads:
11 | thread.start()
12 |
13 | for thread in threads:
14 | thread.join()
15 |
--------------------------------------------------------------------------------
/Chapter 10/02 - Test parametrization/batch.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable, List
2 | from itertools import islice
3 |
4 |
5 | def batches(iterable: Iterable[Any], batch_size: int) -> Iterable[List[Any]]:
6 | iterator = iter(iterable)
7 |
8 | while True:
9 | batch = list(islice(iterator, batch_size))
10 |
11 | if not batch:
12 | return
13 |
14 | yield batch
15 |
--------------------------------------------------------------------------------
/Chapter 10/01 - Writing tests with pytest/batch.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable, List
2 | from itertools import islice
3 |
4 |
5 | def batches(iterable: Iterable[Any], batch_size: int) -> Iterable[List[Any]]:
6 | iterator = iter(iterable)
7 |
8 | while True:
9 | batch = list(islice(iterator, batch_size))
10 |
11 | if not batch:
12 | return
13 |
14 | yield batch
15 |
--------------------------------------------------------------------------------
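A quick usage check for the batches() generator defined above (illustrative, not one of the repository's test files):

    assert list(batches(range(7), 3)) == [[0, 1, 2], [3, 4, 5], [6]]
    assert list(batches([], 3)) == []
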
/Chapter 4/02 - Multiple iheritance and method resolution order/mro.py:
--------------------------------------------------------------------------------
1 | class CommonBase:
2 | pass
3 |
4 |
5 | class Base1(CommonBase):
6 | pass
7 |
8 |
9 | class Base2(CommonBase):
10 | def method(self):
11 | print("Base2.method() called")
12 |
13 |
14 | class MyClass(Base1, Base2):
15 | pass
16 |
17 |
18 | if __name__ == "__main__":
19 | print("MyClass's MRO:", MyClass.__mro__)
20 |
--------------------------------------------------------------------------------
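For reference, the linearization printed by the script above resolves method() to Base2 even though Base1 is listed first, because Base1 defines no method of its own:

    >>> [cls.__name__ for cls in MyClass.__mro__]
    ['MyClass', 'Base1', 'Base2', 'CommonBase', 'object']
    >>> MyClass().method()
    Base2.method() called
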
/Chapter 10/04 - Using fakes/acme_fakes.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 |
4 |
5 | class AcmeHashMapFake:
6 | def __init__(self):
7 | self._counter = Counter()
8 |
9 | def atomic_incr(self, key: str, amount):
10 | self._counter[key] += amount
11 |
12 | def top_keys(self, count: int) -> Dict[str, int]:
13 | return dict(self._counter.most_common(count))
14 |
--------------------------------------------------------------------------------
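A minimal demonstration of the fake above standing in for the real AcmeHashMap (illustrative; the repository's test_backends.py is not reproduced in this listing):

    fake = AcmeHashMapFake()
    fake.atomic_incr("/home", 1)
    fake.atomic_incr("/home", 2)
    fake.atomic_incr("/about", 1)
    assert fake.top_keys(1) == {"/home": 3}
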
/Chapter 3/02 - Assignment expressions/user_literal2.py:
--------------------------------------------------------------------------------
1 | from pprint import pprint
2 |
3 |
4 | user = {
5 | "first_name": (first_name := "John"),
6 | "last_name": (last_name := "Doe"),
7 | "display_name": f"{first_name} {last_name}",
8 | "height": (height := 168),
9 | "weight": (weight := 70),
10 | "bmi": weight / (height / 100) ** 2,
11 | }
12 |
13 | if __name__ == "__main__":
14 | pprint(user)
15 |
--------------------------------------------------------------------------------
/Chapter 2/01 - Writing your first Dockerfile/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 2/02 - Setting up complex environments/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 2/03 - Reducing the size of containers/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 10/07 - Mutation testing/test_primes_after_fixes.py:
--------------------------------------------------------------------------------
1 | from primes import is_prime
2 |
3 |
4 | def test_primes_true():
5 | assert is_prime(2)
6 | assert is_prime(5)
7 | assert is_prime(7)
8 |
9 |
10 | def test_primes_false():
11 | assert not is_prime(-200)
12 | assert not is_prime(3.1)
13 | assert not is_prime(0)
14 | assert not is_prime(1)
15 | assert not is_prime(4)
16 | assert not is_prime(8)
17 |
--------------------------------------------------------------------------------
/Chapter 13/01 - Macro-profiling/myapp.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 |
4 | def medium():
5 | time.sleep(0.01)
6 |
7 |
8 | def light():
9 | time.sleep(0.001)
10 |
11 |
12 | def heavy():
13 | for i in range(100):
14 | light()
15 | medium()
16 | medium()
17 | time.sleep(2)
18 |
19 |
20 | def main():
21 | for i in range(2):
22 | heavy()
23 |
24 |
25 | if __name__ == "__main__":
26 | main()
27 |
--------------------------------------------------------------------------------
/Chapter 3/04 - graphlib module/migrations.py:
--------------------------------------------------------------------------------
1 | from graphlib import TopologicalSorter
2 |
3 | table_references = {
4 | "customers": set(),
5 | "accounts": {"customers"},
6 | "products": set(),
7 | "orders": {"accounts", "customers"},
8 | "order_products": {"orders", "products"},
9 | }
10 |
11 | if __name__ == "__main__":
12 | sorter = TopologicalSorter(table_references)
13 | print(list(sorter.static_order()))
14 |
--------------------------------------------------------------------------------
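A property-style check that could be appended to the __main__ block above (illustrative): every table must appear after all of the tables it references, whatever the exact tie order produced by static_order().

    order = list(TopologicalSorter(table_references).static_order())
    position = {name: i for i, name in enumerate(order)}
    assert all(
        position[dependency] < position[name]
        for name, dependencies in table_references.items()
        for dependency in dependencies
    )
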
/Chapter 11/04 - Convenient configuration handling/plain_config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from datetime import timedelta
3 |
4 | DATABASE_URI = os.environ["DATABASE_URI"]
5 | ENCRYPTION_KEY = os.environ["ENCRYPTION_KEY"]
6 |
7 | BIND_HOST = os.environ.get("BIND_HOST", "localhost")
8 | BIND_PORT = int(os.environ.get("BIND_PORT", "80"))
9 |
10 | SCHEDULE_INTERVAL = timedelta(
11 | seconds=int(os.environ.get("SCHEDULE_INTERVAL_SECONDS", 50))
12 | )
13 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/prometheus.yml:
--------------------------------------------------------------------------------
1 | global:
2 | scrape_interval: 15s
3 | evaluation_interval: 15s
4 |
5 | external_labels:
6 | monitor: 'compose'
7 |
8 | scrape_configs:
9 | - job_name: 'prometheus'
10 | scrape_interval: 5s
11 | static_configs:
12 | - targets: ['localhost:9090']
13 |
14 | - job_name: 'app'
15 | scrape_interval: 5s
16 | static_configs:
17 | - targets: ["app:8000"]
18 |
--------------------------------------------------------------------------------
/Chapter 2/07 - Adding live reload for absolutely any code/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 2/04 - Addressing services inside of a Docker Compose environment/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | services:
4 | echo-server:
5 | build: .
6 | ports:
7 | - "5000:5000"
8 | tty: true
9 | environment:
10 | - DATABASE_HOSTNAME=database
11 | - DATABASE_PORT=5432
12 | - DATABASE_PASSWORD=password
13 |
14 | database:
15 | image: postgres
16 | environment:
17 | POSTGRES_PASSWORD: password
--------------------------------------------------------------------------------
/Chapter 2/05 - Communicating between Docker Compose environments/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 2/06 - Delaying code start up until service ports are open/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 2/05 - Communicating between Docker Compose environments/docker-compose.other.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | networks:
4 | default:
5 | external:
6 | name: my-interservice-network
7 |
8 | services:
9 | other-service:
10 | build: .
11 | ports:
12 | - "80:80"
13 | tty: true
14 | environment:
15 | - DATABASE_HOSTNAME=database
16 | - DATABASE_PORT=5432
17 | - ECHO_SERVER_ADDRESS=http://echo-server:5000
18 |
--------------------------------------------------------------------------------
/Chapter 2/06 - Delaying code start up until service ports are open/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | services:
4 | echo-server:
5 | build: .
6 | ports:
7 | - "5000:5000"
8 | tty: true
9 | depends_on:
10 | - database
11 | command:
12 | wait-for-it --service database:5432 --
13 | python echo.py
14 |
15 | database:
16 | image: postgres
17 | environment:
18 | POSTGRES_PASSWORD: password
19 |
--------------------------------------------------------------------------------
/Chapter 3/05 - Module-level __getattr__ and __dir__ functions/import_warnings.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from warnings import warn
3 |
4 |
5 | def ci_lookup(d: dict[str, Any], key: str) -> Any:
6 | ...
7 |
8 |
9 | def __getattr__(name: str):
10 | if name == "get_ci":
11 | warn(f"{name} is deprecated", DeprecationWarning)
12 | return ci_lookup
13 |
14 | raise AttributeError(f"module {__name__} has no attribute {name}")
15 |
--------------------------------------------------------------------------------
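A sketch of what a caller sees when touching the deprecated name (assumes the module above is importable as import_warnings; not part of the repository):

    import warnings
    import import_warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        func = import_warnings.get_ci  # routed through the module-level __getattr__

    assert func is import_warnings.ci_lookup
    assert issubclass(caught[0].category, DeprecationWarning)
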
/Chapter 12/06 - Distributed tracing with Jaeger/prometheus.yml:
--------------------------------------------------------------------------------
1 | global:
2 | scrape_interval: 15s
3 | evaluation_interval: 15s
4 |
5 | external_labels:
6 | monitor: 'compose'
7 |
8 | scrape_configs:
9 | - job_name: 'prometheus'
10 | scrape_interval: 5s
11 | static_configs:
12 | - targets: ['localhost:9090']
13 |
14 | - job_name: 'app'
15 | scrape_interval: 5s
16 | static_configs:
17 | - targets: ["app:8000"]
18 |
--------------------------------------------------------------------------------
/Chapter 2/04 - Addressing services inside of a Docker Compose environment/echo.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route("/")
7 | def echo():
8 | print(request.headers)
9 | return (
10 | f"METHOD: {request.method}\n"
11 | f"HEADERS:\n{request.headers}"
12 | f"BODY:\n{request.data.decode()}"
13 | )
14 |
15 |
16 | if __name__ == "__main__":
17 | app.run(host="0.0.0.0")
18 |
--------------------------------------------------------------------------------
/Chapter 12/06 - Distributed tracing with Jaeger/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9-slim
2 | WORKDIR app
3 |
4 | RUN pip install \
5 | Flask==1.1.2 \
6 | redis==3.5.3 \
7 | Flask_Injector==0.12.3 \
8 | prometheus-client==0.10.1 \
9 | jaeger-client==4.4.0 \
10 | opentracing==2.4.0 \
11 | 'Werkzeug<2.0.0' \
12 | Flask-OpenTracing==1.1.0
13 |
14 | RUN pip install --no-deps redis_opentracing==1.0.0
15 |
16 |
17 | ADD *.py ./
18 | CMD python3 tracking.py --reload
19 |
--------------------------------------------------------------------------------
/Chapter 3/02 - Assignment expressions/user_literal.py:
--------------------------------------------------------------------------------
1 | from pprint import pprint
2 |
3 |
4 | first_name = "John"
5 | last_name = "Doe"
6 | height = 168
7 | weight = 70
8 |
9 | user = {
10 | "first_name": first_name,
11 | "last_name": last_name,
12 | "display_name": f"{first_name} {last_name}",
13 | "height": height,
14 | "weight": weight,
15 | "bmi": weight / (height / 100) ** 2,
16 | }
17 |
18 | if __name__ == "__main__":
19 | pprint(user)
20 |
--------------------------------------------------------------------------------
/Chapter 10/01 - Writing tests with pytest/batch_1st_iteration.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable, List
2 |
3 |
4 | def batches(iterable: Iterable[Any], batch_size: int) -> Iterable[List[Any]]:
5 | results = []
6 | batch = []
7 |
8 | for item in iterable:
9 | batch.append(item)
10 | if len(batch) == batch_size:
11 | results.append(batch)
12 | batch = []
13 |
14 | if batch:
15 | results.append(batch)
16 |
17 | return results
18 |
--------------------------------------------------------------------------------
/Chapter 2/05 - Communicating between Docker Compose environments/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | networks:
4 | default:
5 | external:
6 | name: my-interservice-network
7 |
8 | services:
9 | echo-server:
10 | build: .
11 | ports:
12 | - "5000:5000"
13 | tty: true
14 | environment:
15 | - DATABASE_HOSTNAME=database
16 | - DATABASE_PORT=5432
17 |
18 | database:
19 | image: postgres
20 | environment:
21 | POSTGRES_PASSWORD: password
--------------------------------------------------------------------------------
/Chapter 6/12 - Python async and await keywords/waiters.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import random
3 | import time
4 |
5 |
6 | async def waiter(name):
7 | for _ in range(4):
8 | time_to_sleep = random.randint(1, 3) / 4
9 | time.sleep(time_to_sleep)
10 | print(f"{name} waited {time_to_sleep} seconds")
11 |
12 |
13 | if __name__ == "__main__":
14 | loop = asyncio.get_event_loop()
15 | loop.run_until_complete(asyncio.gather(waiter("first"), waiter("second")))
16 | loop.close()
17 |
--------------------------------------------------------------------------------
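The companion waiters_await.py is not reproduced in this listing; given the section title, the non-blocking variant of the coroutine above would presumably replace the blocking time.sleep() with an awaited asyncio.sleep():

    async def waiter(name):
        for _ in range(4):
            time_to_sleep = random.randint(1, 3) / 4
            await asyncio.sleep(time_to_sleep)  # yields control to the event loop
            print(f"{name} waited {time_to_sleep} seconds")
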
/Chapter 8/05 - Falcons compiled router/api.py:
--------------------------------------------------------------------------------
1 | import falcon
2 | import json
3 |
4 |
5 | class QuoteResource:
6 | def on_get(self, req, resp):
7 | """Handles GET requests"""
8 | quote = {
9 | "quote": "I've always been more interested in "
10 | "the future than in the past.",
11 | "author": "Grace Hopper",
12 | }
13 |
14 | resp.body = json.dumps(quote)
15 |
16 |
17 | api = falcon.API()
18 | api.add_route("/quote", QuoteResource())
19 |
--------------------------------------------------------------------------------
/Chapter 12/03 - Logging configuration/logging.conf:
--------------------------------------------------------------------------------
1 | [formatters]
2 | keys=default
3 |
4 | [loggers]
5 | keys=root
6 |
7 | [handlers]
8 | keys=logfile
9 |
10 | [logger_root]
11 | handlers=logfile
12 | level=INFO
13 |
14 | [formatter_default]
15 | format=%(asctime)s | %(levelname)s | %(name)s | %(filename)s:%(lineno)d | %(message)s
16 |
17 | [handler_logfile]
18 | class=logging.handlers.TimedRotatingFileHandler
19 | formatter=default
20 | kwargs={"filename": "application.log", "when": "D", "backupCount": 30}
21 |
--------------------------------------------------------------------------------
/Chapter 12/04 - Capturing errors for later review/sentry_example.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import sentry_sdk
4 | from sentry_sdk.integrations.logging import LoggingIntegration
5 |
6 | sentry_logging = LoggingIntegration(
7 | level=logging.INFO,
8 | event_level=logging.ERROR,
9 | )
10 |
11 | sentry_sdk.init(
12 | dsn="https://:@app.getsentry.com/",
13 | integrations=[sentry_logging],
14 | )
15 |
16 | try:
17 | 1 / 0
18 | except Exception as e:
19 | sentry_sdk.capture_exception(e)
20 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | # Expert-Python-Programming-Fourth-Edition
5 | Expert Python Programming, Fourth Edition published by Packt
6 |
7 | For the package versions used in the examples, see the `requirements.txt` file.
8 | ### Download a free PDF
9 |
10 | If you have already purchased a print or Kindle version of this book, you can get a DRM-free PDF version at no cost. Simply click on the link to claim your free PDF.
11 | https://packt.link/free-ebook/9781801071109
--------------------------------------------------------------------------------
/Chapter 2/02 - Setting up complex environments/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | services:
4 | echo-server:
5 | # this tells Docker Compose to build the image
6 | # from the local (.) directory
7 | build: .
8 |
9 | # this is equivalent to the "-p" option of
10 | # the "docker run" command
11 | ports:
12 | - "5000:5000"
13 |
14 | # this is equivalent to the "-t" option of
15 | # the "docker run" command
16 | tty: true
17 |
18 | database:
19 | image: postgres
20 |
21 | cache:
22 | image: redis
23 |
--------------------------------------------------------------------------------
/Chapter 10/08 - Faking realistic data values/mailer.py:
--------------------------------------------------------------------------------
1 | import smtplib
2 | import email.message
3 |
4 |
5 | def send(sender, to, subject="None", body="None", server="localhost"):
6 | """sends a message."""
7 | message = email.message.Message()
8 | message["To"] = to
9 | message["From"] = sender
10 | message["Subject"] = subject
11 | message.set_payload(body)
12 |
13 | client = smtplib.SMTP(server)
14 | try:
15 | return client.sendmail(sender, to, message.as_string())
16 | finally:
17 | client.quit()
18 |
--------------------------------------------------------------------------------
/Chapter 9/05 - Cython as a language/fibonacci.pyx:
--------------------------------------------------------------------------------
1 | """Cython module that provides fibonacci sequence function."""
2 |
3 |
4 | cdef long long fibonacci_cc(unsigned int n) nogil:
5 | if n == 0:
6 | return 0
7 | if n == 1:
8 | return 1
9 | else:
10 | return fibonacci_cc(n - 1) + fibonacci_cc(n - 2)
11 |
12 |
13 | def fibonacci(unsigned int n):
14 | """ Return nth Fibonacci sequence number computed recursively
15 | """
16 | with nogil:
17 | result = fibonacci_cc(n)
18 |
19 | return result
20 |
--------------------------------------------------------------------------------
/Chapter 10/05 - Mocks and unittest.mock module/mailer.py:
--------------------------------------------------------------------------------
1 | import smtplib
2 | import email.message
3 |
4 |
5 | def send(sender, to, subject="None", body="None", server="localhost"):
6 | """sends a message."""
7 | message = email.message.Message()
8 | message["To"] = to
9 | message["From"] = sender
10 | message["Subject"] = subject
11 | message.set_payload(body)
12 |
13 | client = smtplib.SMTP(server)
14 | try:
15 | return client.sendmail(sender, to, message.as_string())
16 | finally:
17 | client.quit()
18 |
--------------------------------------------------------------------------------
/Chapter 6/12 - Python async and await keywords/async_print.py:
--------------------------------------------------------------------------------
1 | """
2 | "Asynchronous programming" section example showing simple
3 | asynchronous printing of numbers sequence.
4 |
5 | """
6 | import asyncio
7 | import random
8 |
9 |
10 | async def print_number(number):
11 | await asyncio.sleep(0)
12 | print(number)
13 |
14 |
15 | if __name__ == "__main__":
16 | loop = asyncio.get_event_loop()
17 |
18 | loop.run_until_complete(
19 | asyncio.gather(*[print_number(number) for number in range(10)])
20 | )
21 | loop.close()
22 |
--------------------------------------------------------------------------------
/Chapter 13/04 - Memoization/fibonacci.py:
--------------------------------------------------------------------------------
1 | def memoize(function):
2 | call_cache = {}
3 |
4 | def memoized(argument):
5 | try:
6 | return call_cache[argument]
7 | except KeyError:
8 | return call_cache.setdefault(argument, function(argument))
9 |
10 | return memoized
11 |
12 |
13 | @memoize
14 | def fibonacci(n):
15 | print(f"fibonacci({n})")
16 | if n < 2:
17 | return 1
18 | else:
19 | return fibonacci(n - 1) + fibonacci(n - 2)
20 |
21 |
22 | if __name__ == "__main__":
23 | print(f"result: {fibonacci(5)=}")
24 |
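25 | # Expected output when run as a script (a sketch derived from the code
26 | # above): thanks to call_cache, each argument is computed and printed
27 | # only once, and the final fibonacci(5) in the f-string is served
28 | # entirely from the cache:
29 | #
30 | #   fibonacci(5)
31 | #   fibonacci(4)
32 | #   fibonacci(3)
33 | #   fibonacci(2)
34 | #   fibonacci(1)
35 | #   fibonacci(0)
36 | #   result: fibonacci(5)=8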
--------------------------------------------------------------------------------
/Chapter 10/04 - Using fakes/acme_sdk.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 |
4 | class AcmeSession:
5 | def __init__(self, tenant: str, token: str):
6 | ...
7 |
8 |
9 | class AcmeHashMap:
10 | def __init__(self, acme_session: AcmeSession):
11 | ...
12 |
13 | def incr(self, key: str, amount):
14 | """Increments the given key by a specific amount"""
15 |
16 | def atomic_incr(self, key: str, amount):
17 | """Increments the given key by a specific amount, atomically"""
18 |
19 | def top_keys(self, count: int) -> Dict[str, int]:
20 | """Returns the keys with the highest values"""
21 |
--------------------------------------------------------------------------------
/Chapter 12/03 - Logging configuration/configuration_file.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta, datetime
2 | import time
3 | import logging.config
4 | import logging.handlers
5 |
6 | import freezegun
7 |
8 | logging.config.fileConfig("logging.conf")
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
13 | def main():
14 | with freezegun.freeze_time() as frozen:
15 | while True:
16 | frozen.tick(timedelta(hours=1))
17 | time.sleep(0.1)
18 | logger.info(f"Something has happened at {datetime.now()}")
19 |
20 |
21 | if __name__ == "__main__":
22 | main()
23 |
--------------------------------------------------------------------------------
/Chapter 10/09 - Faking time/test_time.py:
--------------------------------------------------------------------------------
1 | import time
2 | from datetime import timedelta
3 |
4 | from freezegun import freeze_time
5 |
6 |
7 | @freeze_time("1988-02-05 05:10:00")
8 | def test_with_time():
9 | with freeze_time("1988-02-04 05:10:00") as frozen:
10 | frozen.move_to("1988-02-05 05:10:00")
11 | frozen.tick()
12 | frozen.tick(timedelta(hours=1))
13 |
14 | print(time.time())
15 | time.sleep(1)
16 | print(time.time())
17 | time.sleep(1)
18 | print(time.time())
19 | time.sleep(1)
20 | print(time.time())
21 | time.sleep(1)
22 | print(time.time())
23 |
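24 | # Note: freezegun patches time.time() but not time.sleep(), so the real
25 | # one-second pauses still happen while every print() above shows the same
26 | # frozen timestamp set by the @freeze_time decorator.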
--------------------------------------------------------------------------------
/Chapter 6/09 - The built-in multiprocessing module/sharedctypes.py:
--------------------------------------------------------------------------------
1 | """
2 | "Multiprocessing" section example showing how
3 | to use sharedctypes submodule to share data
4 | between multiple processes.
5 |
6 | """
7 | from multiprocessing import Process, Value, Array
8 |
9 |
10 | def f(n, a):
11 | n.value = 3.1415927
12 | for i in range(len(a)):
13 | a[i] = -a[i]
14 |
15 |
16 | if __name__ == "__main__":
17 | num = Value("d", 0.0)
18 | arr = Array("i", range(10))
19 |
20 | p = Process(target=f, args=(num, arr))
21 | p.start()
22 | p.join()
23 |
24 | print(num.value)
25 | print(arr[:])
26 |
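27 | # Expected output (derived from the code above): the child process mutates
28 | # the shared Value and Array in place, so the parent sees the changes
29 | # after join():
30 | #
31 | #   3.1415927
32 | #   [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]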
--------------------------------------------------------------------------------
/Chapter 6/01 - What is multithreading/thread_visits.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 | thread_visits = 0
4 |
5 |
6 | def visit_counter():
7 | global thread_visits
8 | for i in range(100_000):
9 | # value = thread_visits
10 | # thread_visits = value + 1
11 | thread_visits += 1
12 |
13 |
14 | if __name__ == "__main__":
15 | thread_count = 100
16 | threads = [Thread(target=visit_counter) for _ in range(thread_count)]
17 | for thread in threads:
18 | thread.start()
19 |
20 | for thread in threads:
21 | thread.join()
22 |
23 | print(f"{thread_count=}, {thread_visits=}")
24 |
--------------------------------------------------------------------------------
/Chapter 6/01 - What is multithreading/thread_safe_visits.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Lock
2 |
3 | thread_visits = 0
4 | thread_visits_lock = Lock()
5 |
6 |
7 | def visit_counter():
8 | global thread_visits
9 | for i in range(100_000):
10 | with thread_visits_lock:
11 | thread_visits += 1
12 |
13 |
14 | if __name__ == "__main__":
15 | thread_count = 100
16 | threads = [Thread(target=visit_counter) for _ in range(thread_count)]
17 | for thread in threads:
18 | thread.start()
19 |
20 | for thread in threads:
21 | thread.join()
22 |
23 | print(f"{thread_count=}, {thread_visits=}")
24 |
--------------------------------------------------------------------------------
/Chapter 4/08 - Dataclasses/vector_dataclasses.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 |
4 | @dataclass
5 | class Vector:
6 | x: int
7 | y: int
8 |
9 | def __add__(self, other):
10 | """Add two vectors using + operator"""
11 | return Vector(
12 | self.x + other.x,
13 | self.y + other.y,
14 | )
15 |
16 | def __sub__(self, other):
17 | """Subtract two vectors using - operator"""
18 | return Vector(
19 | self.x - other.x,
20 | self.y - other.y,
21 | )
22 |
23 |
24 | @dataclass(frozen=True)
25 | class FrozenVector:
26 | x: int
27 | y: int
28 |
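29 | # Interactive sketch of the methods @dataclass generates automatically
30 | # (__init__, __repr__ and __eq__); __add__ and __sub__ above still have
31 | # to be written by hand:
32 | #
33 | #   >>> Vector(1, 2) + Vector(3, 4)
34 | #   Vector(x=4, y=6)
35 | #   >>> Vector(1, 2) == Vector(1, 2)
36 | #   True
37 | #   >>> FrozenVector(1, 2).x = 5
38 | #   Traceback (most recent call last):
39 | #     ...
40 | #   dataclasses.FrozenInstanceError: cannot assign to field 'x'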
--------------------------------------------------------------------------------
/Chapter 6/09 - The built-in multiprocessing module/basic_multiprocessing.py:
--------------------------------------------------------------------------------
1 | """
2 | "Multiprocessing" section example showing how
3 | to create new processes with `multiprocessing` module
4 |
5 | """
6 | from multiprocessing import Process
7 | import os
8 |
9 |
10 | def work(identifier):
11 | print(f"Hey, I am the process " f"{identifier}, pid: {os.getpid()}")
12 |
13 |
14 | def main():
15 | processes = [Process(target=work, args=(number,)) for number in range(5)]
16 | for process in processes:
17 | process.start()
18 |
19 | while processes:
20 | processes.pop().join()
21 |
22 |
23 | if __name__ == "__main__":
24 | main()
25 |
--------------------------------------------------------------------------------
/Chapter 4/05 - Real-life example - lazily evaluated attributes/lazy_property.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 |
4 | class lazy_property(object):
5 | def __init__(self, function):
6 | self.fget = function
7 |
8 | def __get__(self, obj, cls):
9 | value = self.fget(obj)
10 | setattr(obj, self.fget.__name__, value)
11 | return value
12 |
13 |
14 | class WithSortedRandoms:
15 | @lazy_property
16 | def lazily_initialized(self):
17 | return sorted([random.random() for _ in range(5)])
18 |
19 |
20 | if __name__ == "__main__":
21 | m = WithSortedRandoms()
22 | print(m.lazily_initialized)
23 | print(m.lazily_initialized)
24 |
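25 | # Note: lazy_property is a non-data descriptor (it defines no __set__), so
26 | # the first attribute access calls fget() and stores the result directly in
27 | # the instance __dict__ via setattr(). The second print() above therefore
28 | # reuses that stored value without calling fget() again.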
--------------------------------------------------------------------------------
/Chapter 8/02 - Intercepting class instance creation process/instance_counting.py:
--------------------------------------------------------------------------------
1 | class InstanceCountingClass:
2 | created = 0
3 | number: int
4 |
5 | def __new__(cls, *args, **kwargs):
6 | instance = super().__new__(cls)
7 | instance.number = cls.created
8 | cls.created += 1
9 |
10 | return instance
11 |
12 | def __repr__(self):
13 | return f"<{self.__class__.__name__}: " f"{self.number} of {self.created}>"
14 |
15 |
16 | if __name__ == "__main__":
17 | instances = [InstanceCountingClass() for _ in range(5)]
18 | for i in instances:
19 | print(i)
20 | print(f"total: {InstanceCountingClass.created}")
21 |
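22 | # Expected output (derived from the code above): self.created resolves to
23 | # the class attribute, which is already 5 by the time the instances are
24 | # printed:
25 | #
26 | #   <InstanceCountingClass: 0 of 5>
27 | #   <InstanceCountingClass: 1 of 5>
28 | #   <InstanceCountingClass: 2 of 5>
29 | #   <InstanceCountingClass: 3 of 5>
30 | #   <InstanceCountingClass: 4 of 5>
31 | #   total: 5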
--------------------------------------------------------------------------------
/Chapter 11/04 - Convenient configuration handling/environ_config.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | import environ
3 |
4 |
5 | @environ.config(prefix="")
6 | class Config:
7 | @environ.config()
8 | class Bind:
9 | host = environ.var(default="localhost")
10 | port = environ.var(default="80", converter=int)
11 |
12 | bind = environ.group(Bind)
13 | database_uri = environ.var()
14 | encryption_key = environ.var()
15 |
16 | schedule_interval = environ.var(
17 | name="SCHEDULE_INTERVAL_SECONDS",
18 | converter=lambda value: timedelta(seconds=int(value)),
19 | default=50,
20 | )
21 |
22 |
23 | Config.from_environ()
24 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/di.py:
--------------------------------------------------------------------------------
1 | from injector import Module, provider, singleton
2 | from redis import Redis
3 |
4 | from interfaces import ViewsStorageBackend
5 | from backends import CounterBackend, RedisBackend
6 |
7 |
8 | class CounterModule(Module):
9 | @provider
10 | @singleton
11 | def provide_storage(self) -> ViewsStorageBackend:
12 | return CounterBackend()
13 |
14 |
15 | class RedisModule(Module):
16 | @provider
17 | def provide_storage(self, client: Redis) -> ViewsStorageBackend:
18 | return RedisBackend(client, "my-set")
19 |
20 | @provider
21 | def provide_redis_client(self) -> Redis:
22 | return Redis(host="redis")
23 |
--------------------------------------------------------------------------------
/Chapter 3/07 - Structural pattern matching/points.py:
--------------------------------------------------------------------------------
1 | class Point:
2 | x: int
3 | y: int
4 |
5 | def __init__(self, x, y):
6 | self.x = x
7 | self.y = y
8 |
9 |
10 | def where_is(point):
11 | match point:
12 | case Point(x=0, y=0):
13 | print("Origin")
14 | case Point(x=0, y=y):
15 | print(f"Y={y}")
16 | case Point(x=x, y=0):
17 | print(f"X={x}")
18 | case Point():
19 | print("Somewhere else")
20 | case _:
21 | print("Not a point")
22 |
23 | if __name__ == "__main__":
24 | where_is(Point(1, 20))
25 | where_is(Point(20, 0))
26 | where_is(Point(0, 20))
27 |
--------------------------------------------------------------------------------
/Chapter 12/06 - Distributed tracing with Jaeger/di.py:
--------------------------------------------------------------------------------
1 | from injector import Module, provider, singleton
2 | from redis import Redis
3 |
4 | from interfaces import ViewsStorageBackend
5 | from backends import CounterBackend, RedisBackend
6 |
7 |
8 | class CounterModule(Module):
9 | @provider
10 | @singleton
11 | def provide_storage(self) -> ViewsStorageBackend:
12 | return CounterBackend()
13 |
14 |
15 | class RedisModule(Module):
16 | @provider
17 | def provide_storage(self, client: Redis) -> ViewsStorageBackend:
18 | return RedisBackend(client, "my-set")
19 |
20 | @provider
21 | def provide_redis_client(self) -> Redis:
22 | return Redis(host="redis")
23 |
--------------------------------------------------------------------------------
/Chapter 5/06 - Using dependency injection frameworks/di.py:
--------------------------------------------------------------------------------
1 | from injector import Module, provider, singleton
2 | from redis import Redis
3 |
4 | from interfaces import ViewsStorageBackend
5 | from backends import CounterBackend, RedisBackend
6 |
7 |
8 | class CounterModule(Module):
9 | @provider
10 | @singleton
11 | def provide_storage(self) -> ViewsStorageBackend:
12 | return CounterBackend()
13 |
14 |
15 | class RedisModule(Module):
16 | @provider
17 | def provide_storage(self, client: Redis) -> ViewsStorageBackend:
18 | return RedisBackend(client, "my-set")
19 |
20 | @provider
21 | def provide_redis_client(self) -> Redis:
22 | return Redis(host="redis")
23 |
--------------------------------------------------------------------------------
/Chapter 7/01 - Event-driven programming in GUIs/tk_zen.py:
--------------------------------------------------------------------------------
1 | import this
2 | from tkinter import Tk, Frame, Button, LEFT, messagebox
3 |
4 | rot13 = str.maketrans(
5 | "ABCDEFGHIJKLMabcdefghijklmNOPQRSTUVWXYZnopqrstuvwxyz",
6 | "NOPQRSTUVWXYZnopqrstuvwxyzABCDEFGHIJKLMabcdefghijklm",
7 | )
8 |
9 |
10 | def main_window(root: Tk):
11 | frame = Frame(root)
12 | frame.pack()
13 |
14 | zen_button = Button(frame, text="Python Zen", command=show_zen)
15 | zen_button.pack(side=LEFT)
16 |
17 |
18 | def show_zen():
19 | messagebox.showinfo("Zen of Python", this.s.translate(rot13))
20 |
21 |
22 | if __name__ == "__main__":
23 | root = Tk()
24 | main_window(root)
25 | root.mainloop()
26 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.7.3
2 | black==20.8b1
3 | bleach==3.3.0
4 | blinker==1.4
5 | coverage==5.5
6 | Cython==0.29.22
7 | environ-config==20.1.0
8 | Faker==6.5.0
9 | falcon==2.0.0
10 | Flask==1.1.2
11 | Flask-Injector==0.12.3
12 | Flask-OpenTracing==1.1.0
13 | freezegun==1.1.0
14 | gunicorn==20.1.0
15 | inflection==0.5.1
16 | hy==0.20.0
17 | injector==0.18.4
18 | ipdb==0.13.4
19 | ipython==7.19.0
20 | jaeger-client==4.4.0
21 | mutmut==2.1.0
22 | mypy==0.790
23 | objgraph==3.5.0
24 | opentracing==2.0.0
25 | prometheus-client==0.10.1
26 | pytest==6.2.2
27 | redis==3.5.3
28 | redis-opentracing==1.0.0
29 | requests==2.25.1
30 | sentry-sdk==1.0.0
31 | watchdog==0.10.3
32 | Werkzeug<2.0.0
33 | zope.interface==5.2.0
34 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | app:
5 | build:
6 | context: .
7 | ports:
8 | - 8000:8000
9 | volumes:
10 | - ".:/app/"
11 |
12 | redis:
13 | image: redis
14 |
15 | prometheus:
16 | image: prom/prometheus:v2.26.0
17 | volumes:
18 | - ./prometheus.yml:/etc/prometheus/prometheus.yml
19 | command:
20 | - '--config.file=/etc/prometheus/prometheus.yml'
21 | - '--storage.tsdb.path=/prometheus'
22 | - '--web.console.libraries=/usr/share/prometheus/console_libraries'
23 | - '--web.console.templates=/usr/share/prometheus/consoles'
24 | ports:
25 | - 9090:9090
26 | restart: always
27 |
--------------------------------------------------------------------------------
/Chapter 6/12 - Python async and await keywords/waiters_await.py:
--------------------------------------------------------------------------------
1 | """
2 | "Asynchronous programming" section example showing how
3 | two coroutines cooperate by releasing control to event
4 | loop on blocking calls.
5 |
6 | """
7 | import asyncio
8 | import random
9 |
10 |
11 | async def waiter(name):
12 | for _ in range(4):
13 | time_to_sleep = random.randint(1, 3) / 4
14 | await asyncio.sleep(time_to_sleep)
15 | print(f"{name} waited {time_to_sleep} seconds")
16 |
17 |
18 | async def main():
19 | await asyncio.gather(waiter("first"), waiter("second"))
20 |
21 |
22 | if __name__ == "__main__":
23 | loop = asyncio.get_event_loop()
24 | loop.run_until_complete(main())
25 | loop.close()
26 |
--------------------------------------------------------------------------------
/Chapter 3/07 - Structural pattern matching/positional_points.py:
--------------------------------------------------------------------------------
1 | class Point:
2 | x: int
3 | y: int
4 |
5 | __match_args__ = ("x", "y")
6 |
7 | def __init__(self, x, y):
8 | self.x = x
9 | self.y = y
10 |
11 |
12 | def where_is(point):
13 | match point:
14 | case Point(0, 0):
15 | print("Origin")
16 | case Point(0, y):
17 | print(f"Y={y}")
18 | case Point(x, 0):
19 | print(f"X={x}")
20 | case Point():
21 | print("Somewhere else")
22 | case _:
23 | print("Not a point")
24 |
25 | if __name__ == "__main__":
26 | where_is(Point(1, 20))
27 | where_is(Point(20, 0))
28 | where_is(Point(0, 20))
29 |
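30 | # Expected output (derived from the match statement above):
31 | #
32 | #   Somewhere else
33 | #   X=20
34 | #   Y=20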
--------------------------------------------------------------------------------
/Chapter 7/03 - Callback-based style/tk_zen_binding.py:
--------------------------------------------------------------------------------
1 | import this
2 | from tkinter import *
3 | from tkinter import messagebox
4 |
5 | rot13 = str.maketrans(
6 | "ABCDEFGHIJKLMabcdefghijklmNOPQRSTUVWXYZnopqrstuvwxyz",
7 | "NOPQRSTUVWXYZnopqrstuvwxyzABCDEFGHIJKLMabcdefghijklm",
8 | )
9 |
10 |
11 | def main_window(root: Tk):
12 | frame = Frame(root)
13 | frame.pack()
14 |
15 | zen_button = Button(frame, text="Python Zen")
16 | zen_button.bind("<ButtonRelease-1>", show_zen)
17 | zen_button.pack(side=LEFT)
18 |
19 |
20 | def show_zen(event):
21 | messagebox.showinfo("Zen of Python", this.s.translate(rot13))
22 |
23 |
24 | if __name__ == "__main__":
25 | root = Tk()
26 | main_window(root)
27 | root.mainloop()
28 |
--------------------------------------------------------------------------------
/Chapter 3/02 - Assignment expressions/findimports2.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import sys
4 |
5 | import_re = re.compile(r"^\s*import\s+\.{0,2}((\w+\.)*(\w+))\s*$")
6 | import_from_re = re.compile(
7 | r"^\s*from\s+\.{0,2}((\w+\.)*(\w+))\s+import\s+(\w+|\*)+\s*$"
8 | )
9 |
10 |
11 | def main():
12 | if len(sys.argv) != 2:
13 | print(f"usage: {os.path.basename(__file__)} file-name")
14 | sys.exit(1)
15 |
16 | with open(sys.argv[1]) as file:
17 | for line in file:
18 | if match := import_re.match(line):
19 | print(match.groups()[0])
20 |
21 | if match := import_from_re.match(line):
22 | print(match.groups()[0])
23 |
24 |
25 | if __name__ == "__main__":
26 | main()
27 |
--------------------------------------------------------------------------------
/Chapter 2/09 - Custom Python shells/pythonstartup.py:
--------------------------------------------------------------------------------
1 | # python startup file
2 |
3 | import atexit
4 | import os
5 |
6 | try:
7 | import readline
8 | except ImportError:
9 | print("Completion unavailable: readline module not available")
10 | else:
11 | import rlcompleter
12 |
13 | # tab completion
14 | readline.parse_and_bind("tab: complete")
15 |
16 | # Path to the history file in the user's home directory.
17 | # You can use your own path here.
18 | history_file = os.path.join(os.environ["HOME"], ".python_shell_history")
19 | try:
20 | readline.read_history_file(history_file)
21 | except IOError:
22 | pass
23 |
24 | atexit.register(readline.write_history_file, history_file)
25 | del os, history_file, readline, rlcompleter
26 |
--------------------------------------------------------------------------------
/Chapter 11/03 - Package scripts and entry points/findimports.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import sys
4 |
5 | import_re = re.compile(r"^\s*import\s+\.{0,2}((\w+\.)*(\w+))\s*$")
6 | import_from_re = re.compile(
7 | r"^\s*from\s+\.{0,2}((\w+\.)*(\w+))\s+import\s+(\w+|\*)+\s*$"
8 | )
9 |
10 |
11 | def main():
12 | if len(sys.argv) != 2:
13 | print(f"usage: {os.path.basename(__file__)} file-name")
14 | sys.exit(1)
15 |
16 | with open(sys.argv[1]) as file:
17 | for line in file:
18 | if match := import_re.match(line):
19 | print(match.groups()[0])
20 |
21 | if match := import_from_re.match(line):
22 | print(match.groups()[0])
23 |
24 |
25 | if __name__ == "__main__":
26 | main()
27 |
--------------------------------------------------------------------------------
/Chapter 4/07 - Single dispatch/dispatch.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from functools import singledispatch
3 | from numbers import Real
4 |
5 |
6 | @singledispatch
7 | def report(value):
8 | return f"raw: {value}"
9 |
10 |
11 | @report.register
12 | def _(value: datetime):
13 | return f"dt: {value.isoformat()}"
14 |
15 |
16 | @report.register
17 | def _(value: complex):
18 | return f"complex: {value.real}{value.imag:+}j"
19 |
20 |
21 | @report.register
22 | def _(value: Real):
23 | return f"real: {value:f}"
24 |
25 |
26 | if __name__ == "__main__":
27 | print(report(datetime.now()))
28 | print(report(100 - 30j))
29 | print(report("January"))
30 | for key, value in report.registry.items():
31 | print(f"{key} -> {value}")
32 |
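33 | # Expected output sketch: the first line shows the current time rendered by
34 | # the datetime handler, the complex value matches its exact registration,
35 | # and the string falls back to the base implementation. int and float would
36 | # dispatch to the Real handler too, because singledispatch also honours
37 | # abstract base classes. The registry loop then lists the registered
38 | # type -> handler mappings.
39 | #
40 | #   dt: <current time in ISO 8601 format>
41 | #   complex: 100.0-30.0j
42 | #   raw: January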
--------------------------------------------------------------------------------
/Chapter 13/03 - Using objgraph module/myapp.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | import objgraph
3 |
4 |
5 | def graph_references(*objects):
6 | objgraph.show_refs(
7 | objects,
8 | filename="show_refs.png",
9 | refcounts=True,
10 | # additional filtering for the sake of brevity
11 | too_many=5,
12 | filter=lambda x: not isinstance(x, dict),
13 | )
14 | objgraph.show_backrefs(objects, filename="show_backrefs.png", refcounts=True)
15 |
16 |
17 | if __name__ == "__main__":
18 | quote = """
19 | People who think they know everything are a
20 | great annoyance to those of us who do.
21 | """
22 | words = quote.lower().strip().split()
23 | counts = Counter(words)
24 | graph_references(words, quote, counts)
25 |
--------------------------------------------------------------------------------
/Chapter 3/02 - Assignment expressions/findimports.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import sys
4 |
5 | import_re = re.compile(r"^\s*import\s+\.{0,2}((\w+\.)*(\w+))\s*$")
6 | import_from_re = re.compile(
7 | r"^\s*from\s+\.{0,2}((\w+\.)*(\w+))\s+import\s+(\w+|\*)+\s*$"
8 | )
9 |
10 |
11 | def main():
12 | if len(sys.argv) != 2:
13 | print(f"usage: {os.path.basename(__file__)} file-name")
14 | sys.exit(1)
15 |
16 | with open(sys.argv[1]) as file:
17 | for line in file:
18 | match = import_re.match(line)
19 | if match:
20 | print(match.groups()[0])
21 |
22 | match = import_from_re.match(line)
23 | if match:
24 | print(match.groups()[0])
25 |
26 |
27 | if __name__ == "__main__":
28 | main()
29 |
--------------------------------------------------------------------------------
/Chapter 6/08 - Multiprocessing/forks.py:
--------------------------------------------------------------------------------
1 | """
2 | "Multiprocessing" section example showing how
3 | to spawn new process using os.fork on POSIX
4 | systems.
5 |
6 | """
7 | import os
8 |
9 | pid_list = []
10 |
11 |
12 | def main():
13 | pid_list.append(os.getpid())
14 | child_pid = os.fork()
15 |
16 | if child_pid == 0:
17 | pid_list.append(os.getpid())
18 | print()
19 | print("CHLD: hey, I am the child process")
20 | print("CHLD: all the pids I know %s" % pid_list)
21 |
22 | else:
23 | pid_list.append(os.getpid())
24 | print()
25 | print("PRNT: hey, I am the parent ")
26 | print("PRNT: the child pid is %d" % child_pid)
27 | print("PRNT: all the pids I know %s" % pid_list)
28 |
29 |
30 | if __name__ == "__main__":
31 | main()
32 |
--------------------------------------------------------------------------------
/Chapter 4/08 - Dataclasses/vector.py:
--------------------------------------------------------------------------------
1 | class Vector:
2 | def __init__(self, x, y):
3 | self.x = x
4 | self.y = y
5 |
6 | def __add__(self, other):
7 | """Add two vectors using + operator"""
8 | return Vector(
9 | self.x + other.x,
10 | self.y + other.y,
11 | )
12 |
13 | def __sub__(self, other):
14 | """Subtract two vectors using - operator"""
15 | return Vector(
16 | self.x - other.x,
17 | self.y - other.y,
18 | )
19 |
20 | def __repr__(self):
21 | """Return textual representation of vector"""
22 | return f"<Vector: x={self.x}, y={self.y}>"
23 |
24 | def __eq__(self, other):
25 | """Compare two vectors for equality"""
26 | return self.x == other.x and self.y == other.y
27 |
--------------------------------------------------------------------------------
/Chapter 8/03 - Metaclass usage/case_user.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | import inflection
3 |
4 |
5 | class CaseInterpolationDict(dict):
6 | def __setitem__(self, key: str, value: Any):
7 | super().__setitem__(key, value)
8 | super().__setitem__(inflection.underscore(key), value)
9 |
10 |
11 | class CaseInterpolatedMeta(type):
12 | @classmethod
13 | def __prepare__(mcs, name, bases):
14 | return CaseInterpolationDict()
15 |
16 |
17 | class User(metaclass=CaseInterpolatedMeta):
18 | def __init__(self, firstName: str, lastName: str):
19 | self.firstName = firstName
20 | self.lastName = lastName
21 |
22 | def getDisplayName(self):
23 | return f"{self.firstName} {self.lastName}"
24 |
25 | def greetUser(self):
26 | return f"Hello {self.getDisplayName()}!"
27 |
--------------------------------------------------------------------------------
/Chapter 4/04 - Descriptors/reveal_access.py:
--------------------------------------------------------------------------------
1 | class RevealAccess(object):
2 | """A data descriptor that sets and returns values
3 | normally and prints a message logging their access.
4 | """
5 |
6 | def __init__(self, initval=None, name="var"):
7 | self.val = initval
8 | self.name = name
9 |
10 | def __get__(self, obj, objtype):
11 | print("Retrieving", self.name)
12 | return self.val
13 |
14 | def __set__(self, obj, val):
15 | print("Updating", self.name)
16 | self.val = val
17 |
18 | def __delete__(self, obj):
19 | print("Deleting", self.name)
20 |
21 |
22 | class MyClass(object):
23 | x = RevealAccess(10, 'var "x"')
24 | y = 5
25 |
26 |
27 | if __name__ == "__main__":
28 | m = MyClass()
29 | m.x
30 | m.x = 20
31 | m.x
32 | del m.x
33 |
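34 | # Expected output (derived from the descriptor above); reads, writes and
35 | # the delete all go through RevealAccess because it is a data descriptor:
36 | #
37 | #   Retrieving var "x"
38 | #   Updating var "x"
39 | #   Retrieving var "x"
40 | #   Deleting var "x"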
--------------------------------------------------------------------------------
/Chapter 4/05 - Real-life example - lazily evaluated attributes/lazily_evaluated.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 |
4 | class InitOnAccess:
5 | def __init__(self, init_func, *args, **kwargs):
6 | self.klass = init_func
7 | self.args = args
8 | self.kwargs = kwargs
9 | self._initialized = None
10 |
11 | def __get__(self, instance, owner):
12 | if self._initialized is None:
13 | print("initialized!")
14 | self._initialized = self.klass(*self.args, **self.kwargs)
15 | else:
16 | print("cached!")
17 | return self._initialized
18 |
19 |
20 | class WithSortedRandoms:
21 | lazily_initialized = InitOnAccess(sorted, [random.random() for _ in range(5)])
22 |
23 |
24 | if __name__ == "__main__":
25 | m = WithSortedRandoms()
26 | print(m.lazily_initialized)
27 | print(m.lazily_initialized)
28 |
--------------------------------------------------------------------------------
/Chapter 7/02 - Event-driven communication/flask_zen.py:
--------------------------------------------------------------------------------
1 | import this
2 |
3 | from flask import Flask
4 |
5 | app = Flask(__name__)
6 |
7 | rot13 = str.maketrans(
8 | "ABCDEFGHIJKLMabcdefghijklmNOPQRSTUVWXYZnopqrstuvwxyz",
9 | "NOPQRSTUVWXYZnopqrstuvwxyzABCDEFGHIJKLMabcdefghijklm",
10 | )
11 |
12 |
13 | def simple_html(body):
14 | return f"""
15 | <!DOCTYPE html>
16 | <html lang="en">
17 | <head>
18 | <meta charset="utf-8">
19 | <title>Book Example</title>
20 | </head>
21 | <body>
22 | {body}
23 | </body>
24 | </html>
25 | """
26 |
27 |
28 | @app.route("/")
29 | def hello():
30 | return simple_html("Python Zen")
31 |
32 |
33 | @app.route("/zen")
34 | def zen():
35 | return simple_html("<br/>".join(this.s.translate(rot13).split("\n")))
36 |
37 |
38 | if __name__ == "__main__":
39 | app.run()
40 |
--------------------------------------------------------------------------------
/Chapter 6/02 - An example of a threaded application/synchronous.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | import requests
4 |
5 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
6 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
7 |
8 |
9 | def fetch_rates(base):
10 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
11 | response.raise_for_status()
12 | rates = response.json()["rates"]
13 |
14 | # note: same currency exchanges to itself 1:1
15 | rates[base] = 1.0
16 |
17 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
18 | print(f"1 {base} = {rates_line}")
19 |
20 |
21 | def main():
22 | for base in BASES:
23 | fetch_rates(base)
24 |
25 |
26 | if __name__ == "__main__":
27 | started = time.time()
28 | main()
29 | elapsed = time.time() - started
30 |
31 | print()
32 | print("time elapsed: {:.2f}s".format(elapsed))
33 |
--------------------------------------------------------------------------------
/Chapter 10/08 - Faking realistic data values/test_mailer.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch
2 |
3 | import pytest
4 |
5 | from mailer import send
6 | from faker import Faker
7 |
8 |
9 | @pytest.mark.parametrize("iteration", range(10))
10 | def test_send(faker: Faker, iteration: int):
11 | sender = faker.email()
12 | to = faker.email()
13 | body = faker.paragraph()
14 | subject = faker.sentence()
15 |
16 | with patch("smtplib.SMTP") as mock:
17 | client = mock.return_value
18 | client.sendmail.return_value = {}
19 |
20 | res = send(sender, to, subject, body)
21 |
22 | assert client.sendmail.called
23 | assert client.sendmail.call_args[0][0] == sender
24 | assert client.sendmail.call_args[0][1] == to
25 | assert subject in client.sendmail.call_args[0][2]
26 | assert body in client.sendmail.call_args[0][2]
27 | assert res == {}
28 |
--------------------------------------------------------------------------------
/Chapter 12/06 - Distributed tracing with Jaeger/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | app:
5 | build:
6 | context: .
7 | ports:
8 | - 8000:8000
9 | environment:
10 | - JAEGER_AGENT_HOST=jaeger
11 | volumes:
12 | - ".:/app/"
13 |
14 | redis:
15 | image: redis
16 |
17 | prometheus:
18 | image: prom/prometheus:v2.26.0
19 | volumes:
20 | - ./prometheus.yml:/etc/prometheus/prometheus.yml
21 | command:
22 | - '--config.file=/etc/prometheus/prometheus.yml'
23 | - '--storage.tsdb.path=/prometheus'
24 | - '--web.console.libraries=/usr/share/prometheus/console_libraries'
25 | - '--web.console.templates=/usr/share/prometheus/consoles'
26 | ports:
27 | - 9090:9090
28 | restart: always
29 |
30 | jaeger:
31 | image: jaegertracing/all-in-one:latest
32 | ports:
33 | - "6831:6831/udp"
34 | - "16686:16686"
35 |
--------------------------------------------------------------------------------
/Chapter 2/08 - Virtual development environments using Vagrant/Vagrantfile:
--------------------------------------------------------------------------------
1 | Vagrant.configure("2") do |config|
2 | # Every Vagrant development environment requires a box.
3 | # You can search for boxes at https://vagrantcloud.com/search.
4 | # Here we use the Trusty version of Ubuntu for the x64 architecture.
5 | config.vm.box = "ubuntu/trusty64"
6 |
7 | # Create a forwarded port mapping which allows access to a specific
8 | # port within the machine from a port on the host machine and only
9 | # allow access via 127.0.0.1 to disable public access
10 | config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1"
11 |
12 | config.vm.provider "virtualbox" do |vb|
13 | vb.gui = false
14 | # Customize the amount of memory on the VM:
15 | vb.memory = "1024"
16 | end
17 |
18 | # Enable provisioning with a shell script.
19 | config.vm.provision "shell", inline: <<-SHELL
20 | apt-get update
21 | SHELL
22 | end
23 |
--------------------------------------------------------------------------------
/Chapter 6/09 - The built-in multiprocessing module/pipes.py:
--------------------------------------------------------------------------------
1 | """
2 | "Multiprocessing" section example showing how
3 | to use pipes from `multiprocessing` module
4 | as communication channel.
5 |
6 | """
7 | from multiprocessing import Process, Pipe
8 |
9 |
10 | class CustomClass:
11 | pass
12 |
13 |
14 | def worker(connection):
15 | while True:
16 | instance = connection.recv()
17 | if instance:
18 | print(f"CHLD: recv: {instance}")
19 | if instance is None:
20 | break
21 |
22 |
23 | def main():
24 | parent_conn, child_conn = Pipe()
25 |
26 | child = Process(target=worker, args=(child_conn,))
27 |
28 | for item in (
29 | 42,
30 | "some string",
31 | {"one": 1},
32 | CustomClass(),
33 | None,
34 | ):
35 | print("PRNT: send: {}".format(item))
36 | parent_conn.send(item)
37 |
38 | child.start()
39 | child.join()
40 |
41 |
42 | if __name__ == "__main__":
43 | main()
44 |
--------------------------------------------------------------------------------
/Chapter 10/01 - Writing tests with pytest/test_batch.py:
--------------------------------------------------------------------------------
1 | from itertools import chain
2 |
3 | from batch import batches
4 |
5 |
6 | def test_batch_on_lists():
7 | assert list(batches([1, 2, 3, 4, 5, 6], 1)) == [[1], [2], [3], [4], [5], [6]]
8 | assert list(batches([1, 2, 3, 4, 5, 6], 2)) == [[1, 2], [3, 4], [5, 6]]
9 | assert list(batches([1, 2, 3, 4, 5, 6], 3)) == [[1, 2, 3], [4, 5, 6]]
10 | assert list(batches([1, 2, 3, 4, 5, 6], 4)) == [
11 | [1, 2, 3, 4],
12 | [5, 6],
13 | ]
14 |
15 |
16 | def test_batch_order():
17 | iterable = range(100)
18 | batch_size = 2
19 |
20 | output = batches(iterable, batch_size)
21 |
22 | assert list(chain.from_iterable(output)) == list(iterable)
23 |
24 |
25 | def test_batch_sizes():
26 | iterable = range(100)
27 | batch_size = 2
28 |
29 | output = list(batches(iterable, batch_size))
30 |
31 | for batch in output[:-1]:
32 | assert len(batch) == batch_size
33 | assert len(output[-1]) <= batch_size
34 |
--------------------------------------------------------------------------------
/Chapter 12/02 - Logging system components/logging_handlers.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta, datetime
2 | import time
3 | import logging.handlers
4 |
5 | import freezegun
6 |
7 |
8 | root_logger = logging.getLogger()
9 | root_logger.setLevel(logging.INFO)
10 | formatter = logging.Formatter(
11 | fmt=(
12 | "%(asctime)s | %(levelname)s | "
13 | "%(name)s | %(filename)s:%(lineno)d | "
14 | "%(message)s"
15 | )
16 | )
17 | handler = logging.handlers.TimedRotatingFileHandler(
18 | filename="application.log",
19 | when="D",
20 | backupCount=30,
21 | )
22 | handler.setFormatter(formatter)
23 | root_logger.addHandler(handler)
24 |
25 |
26 | logger = logging.getLogger(__name__)
27 |
28 |
29 | def main():
30 | with freezegun.freeze_time() as frozen:
31 | while True:
32 | frozen.tick(timedelta(hours=1))
33 | time.sleep(0.1)
34 | logger.info(f"Something has happened at {datetime.now()}")
35 |
36 |
37 | if __name__ == "__main__":
38 | main()
39 |
--------------------------------------------------------------------------------
/Chapter 13/02 - Micro-profiling/myapp.py:
--------------------------------------------------------------------------------
1 | import time
2 | import tempfile
3 | import cProfile
4 | import pstats
5 |
6 |
7 | def profile(column="time", list=3):
8 | def parametrized_decorator(function):
9 | def decorated(*args, **kw):
10 | s = tempfile.mktemp()
11 |
12 | profiler = cProfile.Profile()
13 | profiler.runcall(function, *args, **kw)
14 | profiler.dump_stats(s)
15 |
16 | p = pstats.Stats(s)
17 | print("=" * 5, f"{function.__name__}() profile", "=" * 5)
18 | p.sort_stats(column).print_stats(list)
19 |
20 | return decorated
21 |
22 | return parametrized_decorator
23 |
24 |
25 | def medium():
26 | time.sleep(0.01)
27 |
28 |
29 | @profile("time")
30 | def heavy():
31 | for i in range(100):
32 | medium()
33 | medium()
34 | time.sleep(2)
35 |
36 |
37 | @profile("time")
38 | def main():
39 | for i in range(2):
40 | heavy()
41 |
42 |
43 | if __name__ == "__main__":
44 | main()
45 |
--------------------------------------------------------------------------------
/Chapter 9/05 - Cython as a language/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from setuptools import setup, Extension
4 |
5 | try:
6 | # Cython source-to-source compilation is available
7 | # only when Cython is installed
8 | import Cython
9 |
10 | # and a specific environment variable explicitly
11 | # says that Cython should be used
12 | # to generate the C sources
13 | USE_CYTHON = bool(os.environ.get("USE_CYTHON"))
14 |
15 | except ImportError:
16 | USE_CYTHON = False
17 |
18 | ext = ".pyx" if USE_CYTHON else ".c"
19 |
20 | extensions = [Extension("fibonacci", ["fibonacci" + ext])]
21 |
22 | if USE_CYTHON:
23 | from Cython.Build import cythonize
24 |
25 | extensions = cythonize(extensions)
26 |
27 | setup(
28 | name="fibonacci",
29 | ext_modules=extensions,
30 | extras_require={
31 | # Cython in this specific version will be added
32 | # as a requirement if the package is installed
33 | # with the '[with-cython]' extra feature
34 | "with-cython": ["cython==0.29.22"]
35 | },
36 | )
37 |
--------------------------------------------------------------------------------
/Chapter 9/04 - Cython as a source-to-source compiler/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from setuptools import setup, Extension
4 |
5 | try:
6 | # Cython source-to-source compilation is available
7 | # only when Cython is installed
8 | import Cython
9 |
10 | # and a specific environment variable explicitly
11 | # says that Cython should be used
12 | # to generate the C sources
13 | USE_CYTHON = bool(os.environ.get("USE_CYTHON"))
14 |
15 | except ImportError:
16 | USE_CYTHON = False
17 |
18 | ext = ".pyx" if USE_CYTHON else ".c"
19 |
20 | extensions = [Extension("fibonacci", ["fibonacci" + ext])]
21 |
22 | if USE_CYTHON:
23 | from Cython.Build import cythonize
24 |
25 | extensions = cythonize(extensions)
26 |
27 | setup(
28 | name="fibonacci",
29 | ext_modules=extensions,
30 | extras_require={
31 | # Cython in this specific version will be added
32 | # as a requirement if the package is installed
33 | # with the '[with-cython]' extra feature
34 | "with-cython": ["cython==0.23.4"]
35 | },
36 | )
37 |
--------------------------------------------------------------------------------
/Chapter 3/01 - ChainMap from collections module/user_maps.py:
--------------------------------------------------------------------------------
1 | from collections import ChainMap
2 |
3 |
4 | class UserProfile:
5 | def __init__(self, display_name: str):
6 | self.display_name = display_name
7 |
8 | def __getitem__(self, item: str):
9 | try:
10 | return getattr(self, item)
11 | except AttributeError:
12 | raise KeyError(item)
13 |
14 |
15 | class UserAccount:
16 | def __init__(self, iban: str, balance: int):
17 | self.iban = iban
18 | self.balance = balance
19 |
20 | def __getitem__(self, item: str):
21 | try:
22 | return getattr(self, item)
23 | except AttributeError:
24 | raise KeyError(item)
25 |
26 |
27 | if __name__ == "__main__":
28 | user_profile = UserProfile("John Doe")
29 | user_account = UserAccount("GB71BARC20031885581746", 3000)
30 | user = ChainMap(user_profile, user_account)
31 | print(f"name: {user['display_name']}")
32 | print(f"iban: {user['iban']}")
33 | print(f"balance: {user['balance']}")
34 |
--------------------------------------------------------------------------------
/Chapter 6/13 - A practical example of asynchronous programming/async_aiohttp.py:
--------------------------------------------------------------------------------
1 | """
2 | "Asynchronous programming" section example showing how
3 | to use aiohttp to perform asynchronous HTTP calls
4 |
5 | """
6 | import asyncio
7 | import time
8 |
9 | import aiohttp
10 |
11 | from asyncrates import get_rates
12 |
13 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
14 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
15 |
16 |
17 | def present_result(base, rates):
18 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
19 | print(f"1 {base} = {rates_line}")
20 |
21 |
22 | async def main():
23 | async with aiohttp.ClientSession() as session:
24 | for result in await asyncio.gather(
25 | *[get_rates(session, base) for base in BASES]
26 | ):
27 | present_result(*result)
28 |
29 |
30 | if __name__ == "__main__":
31 | started = time.time()
32 | loop = asyncio.get_event_loop()
33 | loop.run_until_complete(main())
34 | elapsed = time.time() - started
35 |
36 | print()
37 | print("time elapsed: {:.2f}s".format(elapsed))
38 |
--------------------------------------------------------------------------------
/Chapter 7/05 - Topic-based style/topic_based_events.py:
--------------------------------------------------------------------------------
1 | import itertools
2 |
3 | from blinker import signal
4 |
5 |
6 | class SelfWatch:
7 | _new_id = itertools.count(1)
8 |
9 | def __init__(self):
10 | self._id = next(self._new_id)
11 | init_signal = signal("SelfWatch.init")
12 | init_signal.send(self)
13 | init_signal.connect(self.receiver)
14 |
15 | def receiver(self, sender):
16 | print(f"{self}: received event from {sender}")
17 |
18 | def __str__(self):
19 | return f"<{self.__class__.__name__}: {self._id}>"
20 |
21 |
22 | # >>> from topic_based_events import SelfWatch
23 | # >>> selfwatch1 = SelfWatch()
24 | # >>> selfwatch2 = SelfWatch()
25 | # <SelfWatch: 1>: received event from <SelfWatch: 2>
26 | # >>> selfwatch3 = SelfWatch()
27 | # <SelfWatch: 1>: received event from <SelfWatch: 3>
28 | # <SelfWatch: 2>: received event from <SelfWatch: 3>
29 | # >>> selfwatch4 = SelfWatch()
30 | # <SelfWatch: 1>: received event from <SelfWatch: 4>
31 | # <SelfWatch: 2>: received event from <SelfWatch: 4>
32 | # <SelfWatch: 3>: received event from <SelfWatch: 4>
33 |
--------------------------------------------------------------------------------
/Chapter 6/10 - Using process pools/process_pools.py:
--------------------------------------------------------------------------------
1 | import time
2 | from multiprocessing import Pool
3 |
4 | import requests
5 |
6 |
7 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
8 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
9 |
10 | POOL_SIZE = 4
11 |
12 |
13 | def fetch_rates(base):
14 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
15 | response.raise_for_status()
16 | rates = response.json()["rates"]
17 | # note: same currency exchanges to itself 1:1
18 | rates[base] = 1.0
19 | return base, rates
20 |
21 |
22 | def present_result(base, rates):
23 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
24 | print(f"1 {base} = {rates_line}")
25 |
26 |
27 | def main():
28 | with Pool(POOL_SIZE) as pool:
29 | results = pool.map(fetch_rates, BASES)
30 |
31 | for result in results:
32 | present_result(*result)
33 |
34 |
35 | if __name__ == "__main__":
36 | started = time.time()
37 | main()
38 | elapsed = time.time() - started
39 |
40 | print()
41 | print("time elapsed: {:.2f}s".format(elapsed))
42 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Packt
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Chapter 10/03 - Pytests fixtures/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 |
19 | class RedisBackend(ViewsStorageBackend):
20 | def __init__(self, redis_client: Redis, set_name: str):
21 | self._client = redis_client
22 | self._set_name = set_name
23 |
24 | def increment(self, key: str):
25 | self._client.zincrby(self._set_name, 1, key)
26 |
27 | def most_common(self, n: int) -> Dict[str, int]:
28 | return {
29 | key.decode(): int(value)
30 | for key, value in self._client.zrange(
31 | self._set_name,
32 | 0,
33 | n - 1,
34 | desc=True,
35 | withscores=True,
36 | )
37 | }
38 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 |
19 | class RedisBackend(ViewsStorageBackend):
20 | def __init__(self, redis_client: Redis, set_name: str):
21 | self._client = redis_client
22 | self._set_name = set_name
23 |
24 | def increment(self, key: str):
25 | self._client.zincrby(self._set_name, 1, key)
26 |
27 | def most_common(self, n: int) -> Dict[str, int]:
28 | return {
29 | key.decode(): int(value)
30 | for key, value in self._client.zrange(
31 | self._set_name,
32 | 0,
33 | n - 1,
34 | desc=True,
35 | withscores=True,
36 | )
37 | }
38 |
--------------------------------------------------------------------------------
/Chapter 4/01 - Accessing super classes/caseinsensitive.py:
--------------------------------------------------------------------------------
1 | from collections import UserDict
2 | from typing import Any
3 |
4 |
5 | class CaseInsensitiveDict(UserDict):
6 | def __setitem__(self, key: str, value: Any):
7 | return super().__setitem__(key.lower(), value)
8 |
9 | def __getitem__(self, key: str) -> Any:
10 | return super().__getitem__(key.lower())
11 |
12 | def __delitem__(self, key: str) -> None:
13 | return super().__delitem__(key.lower())
14 |
15 |
16 | class CaseInsensitiveDic(dict):
17 | def __setitem__(self, key: str, value: Any):
18 | return super().__setitem__(key.lower(), value)
19 |
20 | def __getitem__(self, key: str) -> Any:
21 | return super().__getitem__(key.lower())
22 |
23 | def __delitem__(self, key: str) -> None:
24 | return super().__delitem__(key.lower())
25 |
26 |
27 | if __name__ == "__main__":
28 | ci = CaseInsensitiveDict()
29 | ci["foo"] = "bar"
30 | ci["BIZ"] = "baz"
31 |
32 | print("FOO:", ci["FOO"])
33 | print("foo:", ci["foo"])
34 | print("biz:", ci["FOO"])
35 | print("BAZ:", ci["foo"])
36 | del ci["foo"]
37 | del ci["BIZ"]
38 |
--------------------------------------------------------------------------------
/Chapter 12/06 - Distributed tracing with Jaeger/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 |
19 | class RedisBackend(ViewsStorageBackend):
20 | def __init__(self, redis_client: Redis, set_name: str):
21 | self._client = redis_client
22 | self._set_name = set_name
23 |
24 | def increment(self, key: str):
25 | self._client.zincrby(self._set_name, 1, key)
26 |
27 | def most_common(self, n: int) -> Dict[str, int]:
28 | return {
29 | key.decode(): int(value)
30 | for key, value in self._client.zrange(
31 | self._set_name,
32 | 0,
33 | n - 1,
34 | desc=True,
35 | withscores=True,
36 | )
37 | }
38 |
--------------------------------------------------------------------------------
/Chapter 5/06 - Using dependency injection frameworks/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 |
19 | class RedisBackend(ViewsStorageBackend):
20 | def __init__(self, redis_client: Redis, set_name: str):
21 | self._client = redis_client
22 | self._set_name = set_name
23 |
24 | def increment(self, key: str):
25 | self._client.zincrby(self._set_name, 1, key)
26 |
27 | def most_common(self, n: int) -> Dict[str, int]:
28 | return {
29 | key.decode(): int(value)
30 | for key, value in self._client.zrange(
31 | self._set_name,
32 | 0,
33 | n - 1,
34 | desc=True,
35 | withscores=True,
36 | )
37 | }
38 |
--------------------------------------------------------------------------------
/Chapter 5/05 - Inversion of control in applications p.2/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 |
19 | class RedisBackend(ViewsStorageBackend):
20 | def __init__(self, redis_client: Redis, set_name: str):
21 | self._client = redis_client
22 | self._set_name = set_name
23 |
24 | def increment(self, key: str):
25 | self._client.zincrby(self._set_name, 1, key)
26 |
27 | def most_common(self, n: int) -> Dict[str, int]:
28 | return {
29 | key.decode(): int(value)
30 | for key, value in self._client.zrange(
31 | self._set_name,
32 | 0,
33 | n - 1,
34 | desc=True,
35 | withscores=True,
36 | )
37 | }
38 |
--------------------------------------------------------------------------------
/Chapter 10/06 - Test coverage/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 | def count_keys(self):
19 | return len(self._counter)
20 |
21 |
22 | class RedisBackend(ViewsStorageBackend):
23 | def __init__(self, redis_client: Redis, set_name: str):
24 | self._client = redis_client
25 | self._set_name = set_name
26 |
27 | def increment(self, key: str):
28 | self._client.zincrby(self._set_name, 1, key)
29 |
30 | def most_common(self, n: int) -> Dict[str, int]:
31 | return {
32 | key.decode(): int(value)
33 | for key, value in self._client.zrange(
34 | self._set_name,
35 | 0,
36 | n - 1,
37 | desc=True,
38 | withscores=True,
39 | )
40 | }
41 |
--------------------------------------------------------------------------------
/Chapter 8/01 - One step deeper: class decorators/autorepr.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable
2 |
3 | UNSET = object()
4 |
5 |
6 | def repr_instance(instance: object, attrs: Iterable[str]):
7 | attr_values: dict[str, Any] = {
8 | attr: getattr(instance, attr, UNSET) for attr in attrs
9 | }
10 | sub_repr = ", ".join(
11 | f"{attr}={repr(val) if val is not UNSET else 'UNSET'}"
12 | for attr, val in attr_values.items()
13 | )
14 | return f"<{instance.__class__.__qualname__}: {sub_repr}>"
15 |
16 |
17 | def autorepr(cls):
18 | attrs = set.union(
19 | *(
20 | set(c.__annotations__.keys())
21 | for c in cls.mro()
22 | if hasattr(c, "__annotations__")
23 | )
24 | )
25 |
26 | def __repr__(self):
27 | return repr_instance(self, sorted(attrs))
28 |
29 | cls.__repr__ = __repr__
30 | return cls
31 |
32 |
33 | @autorepr
34 | class MyClass:
35 | attr_a: Any
36 | attr_b: Any
37 | attr_c: Any
38 |
39 | def __init__(self, a, b):
40 | self.attr_a = a
41 | self.attr_b = b
42 |
43 |
44 | if __name__ == "__main__":
45 | print(MyClass("Ultimate answer", 42))
46 | print(MyClass([1, 2, 3], ["a", "b", "c"]))
47 |
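48 | # Expected output (derived from the decorator above): attributes are taken
49 | # from class annotations, so attr_c is reported as UNSET because __init__
50 | # never assigns it:
51 | #
52 | #   <MyClass: attr_a='Ultimate answer', attr_b=42, attr_c=UNSET>
53 | #   <MyClass: attr_a=[1, 2, 3], attr_b=['a', 'b', 'c'], attr_c=UNSET>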
--------------------------------------------------------------------------------
/Chapter 5/02 - Using function annotations and abstract base classes/dummy_interface.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class DummyInterface(ABC):
5 | @abstractmethod
6 | def dummy_method(self):
7 | ...
8 |
9 | @property
10 | @abstractmethod
11 | def dummy_property(self):
12 | ...
13 |
14 |
15 | class InvalidDummy(DummyInterface):
16 | pass
17 |
18 |
19 | class MissingPropertyDummy(DummyInterface):
20 | def dummy_method(self):
21 | pass
22 |
23 |
24 | class MissingMethodDummy(DummyInterface):
25 | @property
26 | def dummy_property(self):
27 | return None
28 |
29 |
30 | class Dummy(DummyInterface):
31 | def dummy_method(self):
32 | pass
33 |
34 | @property
35 | def dummy_property(self):
36 | return None
37 |
38 |
39 | def instantiate(cls):
40 | print("instantiating", cls)
41 | try:
42 | cls()
43 | except Exception as err:
44 | print(" -", type(err), err)
45 | else:
46 | print(" - ok")
47 |
48 |
49 | if __name__ == "__main__":
50 | instantiate(DummyInterface)
51 | instantiate(InvalidDummy)
52 | instantiate(MissingMethodDummy)
53 | instantiate(MissingPropertyDummy)
54 | instantiate(Dummy)
55 |
--------------------------------------------------------------------------------
/Chapter 8/01 - One step deeper: class decorators/autorepr_subclassed.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable
2 |
3 | UNSET = object()
4 |
5 |
6 | def repr_instance(instance: object, attrs: Iterable[str]):
7 | attr_values: dict[str, Any] = {
8 | attr: getattr(instance, attr, UNSET) for attr in attrs
9 | }
10 | sub_repr = ", ".join(
11 | f"{attr}={repr(val) if val is not UNSET else 'UNSET'}"
12 | for attr, val in attr_values.items()
13 | )
14 | return f"<{instance.__class__.__qualname__}: {sub_repr}>"
15 |
16 |
17 | def autorepr(cls):
18 | attrs = set.union(
19 | *(
20 | set(c.__annotations__.keys())
21 | for c in cls.mro()
22 | if hasattr(c, "__annotations__")
23 | )
24 | )
25 |
26 | class Klass(cls):
27 | def __repr__(self):
28 | return repr_instance(self, sorted(attrs))
29 |
30 | return Klass
31 |
32 |
33 | @autorepr
34 | class MyClass:
35 | attr_a: Any
36 | attr_b: Any
37 | attr_c: Any
38 |
39 | def __init__(self, a, b):
40 | self.attr_a = a
41 | self.attr_b = b
42 |
43 |
44 | if __name__ == "__main__":
45 | print(MyClass("Ultimate answer", 42))
46 | print(MyClass([1, 2, 3], ["a", "b", "c"]))
47 |
--------------------------------------------------------------------------------
/Chapter 6/03 - Using one thread per item/one_thread_per_item.py:
--------------------------------------------------------------------------------
1 | """
2 | "An example of a threaded application" section example
3 | showing how to use the `threading` module in the simplest
4 | one-thread-per-item fashion.
5 |
6 | """
7 | import time
8 | from threading import Thread
9 |
10 | import requests
11 |
12 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
13 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
14 |
15 |
16 | def fetch_rates(base):
17 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
18 |
19 | response.raise_for_status()
20 | rates = response.json()["rates"]
21 | # note: same currency exchanges to itself 1:1
22 | rates[base] = 1.0
23 |
24 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
25 | print(f"1 {base} = {rates_line}")
26 |
27 |
28 | def main():
29 | threads = []
30 | for base in BASES:
31 | thread = Thread(target=fetch_rates, args=[base])
32 | thread.start()
33 | threads.append(thread)
34 |
35 | while threads:
36 | threads.pop().join()
37 |
38 |
39 | if __name__ == "__main__":
40 | started = time.time()
41 | main()
42 | elapsed = time.time() - started
43 |
44 | print()
45 | print("time elapsed: {:.2f}s".format(elapsed))
46 |
--------------------------------------------------------------------------------
/Chapter 6/11 - Using multiprocessing.dummy as the multithreading interface/multiprocessing_dummy.py:
--------------------------------------------------------------------------------
1 | import time
2 | from multiprocessing.pool import Pool as ProcessPool, ThreadPool
3 |
4 | import requests
5 |
6 |
7 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
8 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
9 |
10 | POOL_SIZE = 4
11 |
12 |
13 | def fetch_rates(base):
14 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
15 | response.raise_for_status()
16 | rates = response.json()["rates"]
17 | # note: same currency exchanges to itself 1:1
18 | rates[base] = 1.0
19 | return base, rates
20 |
21 |
22 | def present_result(base, rates):
23 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
24 | print(f"1 {base} = {rates_line}")
25 |
26 |
27 | def main(use_threads=False):
28 | if use_threads:
29 | pool_cls = ThreadPool
30 | else:
31 | pool_cls = ProcessPool
32 |
33 | with pool_cls(POOL_SIZE) as pool:
34 | results = pool.map(fetch_rates, BASES)
35 |
36 | for result in results:
37 | present_result(*result)
38 |
39 |
40 | if __name__ == "__main__":
41 | started = time.time()
42 | main()
43 | elapsed = time.time() - started
44 |
45 | print()
46 | print("time elapsed: {:.2f}s".format(elapsed))
47 |
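The listing reaches the thread pool through multiprocessing.pool, but as the section title suggests, multiprocessing.dummy exposes exactly the same Pool interface backed by threads. The thread-based variant can therefore be imported equivalently as:

    # thread-backed Pool exposing the multiprocessing API
    from multiprocessing.dummy import Pool as ThreadPool

Either import lets main(use_threads=True) run the same pool.map(fetch_rates, BASES) call without changing anything else in the module.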
--------------------------------------------------------------------------------
/Chapter 9/02 - Pure C extensions/fibonacci.c:
--------------------------------------------------------------------------------
1 | #define PY_SSIZE_T_CLEAN
2 | #include <Python.h>
3 |
4 | long long fibonacci(unsigned int n) {
5 | if (n == 0) {
6 | return 0;
7 | } else if (n == 1) {
8 | return 1;
9 | } else {
10 | return fibonacci(n - 2) + fibonacci(n - 1);
11 | }
12 | }
13 |
14 | static PyObject* fibonacci_py(PyObject* self, PyObject* args) {
15 | PyObject *result = NULL;
16 | long n;
17 |
18 | if (PyArg_ParseTuple(args, "l", &n)) {
19 | result = Py_BuildValue("L", fibonacci((unsigned int)n));
20 | }
21 |
22 | return result;
23 | }
24 |
25 | static char fibonacci_docs[] =
26 | "fibonacci(n): Return nth Fibonacci sequence number "
27 | "computed recursively\n";
28 |
29 |
30 | static PyMethodDef fibonacci_module_methods[] = {
31 | {"fibonacci", (PyCFunction)fibonacci_py,
32 | METH_VARARGS, fibonacci_docs},
33 | {NULL, NULL, 0, NULL}
34 | };
35 |
36 |
37 | static struct PyModuleDef fibonacci_module_definition = {
38 | PyModuleDef_HEAD_INIT,
39 | "fibonacci",
40 | "Extension module that provides fibonacci sequence function",
41 | -1,
42 | fibonacci_module_methods
43 | };
44 |
45 |
46 | PyMODINIT_FUNC PyInit_fibonacci(void) {
47 | Py_Initialize();
48 |
49 | return PyModule_Create(&fibonacci_module_definition);
50 | }
51 |
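Assuming the setup.py that accompanies this file builds it into an extension module named fibonacci (the name declared in fibonacci_module_definition), a minimal usage sketch could look like this:

    # build in place first, e.g.: python setup.py build_ext --inplace
    import fibonacci

    print(fibonacci.fibonacci(10))       # expected: 55
    print(fibonacci.fibonacci.__doc__)   # docstring supplied via fibonacci_docs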
--------------------------------------------------------------------------------
/Chapter 8/04 - Using __init__subclass__ method as alternative to metaclasses/autorepr.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable
2 |
3 | UNSET = object()
4 |
5 |
6 | def repr_instance(instance: object, attrs: Iterable[str]):
7 | attr_values: dict[str, Any] = {
8 | attr: getattr(instance, attr, UNSET) for attr in attrs
9 | }
10 | sub_repr = ", ".join(
11 | f"{attr}={repr(val) if val is not UNSET else 'UNSET'}"
12 | for attr, val in attr_values.items()
13 | )
14 | return f"<{instance.__class__.__qualname__}: {sub_repr}>"
15 |
16 |
17 | def autorepr(cls):
18 | attrs = cls.__annotations__.keys()
19 |
20 | def __repr__(self):
21 | return repr_instance(self, sorted(attrs))
22 |
23 | cls.__repr__ = __repr__
24 | return cls
25 |
26 |
27 | @autorepr
28 | class MyClass:
29 | attr_a: Any
30 | attr_b: Any
31 | attr_c: Any
32 |
33 | def __init__(self, a, b):
34 | self.attr_a = a
35 | self.attr_b = b
36 |
37 |
38 | class MyChildClass(MyClass):
39 | attr_d: Any
40 |
41 | def __init__(self, a, b):
42 | super().__init__(a, b)
43 |
44 |
45 | if __name__ == "__main__":
46 | print(MyClass("Ultimate answer", 42))
47 | print(MyClass([1, 2, 3], ["a", "b", "c"]))
48 | print(MyChildClass("Ultimate answer", 42))
49 | print(MyChildClass([1, 2, 3], ["a", "b", "c"]))
50 |
--------------------------------------------------------------------------------
/Chapter 12/03 - Logging configuration/configuration_dict.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta, datetime
2 | import time
3 | import logging.config
4 | import logging.handlers
5 |
6 | import freezegun
7 |
8 | logging.config.dictConfig(
9 | {
10 | "version": 1,
11 | "formatters": {
12 | "default": {
13 | "format": (
14 | "%(asctime)s | %(levelname)s | "
15 | "%(name)s | %(filename)s:%(lineno)d | "
16 | "%(message)s"
17 | )
18 | },
19 | },
20 | "handlers": {
21 | "logfile": {
22 | "class": "logging.handlers.TimedRotatingFileHandler",
23 | "formatter": "default",
24 | "filename": "application.log",
25 | "when": "D",
26 | "backupCount": 30,
27 | }
28 | },
29 | "root": {
30 | "handlers": ["logfile"],
31 | "level": "INFO",
32 | },
33 | }
34 | )
35 |
36 | logger = logging.getLogger(__name__)
37 |
38 |
39 | def main():
40 | with freezegun.freeze_time() as frozen:
41 | while True:
42 | frozen.tick(timedelta(hours=1))
43 | time.sleep(0.1)
44 | logger.info(f"Something has happened at {datetime.now()}")
45 |
46 |
47 | if __name__ == "__main__":
48 | main()
49 |
--------------------------------------------------------------------------------
/Chapter 6/14 - Integrating non-asynchronous code with async using futures/async_futures.py:
--------------------------------------------------------------------------------
1 | """
2 | "Asynchronous programming" section example showing
3 | how to employ `futures` and threading/multiprocessing
4 | to use non-async libraries in asyncio-based applications
5 |
6 | """
7 | import asyncio
8 | import time
9 |
10 | import requests
11 |
12 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
13 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
14 |
15 | THREAD_POOL_SIZE = 4
16 |
17 |
18 | async def fetch_rates(base):
19 | loop = asyncio.get_event_loop()
20 | response = await loop.run_in_executor(
21 | None, requests.get, f"https://api.vatcomply.com/rates?base={base}"
22 | )
23 | response.raise_for_status()
24 | rates = response.json()["rates"]
25 | # note: same currency exchanges to itself 1:1
26 | rates[base] = 1.0
27 | return base, rates
28 |
29 |
30 | def present_result(base, rates):
31 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
32 | print(f"1 {base} = {rates_line}")
33 |
34 |
35 | async def main():
36 | for result in await asyncio.gather(*[fetch_rates(base) for base in BASES]):
37 | present_result(*result)
38 |
39 |
40 | if __name__ == "__main__":
41 | started = time.time()
42 | loop = asyncio.get_event_loop()
43 | loop.run_until_complete(main())
44 | elapsed = time.time() - started
45 |
46 | print()
47 | print("time elapsed: {:.2f}s".format(elapsed))
48 |
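loop.run_in_executor(None, ...) above hands the blocking requests.get call to the event loop's default ThreadPoolExecutor. On Python 3.7 and later the bootstrap in the __main__ block can equivalently be written with asyncio.run, which creates and closes the loop for you; a sketch:

    if __name__ == "__main__":
        started = time.time()
        asyncio.run(main())
        elapsed = time.time() - started

        print()
        print("time elapsed: {:.2f}s".format(elapsed))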
--------------------------------------------------------------------------------
/Chapter 10/04 - Using fakes/backends.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from typing import Dict
3 | from redis import Redis
4 |
5 | from interfaces import ViewsStorageBackend
6 |
7 |
8 | class CounterBackend(ViewsStorageBackend):
9 | def __init__(self):
10 | self._counter = Counter()
11 |
12 | def increment(self, key: str):
13 | self._counter[key] += 1
14 |
15 | def most_common(self, n: int) -> Dict[str, int]:
16 | return dict(self._counter.most_common(n))
17 |
18 |
19 | class RedisBackend(ViewsStorageBackend):
20 | def __init__(self, redis_client: Redis, set_name: str):
21 | self._client = redis_client
22 | self._set_name = set_name
23 |
24 | def increment(self, key: str):
25 | self._client.zincrby(self._set_name, 1, key)
26 |
27 | def most_common(self, n: int) -> Dict[str, int]:
28 | return {
29 | key.decode(): int(value)
30 | for key, value in self._client.zrange(
31 | self._set_name,
32 | 0,
33 | n - 1,
34 | desc=True,
35 | withscores=True,
36 | )
37 | }
38 |
39 |
40 | class AcmeBackend(ViewsStorageBackend):
41 | def __init__(self, acme_hashmap):
42 | self._acme = acme_hashmap
43 |
44 | def increment(self, key: str):
45 | self._acme.atomic_incr(key, 1)
46 |
47 | def most_common(self, n: int) -> Dict[str, int]:
48 | return self._acme.top_keys(n)
49 |
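AcmeBackend depends only on two methods of the injected client: atomic_incr(key, amount) and top_keys(n) returning a dict of counts. The fake used by the tests lives in this section's acme_fakes.py; purely to illustrate the shape such a fake can take (not necessarily that file's actual contents), a Counter-based sketch:

    from collections import Counter


    class AcmeHashMapFakeSketch:
        """Illustrative in-memory stand-in for the Acme SDK hash map."""

        def __init__(self):
            self._counter = Counter()

        def atomic_incr(self, key, amount):
            self._counter[key] += amount

        def top_keys(self, n):
            return dict(self._counter.most_common(n))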
--------------------------------------------------------------------------------
/Chapter 10/05 - Mocks and unittest.mock module/test_mailer.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch, Mock
2 | from mailer import send
3 | import smtplib
4 |
5 |
6 | def test_send_unittest():
7 | sender = "john.doe@example.com"
8 | to = "jane.doe@example.com"
9 | body = "Hello jane!"
10 | subject = "How are you?"
11 |
12 | with patch("smtplib.SMTP") as mock:
13 | client = mock.return_value
14 | client.sendmail.return_value = {}
15 |
16 | res = send(sender, to, subject, body)
17 |
18 | assert client.sendmail.called
19 | assert client.sendmail.call_args[0][0] == sender
20 | assert client.sendmail.call_args[0][1] == to
21 | assert subject in client.sendmail.call_args[0][2]
22 | assert body in client.sendmail.call_args[0][2]
23 | assert res == {}
24 |
25 |
26 | def test_send(monkeypatch):
27 | sender = "john.doe@example.com"
28 | to = "jane.doe@example.com"
29 | body = "Hello jane!"
30 | subject = "How are you?"
31 |
32 | smtp = Mock()
33 | monkeypatch.setattr(smtplib, "SMTP", smtp)
34 | client = smtp.return_value
35 | client.sendmail.return_value = {}
36 |
37 | res = send(sender, to, subject, body)
38 |
39 | assert client.sendmail.called
40 | assert client.sendmail.call_args[0][0] == sender
41 | assert client.sendmail.call_args[0][1] == to
42 | assert subject in client.sendmail.call_args[0][2]
43 | assert body in client.sendmail.call_args[0][2]
44 | assert res == {}
45 |
--------------------------------------------------------------------------------
/Chapter 4/05 - Real-life example - lazily evaluated attributes/lazy_class_attribute.py:
--------------------------------------------------------------------------------
1 | import OpenGL.GL as gl
2 | from OpenGL.GL import shaders
3 |
4 |
5 | class lazy_class_attribute(object):
6 | def __init__(self, function):
7 | self.fget = function
8 |
9 | def __get__(self, obj, cls):
10 | value = self.fget(obj or cls)
11 |         # note: the value is stored on the class object, not the instance,
12 |         #       no matter whether it is a class-level or
13 |         #       instance-level access
14 | setattr(cls, self.fget.__name__, value)
15 | return value
16 |
17 |
18 | class ObjectUsingShaderProgram(object):
19 | # trivial pass-through vertex shader implementation
20 | VERTEX_CODE = """
21 | #version 330 core
22 | layout(location = 0) in vec4 vertexPosition;
23 | void main(){
24 | gl_Position = vertexPosition;
25 | }
26 | """
27 | # trivial fragment shader that results in everything
28 | # drawn with white color
29 | FRAGMENT_CODE = """
30 | #version 330 core
31 | out lowp vec4 out_color;
32 | void main(){
33 | out_color = vec4(1, 1, 1, 1);
34 | }
35 | """
36 |
37 | @lazy_class_attribute
38 | def shader_program(self):
39 | print("compiling!")
40 | return shaders.compileProgram(
41 | shaders.compileShader(self.VERTEX_CODE, gl.GL_VERTEX_SHADER),
42 | shaders.compileShader(self.FRAGMENT_CODE, gl.GL_FRAGMENT_SHADER),
43 | )
44 |
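Because __get__ stores the computed value on the class itself, the shader program is compiled at most once and is then shared by every instance. A toy sketch of that caching behaviour, reusing the lazy_class_attribute descriptor above (no OpenGL context required; Demo and expensive are made-up names):

    class Demo:
        @lazy_class_attribute
        def expensive(cls_or_self):
            print("computing once")
            return 42

    # first access runs the function and replaces the descriptor
    assert Demo.expensive == 42      # prints "computing once"
    # later accesses hit a plain class attribute, no recomputation
    assert Demo().expensive == 42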
--------------------------------------------------------------------------------
/Chapter 9/03 - Exception handling/fibonacci.c:
--------------------------------------------------------------------------------
1 | #define PY_SSIZE_T_CLEAN
2 | #include <Python.h>
3 |
4 | long long fibonacci(unsigned int n) {
5 | if (n == 0) {
6 | return 0;
7 | } else if (n == 1) {
8 | return 1;
9 | } else {
10 | return fibonacci(n - 2) + fibonacci(n - 1);
11 | }
12 | }
13 |
14 | static PyObject* fibonacci_py(PyObject* self, PyObject* args) {
15 | PyObject *result = NULL;
16 | long n;
17 | long long fib;
18 |
19 | if (PyArg_ParseTuple(args, "l", &n)) {
20 | if (n<0) {
21 | PyErr_SetString(PyExc_ValueError,
22 | "n must not be less than 0");
23 | } else {
24 | result = Py_BuildValue("L", fibonacci((unsigned int) n));
25 | }
26 | }
27 | return result;
28 | }
29 |
30 | static char fibonacci_docs[] =
31 | "fibonacci(n): Return nth Fibonacci sequence number "
32 | "computed recursively\n";
33 |
34 |
35 | static PyMethodDef fibonacci_module_methods[] = {
36 | {"fibonacci", (PyCFunction)fibonacci_py,
37 | METH_VARARGS, fibonacci_docs},
38 | {NULL, NULL, 0, NULL}
39 | };
40 |
41 |
42 | static struct PyModuleDef fibonacci_module_definition = {
43 | PyModuleDef_HEAD_INIT,
44 | "fibonacci",
45 | "Extension module that provides fibonacci sequence function",
46 | -1,
47 | fibonacci_module_methods
48 | };
49 |
50 |
51 | PyMODINIT_FUNC PyInit_fibonacci(void) {
52 | Py_Initialize();
53 |
54 | return PyModule_Create(&fibonacci_module_definition);
55 | }
56 |
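Built the same way as the previous version, this guarded variant should surface a negative argument as a Python exception instead of recursing on a wrapped-around unsigned value; an interaction sketch (module name taken from the module definition above):

    import fibonacci

    fibonacci.fibonacci(10)    # returns 55
    fibonacci.fibonacci(-1)    # raises ValueError: n must not be less than 0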
--------------------------------------------------------------------------------
/Chapter 9/06 - Calling C functions using ctypes/qsort.py:
--------------------------------------------------------------------------------
1 | from random import shuffle
2 |
3 | import ctypes
4 | from ctypes.util import find_library
5 |
6 | libc = ctypes.cdll.LoadLibrary(find_library("c"))
7 |
8 | CMPFUNC = ctypes.CFUNCTYPE(
9 | # return type
10 | ctypes.c_int,
11 | # first argument type
12 | ctypes.POINTER(ctypes.c_int),
13 | # second argument type
14 | ctypes.POINTER(ctypes.c_int),
15 | )
16 |
17 |
18 | def ctypes_int_compare(a, b):
19 | # arguments are pointers so we access using [0] index
20 | print(" %s cmp %s" % (a[0], b[0]))
21 |
22 | # according to qsort specification this should return:
23 | # * less than zero if a < b
24 | # * zero if a == b
25 | # * more than zero if a > b
26 | return a[0] - b[0]
27 |
28 |
29 | def main():
30 | numbers = list(range(5))
31 | shuffle(numbers)
32 | print("shuffled: ", numbers)
33 |
34 |     # create a new type representing an array with length
35 |     # equal to the length of the numbers list
36 | NumbersArray = ctypes.c_int * len(numbers)
37 | # create new C array using a new type
38 | c_array = NumbersArray(*numbers)
39 |
40 | libc.qsort(
41 | # pointer to the sorted array
42 | c_array,
43 | # length of the array
44 | len(c_array),
45 | # size of single array element
46 | ctypes.sizeof(ctypes.c_int),
47 | # callback (pointer to the C comparison function)
48 | CMPFUNC(ctypes_int_compare),
49 | )
50 | print("sorted: ", list(c_array))
51 |
52 |
53 | if __name__ == "__main__":
54 | main()
55 |
--------------------------------------------------------------------------------
/Chapter 5/04 - Inversion of control in applications/tracking.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from http import HTTPStatus
3 |
4 | from flask import Flask, request, Response
5 |
6 | storage = Counter()
7 | app = Flask(__name__)
8 |
9 | PIXEL = (
10 | b"GIF89a\x01\x00\x01\x00\x80\x00\x00\x00"
11 | b"\x00\x00\xff\xff\xff!\xf9\x04\x01\x00"
12 | b"\x00\x00\x00,\x00\x00\x00\x00\x01\x00"
13 | b"\x01\x00\x00\x02\x01D\x00;"
14 | )
15 |
16 |
17 | @app.route("/track")
18 | def track():
19 | try:
20 | referer = request.headers["Referer"]
21 | except KeyError:
22 | return Response(status=HTTPStatus.BAD_REQUEST)
23 |
24 | storage[referer] += 1
25 |
26 | return Response(
27 | PIXEL,
28 | headers={
29 | "Content-Type": "image/gif",
30 | "Expires": "Mon, 01 Jan 1990 00:00:00 GMT",
31 | "Cache-Control": "no-cache, no-store, must-revalidate",
32 | "Pragma": "no-cache",
33 | },
34 | )
35 |
36 |
37 | @app.route("/stats")
38 | def stats():
39 | return dict(storage.most_common(10))
40 |
41 |
42 | @app.route("/")
43 | def index():
44 |     return """
45 |     <html>
46 |     <head></head>
47 |     <body>
48 |     <a href="/test">/test</a>
49 |     <a href="/stats">/stats</a>
50 |     </body>
51 |     </html>
52 |     """
53 |
54 |
55 | @app.route("/test")
56 | def test():
57 | return """
58 |
59 |
60 |
61 |
62 | """
63 |
64 |
65 | if __name__ == "__main__":
66 | app.run(host="0.0.0.0", port=8000)
67 |
--------------------------------------------------------------------------------
/Chapter 6/04 - Using a thread pool/thread_pool.py:
--------------------------------------------------------------------------------
1 | """
2 | "An example of a threaded application" section example
3 | showing how to implement a simple thread pool.
4 |
5 | """
6 | import time
7 | from queue import Queue, Empty
8 | from threading import Thread
9 |
10 | import requests
11 |
12 | THREAD_POOL_SIZE = 4
13 |
14 |
15 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
16 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
17 |
18 |
19 | def fetch_rates(base):
20 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
21 |
22 | response.raise_for_status()
23 | rates = response.json()["rates"]
24 | # note: same currency exchanges to itself 1:1
25 | rates[base] = 1.0
26 |
27 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
28 | print(f"1 {base} = {rates_line}")
29 |
30 |
31 | def worker(work_queue):
32 | while not work_queue.empty():
33 | try:
34 | item = work_queue.get_nowait()
35 | except Empty:
36 | break
37 | else:
38 | fetch_rates(item)
39 | work_queue.task_done()
40 |
41 |
42 | def main():
43 | work_queue = Queue()
44 |
45 | for base in BASES:
46 | work_queue.put(base)
47 |
48 | threads = [
49 | Thread(target=worker, args=(work_queue,)) for _ in range(THREAD_POOL_SIZE)
50 | ]
51 |
52 | for thread in threads:
53 | thread.start()
54 |
55 | work_queue.join()
56 |
57 | while threads:
58 | threads.pop().join()
59 |
60 |
61 | if __name__ == "__main__":
62 | started = time.time()
63 | main()
64 | elapsed = time.time() - started
65 |
66 | print()
67 | print("time elapsed: {:.2f}s".format(elapsed))
68 |
--------------------------------------------------------------------------------
/Chapter 5/06 - Using dependency injection frameworks/tracking.py:
--------------------------------------------------------------------------------
1 | from http import HTTPStatus
2 |
3 | from flask import Flask, request, Response
4 | from flask_injector import FlaskInjector
5 |
6 | from interfaces import ViewsStorageBackend
7 | import di
8 |
9 |
10 | app = Flask(__name__)
11 |
12 | PIXEL = (
13 | b"GIF89a\x01\x00\x01\x00\x80\x00\x00\x00"
14 | b"\x00\x00\xff\xff\xff!\xf9\x04\x01\x00"
15 | b"\x00\x00\x00,\x00\x00\x00\x00\x01\x00"
16 | b"\x01\x00\x00\x02\x01D\x00;"
17 | )
18 |
19 |
20 | @app.route("/track")
21 | def track(storage: ViewsStorageBackend):
22 | try:
23 | referer = request.headers["Referer"]
24 | except KeyError:
25 | return Response(status=HTTPStatus.BAD_REQUEST)
26 |
27 | storage.increment(referer)
28 |
29 | return Response(
30 | PIXEL,
31 | headers={
32 | "Content-Type": "image/gif",
33 | "Expires": "Mon, 01 Jan 1990 00:00:00 GMT",
34 | "Cache-Control": "no-cache, no-store, must-revalidate",
35 | "Pragma": "no-cache",
36 | },
37 | )
38 |
39 |
40 | @app.route("/stats")
41 | def stats(storage: ViewsStorageBackend):
42 | return storage.most_common(10)
43 |
44 |
45 | @app.route("/")
46 | def index():
47 |     return """
48 |     <html>
49 |     <head></head>
50 |     <body>
51 |     <a href="/test">/test</a>
52 |     <a href="/stats">/stats</a>
53 |     </body>
54 |     </html>
55 |     """
56 |
57 |
58 | @app.route("/test")
59 | def test():
60 | return """
61 |
62 |
63 |
64 |
65 | """
66 |
67 |
68 | if __name__ == "__main__":
69 | FlaskInjector(app=app, modules=[di.RedisModule()])
70 | app.run(host="0.0.0.0", port=8000)
71 |
--------------------------------------------------------------------------------
/Chapter 5/05 - Inversion of control in applications p.2/tracking.py:
--------------------------------------------------------------------------------
1 | from functools import partial
2 | from http import HTTPStatus
3 |
4 | from flask import Flask, request, Response
5 |
6 | from interfaces import ViewsStorageBackend
7 | from backends import CounterBackend, RedisBackend
8 | from redis import Redis
9 |
10 | app = Flask(__name__)
11 |
12 | PIXEL = (
13 | b"GIF89a\x01\x00\x01\x00\x80\x00\x00\x00"
14 | b"\x00\x00\xff\xff\xff!\xf9\x04\x01\x00"
15 | b"\x00\x00\x00,\x00\x00\x00\x00\x01\x00"
16 | b"\x01\x00\x00\x02\x01D\x00;"
17 | )
18 |
19 |
20 | def track(storage: ViewsStorageBackend):
21 | try:
22 | referer = request.headers["Referer"]
23 | except KeyError:
24 | return Response(status=HTTPStatus.BAD_REQUEST)
25 |
26 | storage.increment(referer)
27 |
28 | return Response(
29 | PIXEL,
30 | headers={
31 | "Content-Type": "image/gif",
32 | "Expires": "Mon, 01 Jan 1990 00:00:00 GMT",
33 | "Cache-Control": "no-cache, no-store, must-revalidate",
34 | "Pragma": "no-cache",
35 | },
36 | )
37 |
38 |
39 | def stats(storage: ViewsStorageBackend):
40 | return storage.most_common(10)
41 |
42 |
43 | @app.route("/")
44 | def index():
45 |     return """
46 |     <html>
47 |     <head></head>
48 |     <body>
49 |     <a href="/test">/test</a>
50 |     <a href="/stats">/stats</a>
51 |     </body>
52 |     </html>
53 |     """
54 |
55 |
56 | @app.route("/test")
57 | def test():
58 | return """
59 |
60 |
61 |
62 |
63 | """
64 |
65 |
66 | if __name__ == "__main__":
67 | views_storage = RedisBackend(Redis(host="redis"), "my-stats")
68 | app.route("/track", endpoint="track")(partial(track, storage=views_storage))
69 | app.route("/stats", endpoint="stats")(partial(stats, storage=views_storage))
70 | app.run(host="0.0.0.0", port=8000)
71 |
--------------------------------------------------------------------------------
/Chapter 8/04 - Using __init__subclass__ method as alternative to metaclasses/autorepr_with_init_subclass.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Iterable
2 |
3 | UNSET = object()
4 |
5 |
6 | def repr_instance(instance: object, attrs: Iterable[str]):
7 | attr_values: dict[str, Any] = {
8 | attr: getattr(instance, attr, UNSET) for attr in attrs
9 | }
10 | sub_repr = ", ".join(
11 | f"{attr}={repr(val) if val is not UNSET else 'UNSET'}"
12 | for attr, val in attr_values.items()
13 | )
14 | return f"<{instance.__class__.__qualname__}: {sub_repr}>"
15 |
16 |
17 | def autorepr(cls):
18 | attrs = set.union(
19 | *(
20 | set(c.__annotations__.keys())
21 | for c in cls.mro()
22 | if hasattr(c, "__annotations__")
23 | )
24 | )
25 |
26 | def __repr__(self):
27 | return repr_instance(self, sorted(attrs))
28 |
29 | cls.__repr__ = __repr__
30 |
31 | def __init_subclass__(cls):
32 | autorepr(cls)
33 |
34 | cls.__init_subclass__ = classmethod(__init_subclass__)
35 |
36 | return cls
37 |
38 |
39 | @autorepr
40 | class MyClass:
41 | attr_a: Any
42 | attr_b: Any
43 | attr_c: Any
44 |
45 | def __init__(self, a, b):
46 | self.attr_a = a
47 | self.attr_b = b
48 |
49 |
50 | class MyChildClass(MyClass):
51 | attr_d: Any
52 |
53 | def __init__(self, a, b):
54 | super().__init__(a, b)
55 |
56 |
57 | class MyFurtherChildClass(MyChildClass):
58 | attr_e: Any
59 |
60 | def __init__(self, a, b):
61 | super().__init__(a, b)
62 |
63 |
64 | if __name__ == "__main__":
65 | print(MyClass("Ultimate answer", 42))
66 | print(MyClass([1, 2, 3], ["a", "b", "c"]))
67 | print(MyChildClass("Ultimate answer", 42))
68 | print(MyChildClass([1, 2, 3], ["a", "b", "c"]))
69 | print(MyFurtherChildClass("Ultimate answer", 42))
70 | print(MyFurtherChildClass([1, 2, 3], ["a", "b", "c"]))
71 |
--------------------------------------------------------------------------------
/Chapter 10/02 - Test parametrization/test_batch.py:
--------------------------------------------------------------------------------
1 | from itertools import chain
2 | import pytest
3 |
4 | from batch import batches
5 |
6 |
7 | def test_batch_on_lists():
8 | assert list(batches([1, 2, 3, 4, 5, 6], 1)) == [[1], [2], [3], [4], [5], [6]]
9 | assert list(batches([1, 2, 3, 4, 5, 6], 2)) == [[1, 2], [3, 4], [5, 6]]
10 | assert list(batches([1, 2, 3, 4, 5, 6], 3)) == [[1, 2, 3], [4, 5, 6]]
11 | assert list(batches([1, 2, 3, 4, 5, 6], 4)) == [
12 | [1, 2, 3, 4],
13 | [5, 6],
14 | ]
15 |
16 |
17 | def test_batch_with_loop():
18 | iterable = [1, 2, 3, 4, 5, 6]
19 | samples = {
20 | # even batches
21 | 1: [[1], [2], [3], [4], [5], [6]],
22 | 2: [[1, 2], [3, 4], [5, 6]],
23 | 3: [[1, 2, 3], [4, 5, 6]],
24 | # batches with rest
25 | 4: [[1, 2, 3, 4], [5, 6]],
26 | }
27 |
28 | for batch_size, expected in samples.items():
29 | assert list(batches(iterable, batch_size)) == expected
30 |
31 |
32 | @pytest.mark.parametrize(
33 | "batch_size, expected",
34 | [
35 | # even batches
36 | [1, [[1], [2], [3], [4], [5], [6]]],
37 | [2, [[1, 2], [3, 4], [5, 6]]],
38 | [3, [[1, 2, 3], [4, 5, 6]]],
39 | # batches with rest
40 | [4, [[1, 2, 3, 4], [5, 6]]],
41 | ],
42 | )
43 | def test_batch_parametrized(batch_size, expected):
44 | iterable = [1, 2, 3, 4, 5, 6]
45 | assert list(batches(iterable, batch_size)) == expected
46 |
47 |
48 | def test_batch_order():
49 | iterable = range(100)
50 | batch_size = 2
51 |
52 | output = batches(iterable, batch_size)
53 |
54 | assert list(chain.from_iterable(output)) == list(iterable)
55 |
56 |
57 | def test_batch_sizes():
58 | iterable = range(100)
59 | batch_size = 2
60 |
61 | output = list(batches(iterable, batch_size))
62 |
63 | for batch in output[:-1]:
64 | assert len(batch) == batch_size
65 | assert len(output[-1]) <= batch_size
66 |
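batch.py itself is not reproduced in this excerpt. A minimal generator consistent with the tests above (order preserved, fixed-size chunks, a possibly smaller final chunk) could be sketched as follows; this only illustrates the contract the tests pin down and is not necessarily the chapter's implementation:

    def batches(iterable, batch_size):
        """Yield consecutive lists of up to batch_size items, preserving order."""
        batch = []
        for item in iterable:
            batch.append(item)
            if len(batch) == batch_size:
                yield batch
                batch = []
        if batch:
            yield batch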
--------------------------------------------------------------------------------
/Chapter 6/05 - Using two-way queues/two_way_queues.py:
--------------------------------------------------------------------------------
1 | """
2 | "An example of a threaded application" section example
3 | showing how to use two-way queues as an inter-thread
4 | communication method.
5 |
6 | """
7 | import time
8 | from queue import Queue, Empty
9 | from threading import Thread
10 |
11 | import requests
12 |
13 |
14 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
15 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
16 |
17 | THREAD_POOL_SIZE = 4
18 |
19 |
20 | def fetch_rates(base):
21 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
22 |
23 | response.raise_for_status()
24 | rates = response.json()["rates"]
25 | # note: same currency exchanges to itself 1:1
26 | rates[base] = 1.0
27 | return base, rates
28 |
29 |
30 | def present_result(base, rates):
31 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
32 | print(f"1 {base} = {rates_line}")
33 |
34 |
35 | def worker(work_queue, results_queue):
36 | while not work_queue.empty():
37 | try:
38 | item = work_queue.get_nowait()
39 | except Empty:
40 | break
41 | else:
42 | results_queue.put(fetch_rates(item))
43 | work_queue.task_done()
44 |
45 |
46 | def main():
47 | work_queue = Queue()
48 | results_queue = Queue()
49 |
50 | for base in BASES:
51 | work_queue.put(base)
52 |
53 | threads = [
54 | Thread(target=worker, args=(work_queue, results_queue))
55 | for _ in range(THREAD_POOL_SIZE)
56 | ]
57 |
58 | for thread in threads:
59 | thread.start()
60 |
61 | work_queue.join()
62 |
63 | while threads:
64 | threads.pop().join()
65 |
66 | while not results_queue.empty():
67 | present_result(*results_queue.get())
68 |
69 |
70 | if __name__ == "__main__":
71 | started = time.time()
72 | main()
73 | elapsed = time.time() - started
74 |
75 | print()
76 | print("time elapsed: {:.2f}s".format(elapsed))
77 |
--------------------------------------------------------------------------------
/Chapter 5/01 - A bit of history: zope.interface/colliders_simple.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from dataclasses import dataclass
3 |
4 |
5 | def rects_collide(rect1, rect2):
6 | """Check collision between rectangles
7 |
8 | Rectangle coordinates:
9 | ┌───(x2, y2)
10 | │ │
11 | (x1, y1)──┘
12 | """
13 | return (
14 | rect1.x1 < rect2.x2
15 | and rect1.x2 > rect2.x1
16 | and rect1.y1 < rect2.y2
17 | and rect1.y2 > rect2.y1
18 | )
19 |
20 |
21 | def find_collisions(objects):
22 | return [
23 | (item1, item2)
24 | for item1, item2 in itertools.combinations(objects, 2)
25 | if rects_collide(item1.bounding_box, item2.bounding_box)
26 | ]
27 |
28 |
29 | @dataclass
30 | class Box:
31 | x1: float
32 | y1: float
33 | x2: float
34 | y2: float
35 |
36 |
37 | @dataclass
38 | class Square:
39 | x: float
40 | y: float
41 | size: float
42 |
43 | @property
44 | def bounding_box(self):
45 | return Box(self.x, self.y, self.x + self.size, self.y + self.size)
46 |
47 |
48 | @dataclass
49 | class Rect:
50 | x: float
51 | y: float
52 | width: float
53 | height: float
54 |
55 | @property
56 | def bounding_box(self):
57 | return Box(self.x, self.y, self.x + self.width, self.y + self.height)
58 |
59 |
60 | @dataclass
61 | class Circle:
62 | x: float
63 | y: float
64 | radius: float
65 |
66 | @property
67 | def bounding_box(self):
68 | return Box(
69 | self.x - self.radius,
70 | self.y - self.radius,
71 | self.x + self.radius,
72 | self.y + self.radius,
73 | )
74 |
75 |
76 | @dataclass
77 | class Point:
78 | x: float
79 | y: float
80 |
81 |
82 | if __name__ == "__main__":
83 | for collision in find_collisions(
84 | [
85 | Square(0, 0, 10),
86 | Rect(5, 5, 20, 20),
87 | Square(15, 20, 5),
88 | Circle(1, 1, 2),
89 | Point(100, 200),
90 | ]
91 | ):
92 | print(collision)
93 |
--------------------------------------------------------------------------------
/Chapter 10/03 - Pytests fixtures/test_backends.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 | import pytest
4 | from redis import Redis
5 |
6 | from backends import RedisBackend, CounterBackend
7 | from interfaces import ViewsStorageBackend
8 |
9 |
10 | @pytest.fixture
11 | def counter_backend():
12 | return CounterBackend()
13 |
14 |
15 | @pytest.fixture(scope="session")
16 | def redis_client():
17 | return Redis(host="localhost", port=6379)
18 |
19 |
20 | @pytest.fixture
21 | def redis_backend(redis_client: Redis):
22 | set_name = "test-page-counts"
23 | redis_client.delete(set_name)
24 |
25 | return RedisBackend(redis_client=redis_client, set_name=set_name)
26 |
27 |
28 | @pytest.fixture(params=["redis_backend", "counter_backend"])
29 | def backend(request):
30 | return request.getfixturevalue(request.param)
31 |
32 |
33 | @pytest.mark.parametrize("n", [0] + [random.randint(0, 100) for _ in range(5)])
34 | def test_empty_backend(backend: ViewsStorageBackend, n: int):
35 | assert backend.most_common(n) == {}
36 |
37 |
38 | def test_increments_all(backend: ViewsStorageBackend):
39 | increments = {
40 | "key_a": random.randint(1, 10),
41 | "key_b": random.randint(1, 10),
42 | "key_c": random.randint(1, 10),
43 | }
44 |
45 | for key, count in increments.items():
46 | for _ in range(count):
47 | backend.increment(key)
48 |
49 | assert backend.most_common(len(increments)) == increments
50 | assert backend.most_common(len(increments) + 1) == increments
51 |
52 |
53 | def test_increments_top(backend: ViewsStorageBackend):
54 | increments = {
55 | "key_a": random.randint(1, 10),
56 | "key_b": random.randint(1, 10),
57 | "key_c": random.randint(1, 10),
58 | "key_d": random.randint(1, 10),
59 | }
60 |
61 | for key, count in increments.items():
62 | for _ in range(count):
63 | backend.increment(key)
64 |
65 | assert len(backend.most_common(1)) == 1
66 | assert len(backend.most_common(2)) == 2
67 | assert len(backend.most_common(3)) == 3
68 |
69 | top2_values = backend.most_common(2).values()
70 | assert list(top2_values) == (sorted(increments.values(), reverse=True)[:2])
71 |
--------------------------------------------------------------------------------
/Chapter 10/06 - Test coverage/test_backends.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 | import pytest
4 | from redis import Redis
5 |
6 | from backends import RedisBackend, CounterBackend
7 | from interfaces import ViewsStorageBackend
8 |
9 |
10 | @pytest.fixture
11 | def counter_backend():
12 | return CounterBackend()
13 |
14 |
15 | @pytest.fixture(scope="session")
16 | def redis_client():
17 | return Redis(host="localhost", port=6379)
18 |
19 |
20 | @pytest.fixture
21 | def redis_backend(redis_client: Redis):
22 | set_name = "test-page-counts"
23 | redis_client.delete(set_name)
24 |
25 | return RedisBackend(redis_client=redis_client, set_name=set_name)
26 |
27 |
28 | @pytest.fixture(params=["redis_backend", "counter_backend"])
29 | def backend(request):
30 | return request.getfixturevalue(request.param)
31 |
32 |
33 | @pytest.mark.parametrize("n", [0] + [random.randint(0, 100) for _ in range(5)])
34 | def test_empty_backend(backend: ViewsStorageBackend, n: int):
35 | assert backend.most_common(n) == {}
36 |
37 |
38 | def test_increments_all(backend: ViewsStorageBackend):
39 | increments = {
40 | "key_a": random.randint(1, 10),
41 | "key_b": random.randint(1, 10),
42 | "key_c": random.randint(1, 10),
43 | }
44 |
45 | for key, count in increments.items():
46 | for _ in range(count):
47 | backend.increment(key)
48 |
49 | assert backend.most_common(len(increments)) == increments
50 | assert backend.most_common(len(increments) + 1) == increments
51 |
52 |
53 | def test_increments_top(backend: ViewsStorageBackend):
54 | increments = {
55 | "key_a": random.randint(1, 10),
56 | "key_b": random.randint(1, 10),
57 | "key_c": random.randint(1, 10),
58 | "key_d": random.randint(1, 10),
59 | }
60 |
61 | for key, count in increments.items():
62 | for _ in range(count):
63 | backend.increment(key)
64 |
65 | assert len(backend.most_common(1)) == 1
66 | assert len(backend.most_common(2)) == 2
67 | assert len(backend.most_common(3)) == 3
68 |
69 | top2_values = backend.most_common(2).values()
70 | assert list(top2_values) == (sorted(increments.values(), reverse=True)[:2])
71 |
--------------------------------------------------------------------------------
/Chapter 12/05 - Using Prometheus/tracking.py:
--------------------------------------------------------------------------------
1 | from http import HTTPStatus
2 |
3 | from flask import Flask, request, Response
4 | from flask_injector import FlaskInjector
5 | from prometheus_client import Summary, Gauge, Info, make_wsgi_app
6 | from werkzeug.middleware.dispatcher import DispatcherMiddleware
7 |
8 | from interfaces import ViewsStorageBackend
9 | import di
10 |
11 |
12 | app = Flask(__name__)
13 |
14 | PIXEL = (
15 | b"GIF89a\x01\x00\x01\x00\x80\x00\x00\x00"
16 | b"\x00\x00\xff\xff\xff!\xf9\x04\x01\x00"
17 | b"\x00\x00\x00,\x00\x00\x00\x00\x01\x00"
18 | b"\x01\x00\x00\x02\x01D\x00;"
19 | )
20 |
21 | REQUEST_TIME = Summary("request_processing_seconds", "Time spent processing requests")
22 | AVERAGE_TOP_HITS = Gauge("average_top_hits", "Average number of top-10 page counts ")
23 | TOP_PAGE = Info("top_page", "Most popular referrer")
24 |
25 |
26 | @app.route("/track")
27 | @REQUEST_TIME.time()
28 | def track(storage: ViewsStorageBackend):
29 | try:
30 | referer = request.headers["Referer"]
31 | except KeyError:
32 | return Response(status=HTTPStatus.BAD_REQUEST)
33 |
34 | storage.increment(referer)
35 |
36 | return Response(
37 | PIXEL,
38 | headers={
39 | "Content-Type": "image/gif",
40 | "Expires": "Mon, 01 Jan 1990 00:00:00 GMT",
41 | "Cache-Control": "no-cache, no-store, must-revalidate",
42 | "Pragma": "no-cache",
43 | },
44 | )
45 |
46 |
47 | @app.route("/stats")
48 | @REQUEST_TIME.time()
49 | def stats(storage: ViewsStorageBackend):
50 | counts: dict[str, int] = storage.most_common(10)
51 |
52 | AVERAGE_TOP_HITS.set(sum(counts.values()) / len(counts) if counts else 0)
53 | TOP_PAGE.info({"top": max(counts, default="n/a", key=lambda x: counts[x])})
54 |
55 | return counts
56 |
57 |
58 | @app.route("/test")
59 | @REQUEST_TIME.time()
60 | def test():
61 | return """
62 |
63 |
64 |
65 |
66 | """
67 |
68 |
69 | FlaskInjector(app=app, modules=[di.RedisModule()])
70 | app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {"/metrics": make_wsgi_app()})
71 |
72 | if __name__ == "__main__":
73 | app.run(host="0.0.0.0", port=8000)
74 |
--------------------------------------------------------------------------------
/Chapter 4/02 - Multiple iheritance and method resolution order/C3.txt:
--------------------------------------------------------------------------------
1 | L[MyClass]
2 |     = [MyClass] + merge(L[Base1], L[Base2], [Base1, Base2])
3 | = [MyClass] + merge(
4 | [Base1 + merge(L[CommonBase], [CommonBase])],
5 | [Base2 + merge(L[CommonBase], [CommonBase])],
6 | [Base1, Base2]
7 | )
8 | = [MyClass] + merge(
9 | [Base1] + merge(L[CommonBase], [CommonBase]),
10 | [Base2] + merge(L[CommonBase], [CommonBase]),
11 | [Base1, Base2]
12 | )
13 | = [MyClass] + merge(
14 | [Base1] + merge([CommonBase] + merge(L[object]), [CommonBase]),
15 | [Base2] + merge([CommonBase] + merge(L[object]), [CommonBase]),
16 | [Base1, Base2]
17 | )
18 | = [MyClass] + merge(
19 | [Base1] + merge([CommonBase] + merge([object]), [CommonBase]),
20 | [Base2] + merge([CommonBase] + merge([object]), [CommonBase]),
21 | [Base1, Base2]
22 | )
28 | = [MyClass] + merge(
29 | [Base1] + merge([CommonBase, object] + merge([]), [CommonBase]),
30 | [Base2] + merge([CommonBase, object] + merge([]), [CommonBase]),
31 | [Base1, Base2]
32 | )
33 | = [MyClass] + merge(
34 | [Base1] + merge([CommonBase, object], [CommonBase]),
35 | [Base2] + merge([CommonBase, object], [CommonBase]),
36 | [Base1, Base2]
37 | )
38 | = [MyClass] + merge(
39 | [Base1, CommonBase] + merge([object], []),
40 | [Base2, CommonBase] + merge([object], []),
41 | [Base1, Base2]
42 | )
43 | = [MyClass] + merge(
44 |     [Base1, CommonBase, object] + merge([], []),
45 |     [Base2, CommonBase, object] + merge([], []),
46 | [Base1, Base2]
47 | )
48 | = [MyClass] + merge(
49 | [Base1, CommonBase, object],
50 | [Base2, CommonBase, object],
51 | [Base1, Base2]
52 | )
53 | = [MyClass, Base1] + merge(
54 | [CommonBase, object],
55 | [Base2, CommonBase, object],
56 | [Base2]
57 | )
58 | = [MyClass, Base1, Base2] + merge(
59 | [CommonBase, object],
60 | [CommonBase, object],
61 | []
62 | )
63 | = [MyClass, Base1, Base2, CommonBase] + merge(
64 | [object],
65 | [object],
66 | []
67 | )
68 | = [MyClass, Base1, Base2, CommonBase, object] + merge([], [], [])
69 |
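The linearization derived above can be checked directly against the interpreter. Assuming the diamond hierarchy the derivation refers to (as defined in mro.py from this section), a quick verification sketch:

    class CommonBase: ...
    class Base1(CommonBase): ...
    class Base2(CommonBase): ...
    class MyClass(Base1, Base2): ...

    # expected: ['MyClass', 'Base1', 'Base2', 'CommonBase', 'object']
    print([cls.__name__ for cls in MyClass.mro()])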
--------------------------------------------------------------------------------
/Chapter 6/06 - Dealing with errors in threads/error_handling.py:
--------------------------------------------------------------------------------
1 | """
2 | "An example of a threaded application" section example
3 | showing how errors raised in worker threads can be collected
4 | and propagated in a multithreaded application
5 |
6 | """
7 | import random
8 | import time
9 | from queue import Queue, Empty
10 | from threading import Thread
11 |
12 | import requests
13 |
14 |
15 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
16 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
17 |
18 | THREAD_POOL_SIZE = 4
19 |
20 |
21 | def fetch_rates(base):
22 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
23 |
24 | if random.randint(0, 5) < 1:
25 | # simulate error by overriding status code
26 | response.status_code = 500
27 |
28 | response.raise_for_status()
29 | rates = response.json()["rates"]
30 | # note: same currency exchanges to itself 1:1
31 | rates[base] = 1.0
32 | return base, rates
33 |
34 |
35 | def present_result(base, rates):
36 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
37 | print(f"1 {base} = {rates_line}")
38 |
39 |
40 | def worker(work_queue, results_queue):
41 | while not work_queue.empty():
42 | try:
43 | item = work_queue.get_nowait()
44 | except Empty:
45 | break
46 | try:
47 | result = fetch_rates(item)
48 | except Exception as err:
49 | results_queue.put(err)
50 | else:
51 | results_queue.put(result)
52 | finally:
53 | work_queue.task_done()
54 |
55 |
56 | def main():
57 | work_queue = Queue()
58 | results_queue = Queue()
59 |
60 | for base in BASES:
61 | work_queue.put(base)
62 |
63 | threads = [
64 | Thread(target=worker, args=(work_queue, results_queue))
65 | for _ in range(THREAD_POOL_SIZE)
66 | ]
67 |
68 | for thread in threads:
69 | thread.start()
70 |
71 | work_queue.join()
72 |
73 | while threads:
74 | threads.pop().join()
75 |
76 | while not results_queue.empty():
77 | result = results_queue.get()
78 | if isinstance(result, Exception):
79 | raise result
80 |
81 | present_result(*result)
82 |
83 |
84 | if __name__ == "__main__":
85 | started = time.time()
86 | main()
87 | elapsed = time.time() - started
88 |
89 | print()
90 | print("time elapsed: {:.2f}s".format(elapsed))
91 |
--------------------------------------------------------------------------------
/Chapter 7/04 - Subject-based style/observers.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import re
4 | from abc import ABC, abstractmethod
5 | from glob import glob
6 |
7 |
8 | class ObserverABC(ABC):
9 | @abstractmethod
10 | def notify(self, event):
11 | ...
12 |
13 |
14 | class SubjectABC(ABC):
15 | @abstractmethod
16 | def register(self, observer: ObserverABC):
17 | ...
18 |
19 |
20 | class Grepper(SubjectABC):
21 | _observers: list[ObserverABC]
22 |
23 | def __init__(self):
24 | self._observers = []
25 |
26 | def register(self, observer: ObserverABC):
27 | self._observers.append(observer)
28 |
29 | def notify_observers(self, path):
30 | for observer in self._observers:
31 | observer.notify(path)
32 |
33 | def grep(self, path: str, pattern: str):
34 | r = re.compile(pattern)
35 |
36 | for item in glob(path, recursive=True):
37 | if not os.path.isfile(item):
38 | continue
39 |
40 | try:
41 | with open(item) as f:
42 | self.notify_observers(("opened", item))
43 | if r.findall(f.read()):
44 | self.notify_observers(("matched", item))
45 | finally:
46 | self.notify_observers(("closed", item))
47 |
48 |
49 | class Presenter(ObserverABC):
50 | def notify(self, event):
51 | event_type, file = event
52 | if event_type == "matched":
53 | print(f"Found in: {file}")
54 |
55 |
56 | class Auditor(ObserverABC):
57 | def notify(self, event):
58 | event_type, file = event
59 | print(f"{event_type:8}: {file}")
60 |
61 |
62 | if __name__ == "__main__":
63 | if len(sys.argv) != 3:
64 | print("usage: program PATH PATTERN")
65 | sys.exit(1)
66 |
67 | grepper = Grepper()
68 | grepper.register(Presenter())
69 | grepper.grep(sys.argv[1], sys.argv[2])
70 |
71 |
--------------------------------------------------------------------------------
/Chapter 10/04 - Using fakes/test_backends.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 | import pytest
4 | from redis import Redis
5 |
6 | from backends import RedisBackend, CounterBackend, AcmeBackend
7 | from interfaces import ViewsStorageBackend
8 | from acme_fakes import AcmeHashMapFake
9 |
10 |
11 | @pytest.fixture
12 | def counter_backend():
13 | return CounterBackend()
14 |
15 |
16 | @pytest.fixture(scope="session")
17 | def redis_client():
18 | return Redis(host="localhost", port=6379)
19 |
20 |
21 | @pytest.fixture
22 | def redis_backend(redis_client: Redis):
23 | set_name = "test-page-counts"
24 | redis_client.delete(set_name)
25 |
26 | return RedisBackend(redis_client=redis_client, set_name=set_name)
27 |
28 |
29 | @pytest.fixture
30 | def acme_client():
31 | return AcmeHashMapFake()
32 |
38 |
39 | @pytest.fixture
40 | def acme_backend(acme_client):
41 | return AcmeBackend(acme_client)
42 |
43 |
44 | @pytest.fixture(params=["redis_backend", "counter_backend", "acme_backend"])
45 | def backend(request):
46 | return request.getfixturevalue(request.param)
47 |
48 |
49 | @pytest.mark.parametrize("n", [0] + [random.randint(0, 100) for _ in range(5)])
50 | def test_empty_backend(backend: ViewsStorageBackend, n: int):
51 | assert backend.most_common(n) == {}
52 |
53 |
54 | def test_increments_all(backend: ViewsStorageBackend):
55 | increments = {
56 | "key_a": random.randint(1, 10),
57 | "key_b": random.randint(1, 10),
58 | "key_c": random.randint(1, 10),
59 | }
60 |
61 | for key, count in increments.items():
62 | for _ in range(count):
63 | backend.increment(key)
64 |
65 | assert backend.most_common(len(increments)) == increments
66 | assert backend.most_common(len(increments) + 1) == increments
67 |
68 |
69 | def test_increments_top(backend: ViewsStorageBackend):
70 | increments = {
71 | "key_a": random.randint(1, 10),
72 | "key_b": random.randint(1, 10),
73 | "key_c": random.randint(1, 10),
74 | "key_d": random.randint(1, 10),
75 | }
76 |
77 | for key, count in increments.items():
78 | for _ in range(count):
79 | backend.increment(key)
80 |
81 | assert len(backend.most_common(1)) == 1
82 | assert len(backend.most_common(2)) == 2
83 | assert len(backend.most_common(3)) == 3
84 |
85 | top2_values = backend.most_common(2).values()
86 | assert list(top2_values) == (sorted(increments.values(), reverse=True)[:2])
87 |
--------------------------------------------------------------------------------
/Chapter 4/06 - Dunder methods/matrices.py:
--------------------------------------------------------------------------------
1 | class Matrix:
2 | def __init__(self, rows):
3 | if len(set(len(row) for row in rows)) > 1:
4 | raise ValueError("All matrix rows must be the same length")
5 |
6 | self.rows = rows
7 |
8 | def __add__(self, other):
9 | if len(self.rows) != len(other.rows) or len(self.rows[0]) != len(other.rows[0]):
10 | raise ValueError("Matrix dimensions don't match")
11 |
12 | return Matrix(
13 | [
14 | [a + b for a, b in zip(a_row, b_row)]
15 | for a_row, b_row in zip(self.rows, other.rows)
16 | ]
17 | )
18 |
19 | def __sub__(self, other):
20 | if len(self.rows) != len(other.rows) or len(self.rows[0]) != len(other.rows[0]):
21 | raise ValueError("Matrix dimensions don't match")
22 |
23 | return Matrix(
24 | [
25 | [a - b for a, b in zip(a_row, b_row)]
26 | for a_row, b_row in zip(self.rows, other.rows)
27 | ]
28 | )
29 |
30 | def __mul__(self, other):
31 | if not isinstance(other, Matrix):
32 | raise TypeError(f"Don't know how to multiply {type(other)} with Matrix")
33 |
34 | if len(self.rows[0]) != len(other.rows):
35 | raise ValueError("Matrix dimensions don't match")
36 |
37 | rows = [[0 for _ in other.rows[0]] for _ in self.rows]
38 |
39 | for i in range(len(self.rows)):
40 | for j in range(len(other.rows[0])):
41 | for k in range(len(other.rows)):
42 | rows[i][j] += self.rows[i][k] * other.rows[k][j]
43 |
44 | return Matrix(rows)
45 |
46 | def __repr__(self):
47 | return "\n".join(str(row) for row in self.rows)
48 |
49 |
50 | if __name__ == "__main__":
51 | m0 = Matrix(
52 | [
53 | [1, 0, 0],
54 | [0, 1, 0],
55 | [0, 0, 1],
56 | ]
57 | )
58 | m1 = Matrix(
59 | [
60 | [1, 2, 3],
61 | [4, 1, 4],
62 | [5, 7, 9],
63 | ]
64 | )
65 | assert (m1 * m0).rows == m1.rows
66 |
67 | m2 = Matrix(
68 | [
69 | [1, 2, 3],
70 | [1, 4, 3],
71 | [1, 0, 5],
72 | ]
73 | )
74 | assert (m2 * m0).rows == m2.rows
75 |
76 | m3 = m1 + m2
77 | m4 = m1 - m2
78 | m5 = m1 * m2
79 |
80 | m6 = Matrix(
81 | [
82 | [1, 2, 4, 3],
83 | [3, 4, 1, 5],
84 | [1, 6, 1, 3],
85 | ]
86 | )
87 |
--------------------------------------------------------------------------------
/Chapter 12/06 - Distributed tracing with Jaeger/tracking.py:
--------------------------------------------------------------------------------
1 | from http import HTTPStatus
2 |
3 | from flask import Flask, request, Response
4 | from flask_injector import FlaskInjector
5 | from flask_opentracing import FlaskTracing
6 | from jaeger_client import Config
7 | from prometheus_client import Summary, Gauge, Info, make_wsgi_app
8 | from werkzeug.middleware.dispatcher import DispatcherMiddleware
9 | import redis_opentracing
10 |
11 | from interfaces import ViewsStorageBackend
12 | import di
13 |
14 | app = Flask(__name__)
15 | tracer = Config(
16 | config={"sampler": {"type": "const", "param": 1}},
17 | service_name="pixel-tracking",
18 | ).initialize_tracer()
19 |
20 | redis_opentracing.init_tracing(tracer)
21 | FlaskTracing(tracer, app=app)
22 |
23 | PIXEL = (
24 | b"GIF89a\x01\x00\x01\x00\x80\x00\x00\x00"
25 | b"\x00\x00\xff\xff\xff!\xf9\x04\x01\x00"
26 | b"\x00\x00\x00,\x00\x00\x00\x00\x01\x00"
27 | b"\x01\x00\x00\x02\x01D\x00;"
28 | )
29 |
30 | REQUEST_TIME = Summary("request_processing_seconds", "Time spent processing requests")
31 | AVERAGE_TOP_HITS = Gauge("average_top_hits", "Average number of top-10 page counts ")
32 | TOP_PAGE = Info("top_page", "Most popular referrer")
33 |
34 |
35 | @app.route("/track")
36 | @REQUEST_TIME.time()
37 | def track(storage: ViewsStorageBackend):
38 | try:
39 | referer = request.headers["Referer"]
40 | except KeyError:
41 | return Response(status=HTTPStatus.BAD_REQUEST)
42 |
43 | storage.increment(referer)
44 |
45 | return Response(
46 | PIXEL,
47 | headers={
48 | "Content-Type": "image/gif",
49 | "Expires": "Mon, 01 Jan 1990 00:00:00 GMT",
50 | "Cache-Control": "no-cache, no-store, must-revalidate",
51 | "Pragma": "no-cache",
52 | },
53 | )
54 |
55 |
56 | @app.route("/stats")
57 | @REQUEST_TIME.time()
58 | def stats(storage: ViewsStorageBackend):
59 | with tracer.start_span("storage-query"):
60 | counts: dict[str, int] = storage.most_common(10)
61 |
62 | AVERAGE_TOP_HITS.set(sum(counts.values()) / len(counts) if counts else 0)
63 | TOP_PAGE.info({"top": max(counts, default="n/a", key=lambda x: counts[x])})
64 |
65 | return counts
66 |
67 |
68 | @app.route("/test")
69 | @REQUEST_TIME.time()
70 | def test():
71 | return """
72 |
73 |
74 |
75 |
76 | """
77 |
78 |
79 | FlaskInjector(app=app, modules=[di.RedisModule()])
80 | app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {"/metrics": make_wsgi_app()})
81 |
82 | if __name__ == "__main__":
83 | app.run(host="0.0.0.0", port=8000)
84 |
--------------------------------------------------------------------------------
/Chapter 5/01 - A bit of history: zope.interface/colliders_interfaces.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from dataclasses import dataclass
3 |
4 | from zope.interface import Interface, Attribute, implementer
5 | from zope.interface.verify import verifyObject
6 |
7 |
8 | def rects_collide(rect1, rect2):
9 | """Check collision between rectangles
10 |
11 | Rectangle coordinates:
12 | ┌───(x2, y2)
13 | │ │
14 | (x1, y1)──┘
15 | """
16 | return (
17 | rect1.x1 < rect2.x2
18 | and rect1.x2 > rect2.x1
19 | and rect1.y1 < rect2.y2
20 | and rect1.y2 > rect2.y1
21 | )
22 |
23 |
24 | def find_collisions(objects):
25 | for item in objects:
26 | verifyObject(ICollidable, item)
27 |
28 | return [
29 | (item1, item2)
30 | for item1, item2 in itertools.combinations(objects, 2)
31 | if rects_collide(item1.bounding_box, item2.bounding_box)
32 | ]
33 |
34 |
35 | class ICollidable(Interface):
36 | bounding_box = Attribute("Object's bounding box")
37 |
38 |
39 | @dataclass
40 | class Box:
41 | x1: float
42 | y1: float
43 | x2: float
44 | y2: float
45 |
46 |
47 | @implementer(ICollidable)
48 | @dataclass
49 | class Square:
50 | x: float
51 | y: float
52 | size: float
53 |
54 | @property
55 | def bounding_box(self):
56 | return Box(self.x, self.y, self.x + self.size, self.y + self.size)
57 |
58 |
59 | @implementer(ICollidable)
60 | @dataclass
61 | class Rect:
62 | x: float
63 | y: float
64 | width: float
65 | height: float
66 |
67 | @property
68 | def bounding_box(self):
69 | return Box(self.x, self.y, self.x + self.width, self.y + self.height)
70 |
71 |
72 | @implementer(ICollidable)
73 | @dataclass
74 | class Circle:
75 | x: float
76 | y: float
77 | radius: float
78 |
79 | @property
80 | def bounding_box(self):
81 | return Box(
82 | self.x - self.radius,
83 | self.y - self.radius,
84 | self.x + self.radius,
85 | self.y + self.radius,
86 | )
87 |
88 |
89 | @dataclass
90 | class Point:
91 | x: float
92 | y: float
93 |
94 |
95 | if __name__ == "__main__":
96 | print("Valid attempt:")
97 | for collision in find_collisions(
98 | [
99 | Square(0, 0, 10),
100 | Rect(5, 5, 20, 20),
101 | Square(15, 20, 5),
102 | Circle(1, 1, 2),
103 | ]
104 | ):
105 | print(collision)
106 |
107 | print("Invalid attempt")
108 | for collision in find_collisions(
109 | [
110 | Square(0, 0, 10),
111 | Rect(5, 5, 20, 20),
112 | Square(15, 20, 5),
113 | Circle(1, 1, 2),
114 | Point(100, 200),
115 | ]
116 | ):
117 | print(collision)
118 |
--------------------------------------------------------------------------------
/Chapter 5/02 - Using function annotations and abstract base classes/colliders_abc.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from abc import ABC, abstractmethod
3 | from dataclasses import dataclass
4 |
5 |
6 | def rects_collide(rect1, rect2):
7 | """Check collision between rectangles
8 |
9 | Rectangle coordinates:
10 | ┌───(x2, y2)
11 | │ │
12 | (x1, y1)──┘
13 | """
14 | return (
15 | rect1.x1 < rect2.x2
16 | and rect1.x2 > rect2.x1
17 | and rect1.y1 < rect2.y2
18 | and rect1.y2 > rect2.y1
19 | )
20 |
21 |
22 | class ColliderABC(ABC):
23 | @property
24 | @abstractmethod
25 | def bounding_box(self):
26 | ...
27 |
28 |
29 | def find_collisions(objects):
30 | for item in objects:
31 | if not isinstance(item, ColliderABC):
32 | raise TypeError(f"{item} is not a collider")
33 |
34 | return [
35 | (item1, item2)
36 | for item1, item2 in itertools.combinations(objects, 2)
37 | if rects_collide(item1.bounding_box, item2.bounding_box)
38 | ]
39 |
40 |
41 | @dataclass
42 | class Box:
43 | x1: float
44 | y1: float
45 | x2: float
46 | y2: float
47 |
48 |
49 | @dataclass
50 | class Square(ColliderABC):
51 | x: float
52 | y: float
53 | size: float
54 |
55 | @property
56 | def bounding_box(self):
57 | return Box(self.x, self.y, self.x + self.size, self.y + self.size)
58 |
59 |
60 | @dataclass
61 | class Rect(ColliderABC):
62 | x: float
63 | y: float
64 | width: float
65 | height: float
66 |
67 | @property
68 | def bounding_box(self):
69 | return Box(self.x, self.y, self.x + self.width, self.y + self.height)
70 |
71 |
72 | @dataclass
73 | class Circle(ColliderABC):
74 | x: float
75 | y: float
76 | radius: float
77 |
78 | @property
79 | def bounding_box(self):
80 | return Box(
81 | self.x - self.radius,
82 | self.y - self.radius,
83 | self.x + self.radius,
84 | self.y + self.radius,
85 | )
86 |
87 |
88 | @dataclass
89 | class Point:
90 | x: float
91 | y: float
92 |
93 |
94 | @dataclass
95 | class PointWithABC(ColliderABC):
96 | x: float
97 | y: float
98 |
99 |
100 | if __name__ == "__main__":
101 | print("Valid attempt:")
102 | for collision in find_collisions(
103 | [
104 | Square(0, 0, 10),
105 | Rect(5, 5, 20, 20),
106 | Square(15, 20, 5),
107 | Circle(1, 1, 2),
108 | ]
109 | ):
110 | print(collision)
111 |
112 | print("Invalid attempt")
113 | for collision in find_collisions(
114 | [
115 | Square(0, 0, 10),
116 | Rect(5, 5, 20, 20),
117 | Square(15, 20, 5),
118 | Circle(1, 1, 2),
119 | Point(100, 200),
120 | ]
121 | ):
122 | print(collision)
123 |
124 | print("Invalid attempt using PointWithABC")
125 | for collision in find_collisions(
126 | [
127 | Square(0, 0, 10),
128 | Rect(5, 5, 20, 20),
129 | Square(15, 20, 5),
130 | Circle(1, 1, 2),
131 | PointWithABC(100, 200),
132 | ]
133 | ):
134 | print(collision)
135 |
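Note on the ABC-based variant above: `PointWithABC` subclasses `ColliderABC` but never overrides the abstract `bounding_box` property, so the last loop fails at instantiation time, before `find_collisions()` is even called, while the plain `Point` is rejected by the `isinstance()` check inside `find_collisions()`. A minimal sketch of the expected behaviour, assuming the classes defined above are in scope:

    isinstance(Square(0, 0, 10), ColliderABC)   # True - Square explicitly subclasses the ABC
    isinstance(Point(100, 200), ColliderABC)    # False - rejected inside find_collisions()
    PointWithABC(100, 200)                      # TypeError: abstract bounding_box not overridden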
--------------------------------------------------------------------------------
/Chapter 5/03 - Interfaces though type annotations/colliders_protocol.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from dataclasses import dataclass
3 | from typing import Iterable, Protocol, runtime_checkable
4 |
5 |
6 | @runtime_checkable
7 | class IBox(Protocol):
8 | x1: float
9 | y1: float
10 | x2: float
11 | y2: float
12 |
13 |
14 | @runtime_checkable
15 | class ICollider(Protocol):
16 | @property
17 | def bounding_box(self) -> IBox:
18 | ...
19 |
20 |
21 | def rects_collide(rect1: IBox, rect2: IBox):
22 | """Check collision between rectangles
23 |
24 | Rectangle coordinates:
25 | ┌───(x2, y2)
26 | │ │
27 | (x1, y1)──┘
28 | """
29 | return (
30 | rect1.x1 < rect2.x2
31 | and rect1.x2 > rect2.x1
32 | and rect1.y1 < rect2.y2
33 | and rect1.y2 > rect2.y1
34 | )
35 |
36 |
37 | def find_collisions(objects: Iterable[ICollider]):
38 | for item in objects:
39 | if not isinstance(item, ICollider):
40 | raise TypeError(f"{item} is not a collider")
41 |
42 | return [
43 | (item1, item2)
44 | for item1, item2 in itertools.combinations(objects, 2)
45 | if rects_collide(item1.bounding_box, item2.bounding_box)
46 | ]
47 |
48 |
49 | @dataclass
50 | class Box:
51 | x1: float
52 | y1: float
53 | x2: float
54 | y2: float
55 |
56 |
57 | @dataclass
58 | class Square:
59 | x: float
60 | y: float
61 | size: float
62 |
63 | @property
64 | def bounding_box(self) -> IBox:
65 | return Box(self.x, self.y, self.x + self.size, self.y + self.size)
66 |
67 |
68 | @dataclass
69 | class Rect:
70 | x: float
71 | y: float
72 | width: float
73 | height: float
74 |
75 | @property
76 | def bounding_box(self) -> IBox:
77 | return Box(self.x, self.y, self.x + self.width, self.y + self.height)
78 |
79 |
80 | @dataclass
81 | class Circle:
82 | x: float
83 | y: float
84 | radius: float
85 |
86 | @property
87 | def bounding_box(self) -> IBox:
88 | return Box(
89 | self.x - self.radius,
90 | self.y - self.radius,
91 | self.x + self.radius,
92 | self.y + self.radius,
93 | )
94 |
95 |
96 | @dataclass
97 | class Point:
98 | x: float
99 | y: float
100 |
101 |
102 | @dataclass
103 | class Line:
104 | p1: Point
105 | p2: Point
106 |
107 | @property
108 | def bounding_box(self) -> IBox:
109 | return Box(
110 | self.p1.x,
111 | self.p1.y,
112 | self.p2.x,
113 | self.p2.y,
114 | )
115 |
116 |
117 | if __name__ == "__main__":
118 | print("Valid attempt:")
119 | for collision in find_collisions(
120 | [
121 | Square(0, 0, 10),
122 | Rect(5, 5, 20, 20),
123 | Square(15, 20, 5),
124 | Circle(1, 1, 2),
125 | ]
126 | ):
127 | print(collision)
128 |
129 | print("Invalid attempt")
130 | for collision in find_collisions(
131 | [
132 | Square(0, 0, 10),
133 | Rect(5, 5, 20, 20),
134 | Square(15, 20, 5),
135 | Circle(1, 1, 2),
136 | Point(100, 200),
137 | ]
138 | ):
139 | print(collision)
140 |
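For reference, the `runtime_checkable` decorator makes the `isinstance()` check in `find_collisions()` purely structural: any object exposing a `bounding_box` attribute passes, regardless of inheritance. A small sketch of the distinction, using the classes defined above:

    # Structural check: Line never inherits from ICollider, yet it qualifies.
    isinstance(Line(Point(0, 0), Point(5, 5)), ICollider)  # True - has bounding_box
    isinstance(Point(100, 200), ICollider)                 # False - no bounding_box

    # A static checker such as mypy should also flag Point(100, 200) in the second
    # find_collisions() call, because it does not satisfy Iterable[ICollider].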
--------------------------------------------------------------------------------
/Chapter 5/02 - Using function annotations and abstract base classes/colliders_subclasshooks.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from abc import ABC, abstractmethod
3 | from dataclasses import dataclass
4 |
5 |
6 | def rects_collide(rect1, rect2):
7 | """Check collision between rectangles
8 |
9 | Rectangle coordinates:
10 | ┌───(x2, y2)
11 | │ │
12 | (x1, y1)──┘
13 | """
14 | return (
15 | rect1.x1 < rect2.x2
16 | and rect1.x2 > rect2.x1
17 | and rect1.y1 < rect2.y2
18 | and rect1.y2 > rect2.y1
19 | )
20 |
21 |
22 | class ColliderABC(ABC):
23 | @property
24 | @abstractmethod
25 | def bounding_box(self):
26 | ...
27 |
28 | @classmethod
29 | def __subclasshook__(cls, C):
30 | if cls is ColliderABC:
31 | if any("bounding_box" in B.__dict__ for B in C.__mro__):
32 | return True
33 | return NotImplemented
34 |
35 |
36 | def find_collisions(objects):
37 | for item in objects:
38 | if not isinstance(item, ColliderABC):
39 | raise TypeError(f"{item} is not a collider")
40 |
41 | return [
42 | (item1, item2)
43 | for item1, item2 in itertools.combinations(objects, 2)
44 | if rects_collide(item1.bounding_box, item2.bounding_box)
45 | ]
46 |
47 |
48 | @dataclass
49 | class Box:
50 | x1: float
51 | y1: float
52 | x2: float
53 | y2: float
54 |
55 |
56 | @dataclass
57 | class Square:
58 | x: float
59 | y: float
60 | size: float
61 |
62 | @property
63 | def bounding_box(self):
64 | return Box(self.x, self.y, self.x + self.size, self.y + self.size)
65 |
66 |
67 | @dataclass
68 | class Rect:
69 | x: float
70 | y: float
71 | width: float
72 | height: float
73 |
74 | @property
75 | def bounding_box(self):
76 | return Box(self.x, self.y, self.x + self.width, self.y + self.height)
77 |
78 |
79 | @dataclass
80 | class Circle:
81 | x: float
82 | y: float
83 | radius: float
84 |
85 | @property
86 | def bounding_box(self):
87 | return Box(
88 | self.x - self.radius,
89 | self.y - self.radius,
90 | self.x + self.radius,
91 | self.y + self.radius,
92 | )
93 |
94 |
95 | @dataclass
96 | class Point:
97 | x: float
98 | y: float
99 |
100 |
101 | @dataclass
102 | class Line:
103 | p1: Point
104 | p2: Point
105 |
106 | @property
107 | def bounding_box(self):
108 | return Box(
109 | self.p1.x,
110 | self.p1.y,
111 | self.p2.x,
112 | self.p2.y,
113 | )
114 |
115 |
116 | if __name__ == "__main__":
117 | print("Valid attempt:")
118 | for collision in find_collisions(
119 | [
120 | Square(0, 0, 10),
121 | Rect(5, 5, 20, 20),
122 | Square(15, 20, 5),
123 | Circle(1, 1, 2),
124 | ]
125 | ):
126 | print(collision)
127 |
128 | print("Invalid attempt")
129 | for collision in find_collisions(
130 | [
131 | Square(0, 0, 10),
132 | Rect(5, 5, 20, 20),
133 | Square(15, 20, 5),
134 | Circle(1, 1, 2),
135 | Point(100, 200),
136 | ]
137 | ):
138 | print(collision)
139 |
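The `__subclasshook__` above turns `ColliderABC` into an implicit, structural interface: `isinstance()` returns True for any class whose MRO defines a `bounding_box` attribute, even without inheritance or explicit `register()` calls. A short sketch of the effect, assuming the classes from this file are in scope:

    # None of these classes inherit from ColliderABC, yet the hook accepts them.
    isinstance(Square(0, 0, 10), ColliderABC)                 # True
    isinstance(Line(Point(0, 0), Point(5, 5)), ColliderABC)   # True
    isinstance(Point(100, 200), ColliderABC)                  # False - no bounding_box
    issubclass(Circle, ColliderABC)                           # True - the hook also answers issubclass()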
--------------------------------------------------------------------------------
/Chapter 5/01 - A bit of history: zope.interface/colliders_invariants.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from dataclasses import dataclass
3 |
4 | from zope.interface import Interface, Attribute, implementer, invariant
5 | from zope.interface.verify import verifyObject
6 |
7 |
8 | def rects_collide(rect1, rect2):
9 | """Check collision between rectangles
10 |
11 | Rectangle coordinates:
12 | ┌───(x2, y2)
13 | │ │
14 | (x1, y1)──┘
15 | """
16 | return (
17 | rect1.x1 < rect2.x2
18 | and rect1.x2 > rect2.x1
19 | and rect1.y1 < rect2.y2
20 | and rect1.y2 > rect2.y1
21 | )
22 |
23 |
24 | def find_collisions(objects):
25 | for item in objects:
26 | verifyObject(ICollidable, item)
27 | ICollidable.validateInvariants(item)
28 |
29 | return [
30 | (item1, item2)
31 | for item1, item2 in itertools.combinations(objects, 2)
32 | if rects_collide(item1.bounding_box, item2.bounding_box)
33 | ]
34 |
35 |
36 | class IBBox(Interface):
37 | x1 = Attribute("lower-left x coordinate")
38 | y1 = Attribute("lower-left y coordinate")
39 | x2 = Attribute("upper-right x coordinate")
40 | y2 = Attribute("upper-right y coordinate")
41 |
42 |
43 | class ICollidable(Interface):
44 | bounding_box = Attribute("Object's bounding box")
45 | invariant(lambda self: verifyObject(IBBox, self.bounding_box))
46 |
47 |
48 | @implementer(IBBox)
49 | @dataclass
50 | class Box:
51 | x1: float
52 | y1: float
53 | x2: float
54 | y2: float
55 |
56 |
57 | @implementer(ICollidable)
58 | @dataclass
59 | class Square:
60 | x: float
61 | y: float
62 | size: float
63 |
64 | @property
65 | def bounding_box(self):
66 | return Box(self.x, self.y, self.x + self.size, self.y + self.size)
67 |
68 |
69 | @implementer(ICollidable)
70 | @dataclass
71 | class Rect:
72 | x: float
73 | y: float
74 | width: float
75 | height: float
76 |
77 | @property
78 | def bounding_box(self):
79 | return Box(self.x, self.y, self.x + self.width, self.y + self.height)
80 |
81 |
82 | @implementer(ICollidable)
83 | @dataclass
84 | class Circle:
85 | x: float
86 | y: float
87 | radius: float
88 |
89 | @property
90 | def bounding_box(self):
91 | return Box(
92 | self.x - self.radius,
93 | self.y - self.radius,
94 | self.x + self.radius,
95 | self.y + self.radius,
96 | )
97 |
98 |
99 | @implementer(ICollidable)
100 | @dataclass
101 | class Point:
102 | x: float
103 | y: float
104 |
105 | @property
106 | def bounding_box(self):
107 | return self
108 |
109 |
110 | if __name__ == "__main__":
111 | print("Valid attempt:")
112 | for collision in find_collisions(
113 | [
114 | Square(0, 0, 10),
115 | Rect(5, 5, 20, 20),
116 | Square(15, 20, 5),
117 | Circle(1, 1, 2),
118 | ]
119 | ):
120 | print(collision)
121 |
122 | print("Invalid attempt")
123 | for collision in find_collisions(
124 | [
125 | Square(0, 0, 10),
126 | Rect(5, 5, 20, 20),
127 | Square(15, 20, 5),
128 | Circle(1, 1, 2),
129 | Point(100, 200),
130 | ]
131 | ):
132 | print(collision)
133 |
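Worth noting about the invariant-based variant: `Point` here is declared with `@implementer(ICollidable)` and has a `bounding_box` property, so plain `verifyObject(ICollidable, item)` passes; it is the `invariant()` on `ICollidable`, checked via `validateInvariants()`, that rejects it, because the returned bounding box (the point itself) does not provide `IBBox`. Roughly, assuming the module above is in scope:

    from zope.interface.exceptions import Invalid

    point = Point(100, 200)
    verifyObject(ICollidable, point)            # passes - the attribute is present
    try:
        ICollidable.validateInvariants(point)   # fails - bounding_box is not an IBBox
    except Invalid as err:
        print(f"invariant violated: {err}")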
--------------------------------------------------------------------------------
/Chapter 6/07 - Throttling/throttling.py:
--------------------------------------------------------------------------------
1 | """
 2 | Example from the "An example of a threaded application" section,
 3 | showing how throttling / rate limiting can be implemented
 4 | in a multithreaded application
5 |
6 | """
7 | import time
8 | from queue import Queue, Empty
9 | from threading import Thread, Lock
10 |
11 | import requests
12 |
13 |
14 | SYMBOLS = ("USD", "EUR", "PLN", "NOK", "CZK")
15 | BASES = ("USD", "EUR", "PLN", "NOK", "CZK")
16 |
17 | THREAD_POOL_SIZE = 4
18 |
19 |
20 | class Throttle:
21 | def __init__(self, rate):
22 | self._consume_lock = Lock()
23 | self.rate = rate
24 | self.tokens = 0
25 | self.last = None
26 |
27 | def consume(self, amount=1):
28 | with self._consume_lock:
29 | now = time.time()
30 |
31 |             # time measurement is initialized on the first
32 |             # token request to avoid an initial burst
33 | if self.last is None:
34 | self.last = now
35 |
36 | elapsed = now - self.last
37 |
38 |             # make sure that the quantum of elapsed time is big
39 |             # enough to add new tokens
40 | if elapsed * self.rate > 1:
41 | self.tokens += elapsed * self.rate
42 | self.last = now
43 |
44 | # never over-fill the bucket
45 | self.tokens = min(self.rate, self.tokens)
46 |
47 | # finally dispatch tokens if available
48 | if self.tokens >= amount:
49 | self.tokens -= amount
50 | return amount
51 |
52 | return 0
53 |
54 |
55 | def fetch_rates(base):
56 | response = requests.get(f"https://api.vatcomply.com/rates?base={base}")
57 |
58 | response.raise_for_status()
59 | rates = response.json()["rates"]
60 | # note: same currency exchanges to itself 1:1
61 | rates[base] = 1.0
62 | return base, rates
63 |
64 |
65 | def present_result(base, rates):
66 | rates_line = ", ".join([f"{rates[symbol]:7.03} {symbol}" for symbol in SYMBOLS])
67 | print(f"1 {base} = {rates_line}")
68 |
69 |
70 | def worker(work_queue, results_queue, throttle):
71 | while not work_queue.empty():
72 | try:
73 | item = work_queue.get_nowait()
74 | except Empty:
75 | break
76 |
77 | while not throttle.consume():
78 | time.sleep(0.1)
79 |
80 | try:
81 | result = fetch_rates(item)
82 | except Exception as err:
83 | results_queue.put(err)
84 | else:
85 | results_queue.put(result)
86 | finally:
87 | work_queue.task_done()
88 |
89 |
90 | def main():
91 | work_queue = Queue()
92 | results_queue = Queue()
93 | throttle = Throttle(10)
94 |
95 | for base in BASES:
96 | work_queue.put(base)
97 |
98 | threads = [
99 | Thread(target=worker, args=(work_queue, results_queue, throttle))
100 | for _ in range(THREAD_POOL_SIZE)
101 | ]
102 |
103 | for thread in threads:
104 | thread.start()
105 |
106 | work_queue.join()
107 |
108 | while threads:
109 | threads.pop().join()
110 |
111 | while not results_queue.empty():
112 | result = results_queue.get()
113 | if isinstance(result, Exception):
114 | raise result
115 |
116 | present_result(*result)
117 |
118 |
119 | if __name__ == "__main__":
120 | started = time.time()
121 | main()
122 | elapsed = time.time() - started
123 |
124 | print()
125 | print("time elapsed: {:.2f}s".format(elapsed))
126 |
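The `Throttle` class above is a simple token-bucket limiter and can be exercised on its own, independent of the worker threads. A minimal single-threaded sketch (timings are approximate; `Throttle` and the `time` module are taken from the file above):

    throttle = Throttle(rate=5)   # allow roughly 5 tokens per second

    granted = 0
    started = time.time()
    while time.time() - started < 2:
        if throttle.consume():
            granted += 1
        time.sleep(0.01)

    # With rate=5 over ~2 seconds, roughly 10 tokens should have been granted.
    print(f"tokens granted: {granted}")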
--------------------------------------------------------------------------------
/Chapter 4/06 - Dunder methods/matrices_with_scalars.py:
--------------------------------------------------------------------------------
1 | from numbers import Number
2 |
3 |
4 | class Matrix:
5 | def __init__(self, rows):
6 | if len(set(len(row) for row in rows)) > 1:
7 | raise ValueError("All matrix rows must be the same length")
8 |
9 | self.rows = rows
10 |
11 |     def __add__(self, other):
12 |         if isinstance(other, Matrix):
13 |             # the dimension check applies only to matrix operands; scalars broadcast
14 |             if len(self.rows) != len(other.rows) or len(self.rows[0]) != len(other.rows[0]):
15 |                 raise ValueError("Matrix dimensions don't match")
16 |             return Matrix(
17 |                 [
18 |                     [a + b for a, b in zip(a_row, b_row)]
19 |                     for a_row, b_row in zip(self.rows, other.rows)
20 |                 ]
21 |             )
22 |         elif isinstance(other, Number):
23 |             return Matrix([[item + other for item in row] for row in self.rows])
24 |         else:
25 |             raise TypeError(f"Can't add {type(other)} to Matrix")
26 |
27 |     def __sub__(self, other):
28 |         if isinstance(other, Matrix):
29 |             # the dimension check applies only to matrix operands; scalars broadcast
30 |             if len(self.rows) != len(other.rows) or len(self.rows[0]) != len(other.rows[0]):
31 |                 raise ValueError("Matrix dimensions don't match")
32 |             return Matrix(
33 |                 [
34 |                     [a - b for a, b in zip(a_row, b_row)]
35 |                     for a_row, b_row in zip(self.rows, other.rows)
36 |                 ]
37 |             )
38 |         elif isinstance(other, Number):
39 |             return Matrix([[item - other for item in row] for row in self.rows])
40 |         else:
41 |             raise TypeError(f"Can't subtract {type(other)} from Matrix")
42 |
43 | def __mul__(self, other):
44 | if isinstance(other, Matrix):
45 | if len(self.rows[0]) != len(other.rows):
46 | raise ValueError("Matrix dimensions don't match")
47 |
48 | rows = [[0 for _ in other.rows[0]] for _ in self.rows]
49 |
50 | for i in range(len(self.rows)):
51 | for j in range(len(other.rows[0])):
52 | for k in range(len(other.rows)):
53 | rows[i][j] += self.rows[i][k] * other.rows[k][j]
54 |
55 | return Matrix(rows)
56 |
57 | elif isinstance(other, Number):
58 | return Matrix([[item * other for item in row] for row in self.rows])
59 |
60 | else:
61 | raise TypeError(f"Can't multiply {type(other)} with Matrix")
62 |
63 | def __eq__(self, other):
64 | if isinstance(other, Matrix):
65 | return self.rows == other.rows
66 | return super().__eq__(other)
67 |
68 | def __rmul__(self, other):
69 | if isinstance(other, Number):
70 | return self * other
71 |
72 | def __repr__(self):
73 | return "\n".join(str(row) for row in self.rows)
74 |
75 |
76 | if __name__ == "__main__":
77 | m0 = Matrix(
78 | [
79 | [1, 0, 0],
80 | [0, 1, 0],
81 | [0, 0, 1],
82 | ]
83 | )
84 | m1 = Matrix(
85 | [
86 | [1, 2, 3],
87 | [4, 1, 4],
88 | [5, 7, 9],
89 | ]
90 | )
91 | assert (m1 * m0).rows == m1.rows
92 | m2 = Matrix(
93 | [
94 | [1, 2, 3],
95 | [1, 4, 3],
96 | [1, 0, 5],
97 | ]
98 | )
99 | assert (m2 * m0).rows == m2.rows
100 |
101 | assert m1 * 2 == m1 + m1
102 | assert 2 * m1 == m1 + m1
103 | assert 3 * m1 == m1 + m1 + m1
104 |
105 | assert m2 * 2 == m2 + m2
106 | assert 2 * m2 == m2 + m2
107 | assert 3 * m2 == m2 + m2 + m2
108 |
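Two details worth calling out in the dunder-method example above: `__eq__` is what makes the value-based `assert` comparisons work, and `__rmul__` is what lets a scalar appear on the left-hand side of `*`. There are no `__radd__`/`__rsub__` counterparts, so scalar-on-the-left addition still fails. A quick illustration, assuming the `Matrix` class above is in scope:

    m = Matrix([[1, 2], [3, 4]])

    m * 2       # fine: Matrix.__mul__ handles Number operands
    2 * m       # fine: int.__mul__ gives up, so Matrix.__rmul__ takes over
    m + 2       # fine: Matrix.__add__ handles Number operands
    # 2 + m     # TypeError: int.__add__ gives up and Matrix defines no __radd__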
--------------------------------------------------------------------------------
/Chapter 4/06 - Dunder methods/matrices_with_singledistpatch.py:
--------------------------------------------------------------------------------
1 | from functools import singledispatchmethod
2 | from numbers import Number
3 |
4 |
5 | class Matrix:
6 | def __init__(self, rows):
7 | if len(set(len(row) for row in rows)) > 1:
8 | raise ValueError("All matrix rows must be the same length")
9 |
10 | self.rows = rows
11 |
12 | @singledispatchmethod
13 | def __add__(self, other):
14 | if not isinstance(other, Matrix):
15 | raise TypeError(f"Can't add {type(other)} to Matrix")
16 |
17 | if len(self.rows) != len(other.rows) or len(self.rows[0]) != len(other.rows[0]):
18 | raise ValueError("Matrix dimensions don't match")
19 |
20 | return Matrix(
21 | [
22 | [a + b for a, b in zip(a_row, b_row)]
23 | for a_row, b_row in zip(self.rows, other.rows)
24 | ]
25 | )
26 |
27 | @__add__.register(Number)
28 | def _(self, other):
29 | return Matrix([[item + other for item in row] for row in self.rows])
30 |
31 | @singledispatchmethod
32 | def __sub__(self, other):
33 | if not isinstance(other, Matrix):
34 | raise TypeError(f"Can't subtract {type(other)} from Matrix")
35 |
36 | if len(self.rows) != len(other.rows) or len(self.rows[0]) != len(other.rows[0]):
37 | raise ValueError("Matrix dimensions don't match")
38 |
39 | return Matrix(
40 | [
41 | [a - b for a, b in zip(a_row, b_row)]
42 | for a_row, b_row in zip(self.rows, other.rows)
43 | ]
44 | )
45 |
46 | @__sub__.register(Number)
47 | def _(self, other):
48 | return Matrix([[item - other for item in row] for row in self.rows])
49 |
50 | @singledispatchmethod
51 | def __mul__(self, other):
52 | if not isinstance(other, Matrix):
53 |             raise TypeError(f"Can't multiply {type(other)} with Matrix")
54 |
55 | if len(self.rows[0]) != len(other.rows):
56 | raise ValueError("Matrix dimensions don't match")
57 |
58 | rows = [[0 for _ in other.rows[0]] for _ in self.rows]
59 |
60 | for i in range(len(self.rows)):
61 | for j in range(len(other.rows[0])):
62 | for k in range(len(other.rows)):
63 | rows[i][j] += self.rows[i][k] * other.rows[k][j]
64 |
65 | return Matrix(rows)
66 |
67 | @__mul__.register(Number)
68 | def _(self, other):
69 | return Matrix([[item * other for item in row] for row in self.rows])
70 |
71 | def __eq__(self, other):
72 | if isinstance(other, Matrix):
73 | return self.rows == other.rows
74 | return super().__eq__(other)
75 |
76 | def __rmul__(self, other):
77 | if isinstance(other, Number):
78 | return self * other
79 |
80 | def __repr__(self):
81 | return "\n".join(str(row) for row in self.rows)
82 |
83 |
84 | if __name__ == "__main__":
85 | m0 = Matrix(
86 | [
87 | [1, 0, 0],
88 | [0, 1, 0],
89 | [0, 0, 1],
90 | ]
91 | )
92 | m1 = Matrix(
93 | [
94 | [1, 2, 3],
95 | [4, 1, 4],
96 | [5, 7, 9],
97 | ]
98 | )
99 | assert (m1 * m0).rows == m1.rows
100 | m2 = Matrix(
101 | [
102 | [1, 2, 3],
103 | [1, 4, 3],
104 | [1, 0, 5],
105 | ]
106 | )
107 | assert (m2 * m0).rows == m2.rows
108 |
109 | assert m1 * 2 == m1 + m1
110 | assert 2 * m1 == m1 + m1
111 | assert 3 * m1 == m1 + m1 + m1
112 |
113 | assert m2 * 2 == m2 + m2
114 | assert 2 * m2 == m2 + m2
115 | assert 3 * m2 == m2 + m2 + m2
116 |
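For context on the `singledispatchmethod` variant: dispatch happens on the type of the first non-`self` argument, so each `@__add__.register(Number)` / `@__mul__.register(Number)` overload replaces the `elif isinstance(...)` chains of the previous file. The same pattern works on plain functions with `functools.singledispatch`; a tiny standalone sketch (names here are illustrative only):

    from functools import singledispatch
    from numbers import Number

    @singledispatch
    def describe(value):
        return f"unsupported type: {type(value).__name__}"

    @describe.register(Number)
    def _(value):
        return f"a number: {value}"

    @describe.register(list)
    def _(value):
        return f"a list of {len(value)} items"

    print(describe(3.5))        # a number: 3.5
    print(describe([1, 2, 3]))  # a list of 3 items
    print(describe("spam"))     # unsupported type: str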
--------------------------------------------------------------------------------