├── .gitignore ├── LICENSE ├── README.md ├── demos ├── README.md ├── dagster │ ├── README.md │ ├── docs │ │ ├── demo_requirements.md │ │ ├── example_pipeline.md │ │ ├── repos_and_workspaces.md │ │ └── testing_pipelines.md │ ├── pipelines │ │ ├── __init__.py │ │ └── example_pipeline.py │ ├── repository.py │ ├── requirements.txt │ ├── tests │ │ ├── __init__.py │ │ ├── test_data.csv │ │ └── test_example_pipeline.py │ └── workspace.yaml ├── dvc-pipelines │ ├── .dvc │ │ ├── .gitignore │ │ └── config │ ├── .dvcignore │ ├── README.md │ ├── artefacts │ │ └── .gitignore │ ├── docs │ │ ├── demo_requirements.md │ │ └── example_pipeline.md │ ├── dvc.lock │ ├── dvc.yaml │ ├── dvc_pipelines.ipynb │ ├── metrics │ │ └── .gitignore │ ├── params.yaml │ ├── requirements.txt │ └── stages │ │ ├── config.py │ │ ├── get_data.py │ │ └── train_model.py ├── dvc │ ├── .dvc │ │ ├── .gitignore │ │ └── config │ ├── .dvcignore │ ├── README.md │ ├── data_and_model_versioning.ipynb │ ├── datasets │ │ ├── .gitignore │ │ └── example.csv.dvc │ ├── docs │ │ └── demo_requirements.md │ └── requirements.txt ├── ibis │ ├── README.md │ ├── docs │ │ └── demo_requirements.md │ ├── geography.sqlite │ ├── ibis_introduction.ipynb │ └── requirements.txt ├── jax │ ├── README.md │ ├── docs │ │ └── demo_requirements.md │ ├── introduction_to_jax.ipynb │ ├── linear_regression.ipynb │ ├── mnist_with_flax_and_optax.ipynb │ └── requirements.txt ├── mlflow │ ├── README.md │ ├── docs │ │ └── demo_requirements.md │ ├── mlflow_basics.ipynb │ └── requirements.txt ├── pydantic │ ├── README.md │ ├── config.yaml │ ├── config_schema.py │ ├── docs │ │ ├── define_data_model.md │ │ ├── demo_requirements.md │ │ └── validating_runtime_data.md │ ├── load_config.py │ └── requirements.txt ├── pytorch │ ├── MNIST.ipynb │ ├── MNIST_pytorch_lightning.ipynb │ ├── README.md │ ├── datasets.ipynb │ ├── docs │ │ └── demo_requirements.md │ ├── linear_regression_sgd.ipynb │ ├── logistic_regression_sgd.ipynb │ ├── requirements.txt │ └── tensors.ipynb ├── spacy │ ├── README.md │ ├── docs │ │ └── demo_requirements.md │ ├── requirements.txt │ └── spacy_101.ipynb └── sqlalchemy │ ├── README.md │ ├── alembic.ini │ ├── docs │ ├── data_models.md │ └── demo_requirements.md │ ├── migrations │ ├── README │ ├── env.py │ ├── script.py.mako │ └── versions │ │ └── c31efd831ee7_initial_migration.py │ ├── models.py │ ├── requirements.txt │ └── sql-alchemy-basics.ipynb ├── docs ├── 404.html ├── assets │ ├── images │ │ └── favicon.png │ ├── javascripts │ │ ├── bundle.8fd75fb4.min.js │ │ ├── bundle.8fd75fb4.min.js.map │ │ ├── lunr │ │ │ ├── min │ │ │ │ ├── lunr.ar.min.js │ │ │ │ ├── lunr.da.min.js │ │ │ │ ├── lunr.de.min.js │ │ │ │ ├── lunr.du.min.js │ │ │ │ ├── lunr.el.min.js │ │ │ │ ├── lunr.es.min.js │ │ │ │ ├── lunr.fi.min.js │ │ │ │ ├── lunr.fr.min.js │ │ │ │ ├── lunr.he.min.js │ │ │ │ ├── lunr.hi.min.js │ │ │ │ ├── lunr.hu.min.js │ │ │ │ ├── lunr.hy.min.js │ │ │ │ ├── lunr.it.min.js │ │ │ │ ├── lunr.ja.min.js │ │ │ │ ├── lunr.jp.min.js │ │ │ │ ├── lunr.kn.min.js │ │ │ │ ├── lunr.ko.min.js │ │ │ │ ├── lunr.multi.min.js │ │ │ │ ├── lunr.nl.min.js │ │ │ │ ├── lunr.no.min.js │ │ │ │ ├── lunr.pt.min.js │ │ │ │ ├── lunr.ro.min.js │ │ │ │ ├── lunr.ru.min.js │ │ │ │ ├── lunr.sa.min.js │ │ │ │ ├── lunr.stemmer.support.min.js │ │ │ │ ├── lunr.sv.min.js │ │ │ │ ├── lunr.ta.min.js │ │ │ │ ├── lunr.te.min.js │ │ │ │ ├── lunr.th.min.js │ │ │ │ ├── lunr.tr.min.js │ │ │ │ ├── lunr.vi.min.js │ │ │ │ └── lunr.zh.min.js │ │ │ ├── tinyseg.js │ │ │ └── wordcut.js │ │ └── workers │ │ │ ├── 
search.b8dbb3d2.min.js │ │ │ └── search.b8dbb3d2.min.js.map │ └── stylesheets │ │ ├── main.f2e4d321.min.css │ │ ├── main.f2e4d321.min.css.map │ │ ├── palette.06af60db.min.css │ │ └── palette.06af60db.min.css.map ├── dagster │ ├── docs │ │ ├── demo_requirements │ │ │ └── index.html │ │ ├── example_pipeline │ │ │ └── index.html │ │ ├── repos_and_workspaces │ │ │ └── index.html │ │ └── testing_pipelines │ │ │ └── index.html │ ├── index.html │ ├── pipelines │ │ ├── __init__.py │ │ └── example_pipeline.py │ ├── repository.py │ ├── requirements.txt │ ├── tests │ │ ├── __init__.py │ │ ├── test_data.csv │ │ └── test_example_pipeline.py │ └── workspace.yaml ├── dvc-pipelines │ ├── artefacts │ │ ├── dataset.csv │ │ └── model.joblib │ ├── docs │ │ ├── demo_requirements │ │ │ └── index.html │ │ └── example_pipeline │ │ │ └── index.html │ ├── dvc.lock │ ├── dvc.yaml │ ├── dvc_pipelines │ │ └── index.html │ ├── index.html │ ├── params.yaml │ ├── requirements.txt │ └── stages │ │ ├── config.py │ │ ├── get_data.py │ │ └── train_model.py ├── dvc │ ├── data_and_model_versioning │ │ └── index.html │ ├── datasets │ │ └── example.csv.dvc │ ├── docs │ │ └── demo_requirements │ │ │ └── index.html │ ├── index.html │ └── requirements.txt ├── ibis │ ├── docs │ │ └── demo_requirements │ │ │ └── index.html │ ├── geography.sqlite │ ├── ibis_introduction │ │ └── index.html │ ├── index.html │ └── requirements.txt ├── index.html ├── jax │ ├── docs │ │ └── demo_requirements │ │ │ └── index.html │ ├── index.html │ ├── introduction_to_jax │ │ └── index.html │ ├── linear_regression │ │ └── index.html │ ├── mnist_with_flax_and_optax │ │ └── index.html │ └── requirements.txt ├── mlflow │ ├── docs │ │ └── demo_requirements │ │ │ └── index.html │ ├── index.html │ ├── mlflow_basics │ │ └── index.html │ └── requirements.txt ├── pydantic │ ├── config.yaml │ ├── config_schema.py │ ├── docs │ │ ├── define_data_model │ │ │ └── index.html │ │ ├── demo_requirements │ │ │ └── index.html │ │ └── validating_runtime_data │ │ │ └── index.html │ ├── index.html │ ├── load_config.py │ └── requirements.txt ├── pytorch │ ├── MNIST │ │ └── index.html │ ├── MNIST_pytorch_lightning │ │ └── index.html │ ├── datasets │ │ └── index.html │ ├── docs │ │ └── demo_requirements │ │ │ └── index.html │ ├── index.html │ ├── linear_regression_sgd │ │ └── index.html │ ├── logistic_regression_sgd │ │ └── index.html │ ├── requirements.txt │ └── tensors │ │ └── index.html ├── sitemap.xml ├── sitemap.xml.gz ├── spacy │ ├── docs │ │ └── demo_requirements │ │ │ └── index.html │ ├── index.html │ ├── requirements.txt │ └── spacy_101 │ │ └── index.html └── sqlalchemy │ ├── alembic.ini │ ├── docs │ ├── data_models │ │ └── index.html │ └── demo_requirements │ │ └── index.html │ ├── index.html │ ├── migrations │ ├── README │ ├── env.py │ ├── script.py.mako │ └── versions │ │ └── c31efd831ee7_initial_migration.py │ ├── models.py │ ├── requirements.txt │ └── sql-alchemy-basics │ └── index.html ├── mkdocs.yml └── requirements_mkdocs.txt /.gitignore: -------------------------------------------------------------------------------- 1 | # misc 2 | .DS_Store 3 | .vscode 4 | 5 | # PyTorch 6 | data/ 7 | lightning_logs 8 | 9 | # mlflow 10 | mlflow/artefacts 11 | *.db 12 | *.pkl 13 | 14 | # Byte-compiled / optimized / DLL files 15 | __pycache__/ 16 | *.py[cod] 17 | *$py.class 18 | 19 | # C extensions 20 | *.so 21 | 22 | # Distribution / packaging 23 | .Python 24 | build/ 25 | develop-eggs/ 26 | dist/ 27 | downloads/ 28 | eggs/ 29 | .eggs/ 30 | lib/ 31 | lib64/ 32 | parts/ 33 | sdist/ 
34 | var/ 35 | wheels/ 36 | pip-wheel-metadata/ 37 | share/python-wheels/ 38 | *.egg-info/ 39 | .installed.cfg 40 | *.egg 41 | MANIFEST 42 | 43 | # PyInstaller 44 | # Usually these files are written by a python script from a template 45 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 46 | *.manifest 47 | *.spec 48 | 49 | # Installer logs 50 | pip-log.txt 51 | pip-delete-this-directory.txt 52 | 53 | # Unit test / coverage reports 54 | htmlcov/ 55 | .tox/ 56 | .nox/ 57 | .coverage 58 | .coverage.* 59 | .cache 60 | nosetests.xml 61 | coverage.xml 62 | *.cover 63 | *.py,cover 64 | .hypothesis/ 65 | .pytest_cache/ 66 | 67 | # Translations 68 | *.mo 69 | *.pot 70 | 71 | # Django stuff: 72 | *.log 73 | local_settings.py 74 | db.sqlite3 75 | db.sqlite3-journal 76 | 77 | # Flask stuff: 78 | instance/ 79 | .webassets-cache 80 | 81 | # Scrapy stuff: 82 | .scrapy 83 | 84 | # Sphinx documentation 85 | docs/_build/ 86 | 87 | # PyBuilder 88 | target/ 89 | 90 | # Jupyter Notebook 91 | .ipynb_checkpoints 92 | 93 | # IPython 94 | profile_default/ 95 | ipython_config.py 96 | 97 | # pyenv 98 | .python-version 99 | 100 | # pipenv 101 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 102 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 103 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 104 | # install all needed dependencies. 105 | #Pipfile.lock 106 | 107 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 108 | __pypackages__/ 109 | 110 | # Celery stuff 111 | celerybeat-schedule 112 | celerybeat.pid 113 | 114 | # SageMath parsed files 115 | *.sage.py 116 | 117 | # Environments 118 | .env 119 | .venv* 120 | env/ 121 | venv/ 122 | ENV/ 123 | env.bak/ 124 | venv.bak/ 125 | 126 | # Spyder project settings 127 | .spyderproject 128 | .spyproject 129 | 130 | # Rope project settings 131 | .ropeproject 132 | 133 | # mkdocs documentation 134 | /site 135 | 136 | # mypy 137 | .mypy_cache/ 138 | .dmypy.json 139 | dmypy.json 140 | 141 | # Pyre type checker 142 | .pyre/ 143 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020-2022 Alex Ioannides 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # Study Notes and Demos
2 | 
3 | > "_The best way to learn is by doing, and the best way to remember what you have learnt is to write about it._"
4 | 
5 | This repository and the [accompanying website](http://alexioannides.com/notes-and-demos/) contain an ever-growing set of self-contained demos - i.e., Python modules and notebooks that demonstrate how to use the key features of various packages and frameworks that have caught my professional attention over the years.
6 | 
-------------------------------------------------------------------------------- /demos/README.md: --------------------------------------------------------------------------------
1 | # Study Notes and Demos
2 | 
3 | > "_The best way to learn is by doing, and the best way to remember what you have learnt is to write about it._"
4 | 
5 | This repository and the accompanying website contain an ever-growing set of self-contained demos - i.e., Python modules and notebooks that demonstrate how to use the key features of various packages and frameworks that have caught my professional attention over the years.
6 | 
-------------------------------------------------------------------------------- /demos/dagster/README.md: --------------------------------------------------------------------------------
1 | # Pipeline Orchestration with Dagster
2 | 
3 | Data engineering and ML often require some level of pipeline orchestration - e.g., for ETL or training models. [Dagster](https://www.dagster.io) is an alternative to orchestration tools such as Airflow.
4 | 
5 | ## Demo Objectives
6 | 
7 | * Define a pipeline with multiple stages.
8 | * Test pipelines and stages.
9 | * Add pipelines to a Dagster repository.
10 | * Configure pipelines within a Dagster workspace.
11 | 
12 | ## Running the Demo
13 | 
14 | If you run
15 | 
16 | ```text
17 | $ dagit
18 | ...
19 | ```
20 | 
21 | then the pipelines configured in `workspace.yaml` will be available to run in the UI at `http://localhost:3000`. Alternatively, individual pipelines can be run directly from the command line - e.g.,
22 | 
23 | ```text
24 | $ python demos/dagster/pipelines/example_pipeline.py
25 | ...
26 | ```
27 | 
28 | They can also be executed via the Dagster CLI - e.g.,
29 | 
30 | ```text
31 | $ dagster pipeline execute -f demos/dagster/pipelines/example_pipeline.py
32 | ...
33 | ```
34 | 
35 | Refer to the [Dagster docs](https://docs.dagster.io/getting-started) for more information - e.g., how to define schedules, triggers, etc.
36 | 
37 | ### Running Tests
38 | 
39 | Example tests (using PyTest) can be found in the `demos/dagster/tests` folder and can be executed by running
40 | 
41 | ```text
42 | $ pytest
43 | ...
44 | ```
45 | 
-------------------------------------------------------------------------------- /demos/dagster/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/dagster/requirements.txt"
6 | --8<-- "demos/dagster/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/dagster/docs/example_pipeline.md: --------------------------------------------------------------------------------
1 | # Example Pipeline
2 | 
3 | An example pipeline composed of multiple stages (or 'solids').
4 | 
5 | ```python title="demos/dagster/pipelines/example_pipeline.py"
6 | --8<-- "demos/dagster/pipelines/example_pipeline.py"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/dagster/docs/repos_and_workspaces.md: --------------------------------------------------------------------------------
1 | # Repositories & Workspaces
2 | 
3 | Dagster repositories and workspaces provide a mechanism that makes pipelines easier to manage when operating at scale - e.g., across multiple teams within an organisation all sharing the same Dagster cluster.
4 | 
5 | Repositories can be defined in code as follows:
6 | 
7 | ```python title="demos/dagster/repository.py"
8 | --8<-- "demos/dagster/repository.py"
9 | ```
10 | 
11 | And workspaces are configured via:
12 | 
13 | ```yaml title="demos/dagster/workspace.yaml"
14 | --8<-- "demos/dagster/workspace.yaml"
15 | ```
16 | 
17 | At a basic level the above example shows how to associate an execution environment (i.e., a Python virtual environment) with a given team's pipeline repository. This enables teams to specify their own Python requirements - e.g., a ML engineering team may want to use a newer version of NumPy than that used by an adjacent data engineering team.
18 | 
-------------------------------------------------------------------------------- /demos/dagster/docs/testing_pipelines.md: --------------------------------------------------------------------------------
1 | # Testing Pipelines
2 | 
3 | Tests for the example stages and the end-to-end pipeline. Note the mocking of the external HTTP request.
4 | 
5 | ```python title="demos/dagster/tests/test_example_pipeline.py"
6 | --8<-- "demos/dagster/tests/test_example_pipeline.py"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/dagster/pipelines/__init__.py: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/demos/dagster/pipelines/__init__.py
-------------------------------------------------------------------------------- /demos/dagster/pipelines/example_pipeline.py: --------------------------------------------------------------------------------
1 | """
2 | Example Dagster solids and pipeline from the Dagster tutorial.
3 | """ 4 | import csv 5 | from typing import Any, Dict, List 6 | 7 | import requests 8 | from dagster import execute_pipeline, pipeline, solid 9 | from dagster.core.execution.context.compute import SolidExecutionContext 10 | 11 | 12 | @solid 13 | def download_data(context: SolidExecutionContext) -> List[Dict[str, Any]]: 14 | """Download dataset.""" 15 | response = requests.get("https://docs.dagster.io/assets/cereal.csv") 16 | lines = response.text.split("\n") 17 | cereals = [row for row in csv.DictReader(lines)] 18 | context.log.info(f"Found {len(cereals)} cereals") 19 | return cereals 20 | 21 | 22 | @solid 23 | def find_max_sugar_cereal( 24 | context: SolidExecutionContext, cereals: List[Dict[str, Any]] 25 | ) -> str: 26 | """Find the product that has the maximum value for sugar content""" 27 | sorted_by_sugar = sorted(cereals, key=lambda cereal: cereal["sugars"]) 28 | max_sugar_cereal = sorted_by_sugar[-1]["name"] 29 | context.log.info(f"{max_sugar_cereal} has the greatest amount of sugar.") 30 | return max_sugar_cereal 31 | 32 | 33 | @pipeline 34 | def cereal_data_pipeline() -> str: 35 | """Compose the end-to-end cereal data pipeline.""" 36 | return find_max_sugar_cereal(download_data()) 37 | 38 | 39 | if __name__ == "__main__": 40 | result = execute_pipeline(cereal_data_pipeline) 41 | -------------------------------------------------------------------------------- /demos/dagster/repository.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module defines the Dagster pipeline repository. 3 | """ 4 | from dagster import repository 5 | 6 | from pipelines.example_pipeline import cereal_data_pipeline 7 | 8 | 9 | @repository 10 | def team_one(): 11 | return { 12 | "pipelines": { 13 | "cereal_data_pipeline": lambda: cereal_data_pipeline 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /demos/dagster/requirements.txt: -------------------------------------------------------------------------------- 1 | dagit==0.12.10 2 | dagster==0.12.10 3 | requests>=2.26.0 4 | pytest==6.2.5 5 | -------------------------------------------------------------------------------- /demos/dagster/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/demos/dagster/tests/__init__.py -------------------------------------------------------------------------------- /demos/dagster/tests/test_data.csv: -------------------------------------------------------------------------------- 1 | name,mfr,type,calories,protein,fat,sodium,fiber,carbo,sugars,potass,vitamins,shelf,weight,cups,rating 2 | 100% Bran,N,C,70,4,1,130,10,5,6,280,25,3,1,0.33,68.402973 3 | 100% Natural Bran,Q,C,120,3,5,15,2,8,8,135,0,3,1,1,33.983679 4 | All-Bran,K,C,70,4,1,260,9,7,5,320,25,3,1,0.33,59.425505 5 | All-Bran with Extra Fiber,K,C,50,4,0,140,14,8,0,330,25,3,1,0.5,93.704912 6 | Almond Delight,R,C,110,2,2,200,1,14,8,-1,25,3,1,0.75,34.384843 7 | Apple Cinnamon Cheerios,G,C,110,2,2,180,1.5,10.5,10,70,25,1,1,0.75,29.509541 8 | Apple Jacks,K,C,110,2,0,125,1,11,14,30,25,2,1,1,33.174094 9 | Basic 4,G,C,130,3,2,210,2,18,8,100,25,3,1.33,0.75,37.038562 10 | Bran Chex,R,C,90,2,1,200,4,15,6,125,25,1,1,0.67,49.120253 11 | Bran Flakes,P,C,90,3,0,210,5,13,5,190,25,3,1,0.67,53.313813 12 | Cap'n'Crunch,Q,C,120,1,2,220,0,12,12,35,25,2,1,0.75,18.042851 13 | Cheerios,G,C,110,6,2,290,2,17,1,105,25,1,1,1.25,50.764999 14 | 
Cinnamon Toast Crunch,G,C,120,1,3,210,0,13,9,45,25,2,1,0.75,19.823573 15 | Clusters,G,C,110,3,2,140,2,13,7,105,25,3,1,0.5,40.400208 16 | Cocoa Puffs,G,C,110,1,1,180,0,12,13,55,25,2,1,1,22.736446 17 | Corn Chex,R,C,110,2,0,280,0,22,3,25,25,1,1,1,41.445019 18 | Corn Flakes,K,C,100,2,0,290,1,21,2,35,25,1,1,1,45.863324 19 | Corn Pops,K,C,110,1,0,90,1,13,12,20,25,2,1,1,35.782791 20 | Count Chocula,G,C,110,1,1,180,0,12,13,65,25,2,1,1,22.396513 21 | Cracklin' Oat Bran,K,C,110,3,3,140,4,10,7,160,25,3,1,0.5,40.448772 22 | Cream of Wheat (Quick),N,H,100,3,0,80,1,21,0,-1,0,2,1,1,64.533816 23 | Crispix,K,C,110,2,0,220,1,21,3,30,25,3,1,1,46.895644 24 | Crispy Wheat & Raisins,G,C,100,2,1,140,2,11,10,120,25,3,1,0.75,36.176196 25 | Double Chex,R,C,100,2,0,190,1,18,5,80,25,3,1,0.75,44.330856 26 | Froot Loops,K,C,110,2,1,125,1,11,13,30,25,2,1,1,32.207582 27 | Frosted Flakes,K,C,110,1,0,200,1,14,11,25,25,1,1,0.75,31.435973 28 | Frosted Mini-Wheats,K,C,100,3,0,0,3,14,7,100,25,2,1,0.8,58.345141 29 | Fruit & Fibre Dates; Walnuts; and Oats,P,C,120,3,2,160,5,12,10,200,25,3,1.25,0.67,40.917047 30 | Fruitful Bran,K,C,120,3,0,240,5,14,12,190,25,3,1.33,0.67,41.015492 31 | Fruity Pebbles,P,C,110,1,1,135,0,13,12,25,25,2,1,0.75,28.025765 32 | Golden Crisp,P,C,100,2,0,45,0,11,15,40,25,1,1,0.88,35.252444 33 | Golden Grahams,G,C,110,1,1,280,0,15,9,45,25,2,1,0.75,23.804043 34 | Grape Nuts Flakes,P,C,100,3,1,140,3,15,5,85,25,3,1,0.88,52.076897 35 | Grape-Nuts,P,C,110,3,0,170,3,17,3,90,25,3,1,0.25,53.371007 36 | Great Grains Pecan,P,C,120,3,3,75,3,13,4,100,25,3,1,0.33,45.811716 37 | Honey Graham Ohs,Q,C,120,1,2,220,1,12,11,45,25,2,1,1,21.871292 38 | Honey Nut Cheerios,G,C,110,3,1,250,1.5,11.5,10,90,25,1,1,0.75,31.072217 39 | Honey-comb,P,C,110,1,0,180,0,14,11,35,25,1,1,1.33,28.742414 40 | Just Right Crunchy Nuggets,K,C,110,2,1,170,1,17,6,60,100,3,1,1,36.523683 41 | Just Right Fruit & Nut,K,C,140,3,1,170,2,20,9,95,100,3,1.3,0.75,36.471512 42 | Kix,G,C,110,2,1,260,0,21,3,40,25,2,1,1.5,39.241114 43 | Life,Q,C,100,4,2,150,2,12,6,95,25,2,1,0.67,45.328074 44 | Lucky Charms,G,C,110,2,1,180,0,12,12,55,25,2,1,1,26.734515 45 | Maypo,A,H,100,4,1,0,0,16,3,95,25,2,1,1,54.850917 46 | Muesli Raisins; Dates; & Almonds,R,C,150,4,3,95,3,16,11,170,25,3,1,1,37.136863 47 | Muesli Raisins; Peaches; & Pecans,R,C,150,4,3,150,3,16,11,170,25,3,1,1,34.139765 48 | Mueslix Crispy Blend,K,C,160,3,2,150,3,17,13,160,25,3,1.5,0.67,30.313351 49 | Multi-Grain Cheerios,G,C,100,2,1,220,2,15,6,90,25,1,1,1,40.105965 50 | Nut&Honey Crunch,K,C,120,2,1,190,0,15,9,40,25,2,1,0.67,29.924285 51 | Nutri-Grain Almond-Raisin,K,C,140,3,2,220,3,21,7,130,25,3,1.33,0.67,40.692320 52 | Nutri-grain Wheat,K,C,90,3,0,170,3,18,2,90,25,3,1,1,59.642837 53 | Oatmeal Raisin Crisp,G,C,130,3,2,170,1.5,13.5,10,120,25,3,1.25,0.5,30.450843 54 | Post Nat. 
Raisin Bran,P,C,120,3,1,200,6,11,14,260,25,3,1.33,0.67,37.840594 55 | Product 19,K,C,100,3,0,320,1,20,3,45,100,3,1,1,41.503540 56 | Puffed Rice,Q,C,50,1,0,0,0,13,0,15,0,3,0.5,1,60.756112 57 | Puffed Wheat,Q,C,50,2,0,0,1,10,0,50,0,3,0.5,1,63.005645 58 | Quaker Oat Squares,Q,C,100,4,1,135,2,14,6,110,25,3,1,0.5,49.511874 59 | Quaker Oatmeal,Q,H,100,5,2,0,2.7,-1,-1,110,0,1,1,0.67,50.828392 60 | Raisin Bran,K,C,120,3,1,210,5,14,12,240,25,2,1.33,0.75,39.259197 61 | Raisin Nut Bran,G,C,100,3,2,140,2.5,10.5,8,140,25,3,1,0.5,39.703400 62 | Raisin Squares,K,C,90,2,0,0,2,15,6,110,25,3,1,0.5,55.333142 63 | Rice Chex,R,C,110,1,0,240,0,23,2,30,25,1,1,1.13,41.998933 64 | Rice Krispies,K,C,110,2,0,290,0,22,3,35,25,1,1,1,40.560159 65 | Shredded Wheat,N,C,80,2,0,0,3,16,0,95,0,1,0.83,1,68.235885 66 | Shredded Wheat 'n'Bran,N,C,90,3,0,0,4,19,0,140,0,1,1,0.67,74.472949 67 | Shredded Wheat spoon size,N,C,90,3,0,0,3,20,0,120,0,1,1,0.67,72.801787 68 | Smacks,K,C,110,2,1,70,1,9,15,40,25,2,1,0.75,31.230054 69 | Special K,K,C,110,6,0,230,1,16,3,55,25,1,1,1,53.131324 70 | Strawberry Fruit Wheats,N,C,90,2,0,15,3,15,5,90,25,2,1,1,59.363993 71 | Total Corn Flakes,G,C,110,2,1,200,0,21,3,35,100,3,1,1,38.839746 72 | Total Raisin Bran,G,C,140,3,1,190,4,15,14,230,100,3,1.5,1,28.592785 73 | Total Whole Grain,G,C,100,3,1,200,3,16,3,110,100,3,1,1,46.658844 74 | Triples,G,C,110,2,1,250,0,21,3,60,25,3,1,0.75,39.106174 75 | Trix,G,C,110,1,1,140,0,13,12,25,25,2,1,1,27.753301 76 | Wheat Chex,R,C,100,3,1,230,3,17,3,115,25,1,1,0.67,49.787445 77 | Wheaties,G,C,100,3,1,200,3,17,3,110,25,1,1,1,51.592193 78 | Wheaties Honey Gold,G,C,110,2,1,200,1,16,8,60,25,1,1,0.75,36.187559 79 | -------------------------------------------------------------------------------- /demos/dagster/tests/test_example_pipeline.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for the example pipeline. 
3 | """ 4 | from io import BytesIO 5 | from unittest.mock import MagicMock, patch 6 | 7 | from dagster import execute_pipeline, build_solid_context 8 | from dagster.core.execution.context.compute import SolidExecutionContext 9 | from pytest import fixture 10 | from requests import Response 11 | 12 | from pipelines.example_pipeline import cereal_data_pipeline, download_data 13 | 14 | 15 | @fixture(scope="session") 16 | def test_data() -> BytesIO: 17 | with open("tests/test_data.csv", "r+b") as f: 18 | file_bytes = f.read() 19 | return BytesIO(file_bytes) 20 | 21 | 22 | @fixture(scope="session") 23 | def context() -> SolidExecutionContext: 24 | return build_solid_context() 25 | 26 | 27 | @patch("pipelines.example_pipeline.requests") 28 | def test_download_data_downloads_data( 29 | mock_requests: MagicMock, test_data: BytesIO, context: SolidExecutionContext 30 | ): 31 | mock_response = Response() 32 | mock_response.raw = test_data 33 | mock_requests.get.return_value = mock_response 34 | dataset = download_data(context) 35 | assert len(dataset) == 77 36 | assert len(dataset[0].keys()) == 16 37 | 38 | 39 | def test_cereal_data_pipeline(): 40 | result = execute_pipeline(cereal_data_pipeline) 41 | assert result.success 42 | -------------------------------------------------------------------------------- /demos/dagster/workspace.yaml: -------------------------------------------------------------------------------- 1 | load_from: 2 | - python_file: 3 | relative_path: repository.py 4 | executable_path: ".venv/bin/python" 5 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/.dvc/.gitignore: -------------------------------------------------------------------------------- 1 | /config.local 2 | /tmp 3 | /cache 4 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/.dvc/config: -------------------------------------------------------------------------------- 1 | [core] 2 | remote = s3 3 | ['remote "s3"'] 4 | url = s3://dvc-example-artefacts/pipelines 5 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/.dvcignore: -------------------------------------------------------------------------------- 1 | # Add patterns of files dvc should ignore, which could improve 2 | # the performance. Learn more at 3 | # https://dvc.org/doc/user-guide/dvcignore 4 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/README.md: -------------------------------------------------------------------------------- 1 | # DVC Pipelines 2 | 3 | We build upon the introductory DVC demo and add the tracking of ML artefact lineage from data and the underlying ML pipeline source code. 4 | 5 | ## Demo Objectives 6 | 7 | * How to initialise a DVC pipelines project. 8 | * How to develop a ML pipeline for use with DVC. 9 | * How to run a ML pipeline via DVC and track the resulting artefacts. 10 | * How to retrieve pipeline run metrics. 11 | 12 | ## Running the Demo 13 | 14 | This demo is contained within a single Jupyter notebook - `demos/dvc-pipelines/dvc_pipelines.ipynb`. Make sure you have the necessary Python package requirements installed into a Jupyter kernel for it to run successfully. 
25 | 
-------------------------------------------------------------------------------- /demos/dvc-pipelines/artefacts/.gitignore: --------------------------------------------------------------------------------
1 | *.joblib
2 | *.csv
3 | 
-------------------------------------------------------------------------------- /demos/dvc-pipelines/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/dvc-pipelines/requirements.txt"
6 | --8<-- "demos/dvc-pipelines/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/dvc-pipelines/docs/example_pipeline.md: --------------------------------------------------------------------------------
1 | # Example Pipeline
2 | 
3 | Each stage of a DVC pipeline needs to be defined within an executable Python module, with the end-to-end pipeline defined within a YAML file. We demonstrate how to compose a two-stage pipeline that first gets a dataset and then trains a model on this dataset, saving the model and metrics to local disk (so that the files can be tracked by DVC).
4 | 
5 | ## Shared Config
6 | 
7 | ```python title="demos/dvc-pipelines/stages/config.py"
8 | --8<-- "demos/dvc-pipelines/stages/config.py"
9 | ```
10 | 
11 | ## Stage 1: Get Training Data
12 | 
13 | ```python title="demos/dvc-pipelines/stages/get_data.py"
14 | --8<-- "demos/dvc-pipelines/stages/get_data.py"
15 | ```
16 | 
17 | ## Stage 2: Train Model
18 | 
19 | ```python title="demos/dvc-pipelines/stages/train_model.py"
20 | --8<-- "demos/dvc-pipelines/stages/train_model.py"
21 | ```
22 | 
23 | ## Pipeline Definition
24 | 
25 | ```yaml title="demos/dvc-pipelines/dvc.yaml"
26 | --8<-- "demos/dvc-pipelines/dvc.yaml"
27 | ```
28 | 
29 | ### Pipeline Parameters
30 | 
31 | ```yaml title="demos/dvc-pipelines/params.yaml"
32 | --8<-- "demos/dvc-pipelines/params.yaml"
33 | ```
34 | 
-------------------------------------------------------------------------------- /demos/dvc-pipelines/dvc.lock: --------------------------------------------------------------------------------
1 | schema: '2.0'
2 | stages:
3 |   get_data:
4 |     cmd: python stages/get_data.py
5 |     deps:
6 |     - path: stages/get_data.py
7 |       md5: 947cf9166abd9bb26357fa15a6e8c1d2
8 |       size: 388
9 |     outs:
10 |     - path: artefacts/dataset.csv
11 |       md5: 9ee47ada0122c0951fcc98bc1e26ca50
12 |       size: 38932
13 |   train_model:
14 |     cmd: python stages/train_model.py
15 |     deps:
16 |     - path: artefacts/dataset.csv
17 |       md5: 9ee47ada0122c0951fcc98bc1e26ca50
18 |       size: 38932
19 |     - path: stages/get_data.py
20 |       md5: 947cf9166abd9bb26357fa15a6e8c1d2
21 |       size: 388
22 |     params:
23 |       params.yaml:
24 |         train.random_state: 42
25 |     outs:
26 |     - path: artefacts/model.joblib
27 |       md5: 98f7f0dc59a7a416f004a31ab305f320
28 |       size: 849
29 |     - path: metrics/metrics.json
30 |       md5: 45679f988a4725b9e860c5eb0323c4f3
31 |       size: 34
32 | 
-------------------------------------------------------------------------------- /demos/dvc-pipelines/dvc.yaml: --------------------------------------------------------------------------------
1 | stages:
2 |   get_data:
3 |     cmd: python stages/get_data.py
4 |     deps:
5 |     - stages/get_data.py
6 |     outs:
7 |     - artefacts/dataset.csv
8 |   train_model:
9 |     cmd: python stages/train_model.py
10 |     deps:
11 |     - artefacts/dataset.csv
12 |     - stages/get_data.py
13 |     params:
14 |     - train.random_state
15 |     outs:
16 |     - artefacts/model.joblib
17 |     metrics:
18 |     - 
metrics/metrics.json: 19 | cache: false 20 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/dvc_pipelines.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# DVC Pipelines\n", 8 | "\n", 9 | "Orchestrating data science workflows and tracking computation artefacts and their lineage, using DVC." 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "## Initialise the Project" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 1, 22 | "metadata": {}, 23 | "outputs": [ 24 | { 25 | "name": "stdout", 26 | "output_type": "stream", 27 | "text": [ 28 | "Initialized DVC repository.\n", 29 | "\n", 30 | "You can now commit the changes to git.\n", 31 | "\n", 32 | "\u001b[31m+---------------------------------------------------------------------+\n", 33 | "\u001b[0m\u001b[31m|\u001b[0m \u001b[31m|\u001b[0m\n", 34 | "\u001b[31m|\u001b[0m DVC has enabled anonymous aggregate usage analytics. \u001b[31m|\u001b[0m\n", 35 | "\u001b[31m|\u001b[0m Read the analytics documentation (and how to opt-out) here: \u001b[31m|\u001b[0m\n", 36 | "\u001b[31m|\u001b[0m <\u001b[36mhttps://dvc.org/doc/user-guide/analytics\u001b[39m> \u001b[31m|\u001b[0m\n", 37 | "\u001b[31m|\u001b[0m \u001b[31m|\u001b[0m\n", 38 | "\u001b[31m+---------------------------------------------------------------------+\n", 39 | "\u001b[0m\n", 40 | "\u001b[33mWhat's next?\u001b[39m\n", 41 | "\u001b[33m------------\u001b[39m\n", 42 | "- Check out the documentation: <\u001b[36mhttps://dvc.org/doc\u001b[39m>\n", 43 | "- Get help and share ideas: <\u001b[36mhttps://dvc.org/chat\u001b[39m>\n", 44 | "- Star us on GitHub: <\u001b[36mhttps://github.com/iterative/dvc\u001b[39m>\n", 45 | "\u001b[0m" 46 | ] 47 | } 48 | ], 49 | "source": [ 50 | "!dvc init --subdir" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": {}, 56 | "source": [ 57 | "## Setup a Remote Artefact Location" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 2, 63 | "metadata": {}, 64 | "outputs": [ 65 | { 66 | "name": "stdout", 67 | "output_type": "stream", 68 | "text": [ 69 | "Setting 's3' as a default remote.\n", 70 | "\u001b[0m" 71 | ] 72 | } 73 | ], 74 | "source": [ 75 | "!dvc remote add -d s3 s3://dvc-example-artefacts/pipelines" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "## Define the Pipeline\n", 83 | "\n", 84 | "The pipeline is defined in a YAML file, which is reproduced below. This is all that is required to get DVC to track the various artefacts and metrics." 
85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 3, 90 | "metadata": {}, 91 | "outputs": [ 92 | { 93 | "name": "stdout", 94 | "output_type": "stream", 95 | "text": [ 96 | "stages:\n", 97 | " get_data:\n", 98 | " cmd: python stages/get_data.py\n", 99 | " deps:\n", 100 | " - stages/get_data.py\n", 101 | " outs:\n", 102 | " - artefacts/dataset.csv\n", 103 | " train_model:\n", 104 | " cmd: python stages/train_model.py\n", 105 | " deps:\n", 106 | " - artefacts/dataset.csv\n", 107 | " - stages/get_data.py\n", 108 | " params:\n", 109 | " - train.random_state\n", 110 | " outs:\n", 111 | " - artefacts/model.joblib\n", 112 | " metrics:\n", 113 | " - metrics/metrics.json:\n", 114 | " cache: false\n", 115 | " " 116 | ] 117 | } 118 | ], 119 | "source": [ 120 | "!cat dvc.yaml" 121 | ] 122 | }, 123 | { 124 | "cell_type": "markdown", 125 | "metadata": {}, 126 | "source": [ 127 | "The implied DAG can be reproduced as follows," 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 4, 133 | "metadata": {}, 134 | "outputs": [ 135 | { 136 | "name": "stdout", 137 | "output_type": "stream", 138 | "text": [ 139 | " +----------+ \n", 140 | " | get_data | \n", 141 | " +----------+ \n", 142 | " * \n", 143 | " * \n", 144 | " * \n", 145 | "+-------------+ \n", 146 | "| train_model | \n", 147 | "+-------------+ \n", 148 | "\u001b[0m" 149 | ] 150 | } 151 | ], 152 | "source": [ 153 | "!dvc dag" 154 | ] 155 | }, 156 | { 157 | "cell_type": "markdown", 158 | "metadata": {}, 159 | "source": [ 160 | "### Run the Pipeline\n", 161 | "\n", 162 | "The pipeline can be run with one command," 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 6, 168 | "metadata": {}, 169 | "outputs": [ 170 | { 171 | "name": "stdout", 172 | "output_type": "stream", 173 | "text": [ 174 | "Stage 'get_data' didn't change, skipping core\u001b[39m>\n", 175 | "Running stage 'train_model':\n", 176 | "> python stages/train_model.py\n", 177 | "Updating lock file 'dvc.lock' \n", 178 | "\n", 179 | "To track the changes with git, run:\n", 180 | "\n", 181 | " git add dvc.lock\n", 182 | "\n", 183 | "To enable auto staging, run:\n", 184 | "\n", 185 | "\tdvc config core.autostage true\n", 186 | "Use `dvc push` to send your updates to remote storage.\n", 187 | "\u001b[0m" 188 | ] 189 | } 190 | ], 191 | "source": [ 192 | "!dvc repro" 193 | ] 194 | }, 195 | { 196 | "cell_type": "markdown", 197 | "metadata": {}, 198 | "source": [ 199 | "### Version Control the Artefacts and Metrics" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 9, 205 | "metadata": {}, 206 | "outputs": [ 207 | { 208 | "name": "stdout", 209 | "output_type": "stream", 210 | "text": [ 211 | "[dvc 3cc326d] Pipeline run #1\n", 212 | " 1 file changed, 31 insertions(+)\n", 213 | " create mode 100644 dvc-pipelines/dvc.lock\n", 214 | " 0% Transferring| |0/2 [00:00\n", 259 | "metrics/metrics.json 0.07843\n", 260 | "\u001b[0m" 261 | ] 262 | } 263 | ], 264 | "source": [ 265 | "!dvc metrics show" 266 | ] 267 | }, 268 | { 269 | "cell_type": "markdown", 270 | "metadata": {}, 271 | "source": [ 272 | "## Thoughts and Conclusions\n", 273 | "\n", 274 | "It's worth noting that running `dvc pull` on a clone of this repository will pull the latest version of all the files from S3 and into the local directory. Use `dvc import` if the initial dataset exists in a different repo (e.g., in a dedicated DVC data registry)." 
275 | ] 276 | } 277 | ], 278 | "metadata": { 279 | "kernelspec": { 280 | "display_name": "Python 3.10.2 ('.venv': venv)", 281 | "language": "python", 282 | "name": "python3" 283 | }, 284 | "language_info": { 285 | "codemirror_mode": { 286 | "name": "ipython", 287 | "version": 3 288 | }, 289 | "file_extension": ".py", 290 | "mimetype": "text/x-python", 291 | "name": "python", 292 | "nbconvert_exporter": "python", 293 | "pygments_lexer": "ipython3", 294 | "version": "3.10.2" 295 | }, 296 | "orig_nbformat": 4, 297 | "vscode": { 298 | "interpreter": { 299 | "hash": "9dc4f215462ea912f4965da2670482d3eef22f25452006b3bdce86b7cb4ab1a4" 300 | } 301 | } 302 | }, 303 | "nbformat": 4, 304 | "nbformat_minor": 2 305 | } 306 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/metrics/.gitignore: -------------------------------------------------------------------------------- 1 | *.json 2 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/params.yaml: -------------------------------------------------------------------------------- 1 | train: 2 | random_state: 42 3 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/requirements.txt: -------------------------------------------------------------------------------- 1 | dvc[s3]==2.9.5 2 | pandas==1.3.5 3 | scikit-learn==1.0.2 4 | joblib==1.1.0 5 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/stages/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pipeline stage configuration. 3 | """ 4 | DATASET_FILENAME = "artefacts/dataset.csv" 5 | METRICS_FILENAME = "metrics/metrics.json" 6 | MODEL_FILENAME = "artefacts/model.joblib" 7 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/stages/get_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Stage that gets a dataset for training a ML model. 
3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | 7 | from config import DATASET_FILENAME 8 | 9 | 10 | def run_stage() -> None: 11 | x = np.random.standard_normal(1000) 12 | y = 2.0 * x + 0.1 * np.random.standard_normal(1000) 13 | df = pd.DataFrame({"y": y, "x": x}) 14 | df.to_csv(DATASET_FILENAME, index=False) 15 | 16 | 17 | if __name__ == "__main__": 18 | run_stage() 19 | -------------------------------------------------------------------------------- /demos/dvc-pipelines/stages/train_model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Train regression model on dataset 3 | """ 4 | import joblib 5 | import json 6 | import pandas as pd 7 | import yaml 8 | from sklearn.linear_model import LinearRegression 9 | from sklearn.metrics import mean_absolute_error 10 | from sklearn.model_selection import train_test_split 11 | 12 | from config import DATASET_FILENAME, METRICS_FILENAME, MODEL_FILENAME 13 | 14 | 15 | def run_stage() -> None: 16 | params = yaml.safe_load(open("params.yaml"))["train"] 17 | data = pd.read_csv(DATASET_FILENAME) 18 | X_train, X_test, y_train, y_test = train_test_split( 19 | data[["x"]], data["y"], random_state=params["random_state"] 20 | ) 21 | model = LinearRegression() 22 | model.fit(X_train, y_train) 23 | joblib.dump(model, MODEL_FILENAME) 24 | 25 | y_test_pred = model.predict(X_test) 26 | mae = mean_absolute_error(y_test, y_test_pred) 27 | with open(METRICS_FILENAME, "w") as metrics_file: 28 | json.dump({"MAE": mae}, metrics_file, indent=4) 29 | 30 | 31 | if __name__ == "__main__": 32 | run_stage() 33 | -------------------------------------------------------------------------------- /demos/dvc/.dvc/.gitignore: -------------------------------------------------------------------------------- 1 | /config.local 2 | /tmp 3 | /cache 4 | -------------------------------------------------------------------------------- /demos/dvc/.dvc/config: -------------------------------------------------------------------------------- 1 | [core] 2 | remote = s3 3 | ['remote "s3"'] 4 | url = s3://dvc-example-artefacts 5 | -------------------------------------------------------------------------------- /demos/dvc/.dvcignore: -------------------------------------------------------------------------------- 1 | # Add patterns of files dvc should ignore, which could improve 2 | # the performance. Learn more at 3 | # https://dvc.org/doc/user-guide/dvcignore 4 | -------------------------------------------------------------------------------- /demos/dvc/README.md: -------------------------------------------------------------------------------- 1 | # Managing ML Artefacts with DVC 2 | 3 | Data Version Control (DVC) is a command line tool that enables version control for ML artefacts (e.g., models and training datasets), using a Git repository and a filesystem (e.g., cloud object storage). 4 | 5 | This demo is based around version control for a dataset, but it would work in exactly the same way for any ML model serialised to a file. 6 | 7 | ## Demo Objectives 8 | 9 | * How to initialise version control for a dataset stored on AWS S3. 10 | * How to update a dataset. 11 | * How to fetch any versions of dataset. 12 | 13 | ## Running the Demo 14 | 15 | This demo is contained within a single Jupyter notebook - `demos/dvc/data_and_model_versioning.ipynb`. Make sure you have the necessary Python package requirements installed into a Jupyter kernel for it to run successfully. 
25 | 
-------------------------------------------------------------------------------- /demos/dvc/datasets/.gitignore: --------------------------------------------------------------------------------
1 | /example.csv
2 | 
-------------------------------------------------------------------------------- /demos/dvc/datasets/example.csv.dvc: --------------------------------------------------------------------------------
1 | outs:
2 | - md5: f08bbb05ad36122a35304ed5f7aa5212
3 |   size: 35
4 |   path: example.csv
5 | 
-------------------------------------------------------------------------------- /demos/dvc/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/dvc/requirements.txt"
6 | --8<-- "demos/dvc/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/dvc/requirements.txt: --------------------------------------------------------------------------------
1 | dvc[s3]==2.9.5
2 | pandas==1.3.5
3 | 
-------------------------------------------------------------------------------- /demos/ibis/README.md: --------------------------------------------------------------------------------
1 | # Ibis
2 | 
3 | [Ibis](http://ibis-project.org/) is a Python package that aims to provide a single API for composing analytical queries from a variety of database 'backends' - e.g., PostgreSQL, PySpark, Pandas, etc. The API closely resembles the PySpark DataFrame API - i.e., the aim is to represent SQL expressions using Python code.
4 | 
5 | ## Demo Objectives
6 | 
7 | * How to read data from a database table.
8 | * How to create a new column.
9 | * How to join tables.
10 | * How to perform the split-apply-combine pattern.
11 | * How to define window functions.
12 | * How to create views.
13 | * How to enable lazy evaluation.
14 | 
15 | ## Running the Demo
16 | 
17 | This demo is contained within a single Jupyter notebook - `demos/ibis/ibis_introduction.ipynb`. Make sure you have the necessary Python package requirements installed into a Jupyter kernel for it to run successfully.
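18 | 
19 | As a flavour of the API, a minimal sketch is given below. It assumes the `countries` table (and its `continent` and `population` columns) from the bundled `geography.sqlite` tutorial database - see the notebook for verified examples:
20 | 
21 | ```python
22 | import ibis
23 | 
24 | con = ibis.sqlite.connect("geography.sqlite")
25 | countries = con.table("countries")  # assumed table name
26 | 
27 | # split-apply-combine: total population by continent, largest first
28 | expr = (
29 |     countries.group_by("continent")
30 |     .aggregate(total_pop=countries.population.sum())
31 |     .sort_by(ibis.desc("total_pop"))
32 | )
33 | print(expr.execute())  # evaluation is deferred until .execute() is called
34 | ```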
35 | 
-------------------------------------------------------------------------------- /demos/ibis/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/ibis/requirements.txt"
6 | --8<-- "demos/ibis/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/ibis/geography.sqlite: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/demos/ibis/geography.sqlite
-------------------------------------------------------------------------------- /demos/ibis/requirements.txt: --------------------------------------------------------------------------------
1 | ibis-framework[sqlite]==3.0.2
2 | pandas==1.4.3
3 | 
-------------------------------------------------------------------------------- /demos/jax/README.md: --------------------------------------------------------------------------------
1 | # Introduction to JAX
2 | 
3 | [JAX](https://jax.readthedocs.io/en/latest/) is a framework for accelerated scientific computing that provides an alternative implementation of the NumPy API for linear algebra computation with added auto-differentiation and Just-in-Time (JIT) compilation. Unlike similar frameworks - e.g., PyTorch or TensorFlow - it works within a purely [functional programming](https://en.wikipedia.org/wiki/Functional_programming) paradigm.
4 | 
5 | The [Flax](https://flax.readthedocs.io/en/latest/index.html) and [Optax](https://optax.readthedocs.io/en/latest/index.html) packages extend JAX's capabilities to cover the easy definition and training of deep learning models.
6 | 
7 | ## Demo Objectives
8 | 
9 | * How to manipulate tensors - i.e., JAX as an alternative to NumPy.
10 | * How to use auto-differentiation and minimise arbitrary functions.
11 | * How to build and train ML models from first principles - linear regression.
12 | * How to build and train a deep learning model for image classification using Flax and Optax.
13 | 
14 | ## Running the Demo
15 | 
16 | This demo spans several Jupyter notebooks:
17 | 
18 | * `demos/jax/introduction_to_jax.ipynb`.
19 | * `demos/jax/linear_regression.ipynb`.
20 | * `demos/jax/mnist_with_flax_and_optax.ipynb`.
21 | 
22 | Make sure you have the necessary Python package requirements installed into a Jupyter kernel for them to run successfully.
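23 | 
24 | To give a flavour of what the notebooks cover, here is a minimal, self-contained sketch of JAX's two headline features - auto-differentiation and JIT compilation:
25 | 
26 | ```python
27 | import jax
28 | import jax.numpy as jnp
29 | 
30 | 
31 | def loss(w):
32 |     """A toy quadratic loss with its minimum at w = 3."""
33 |     return jnp.sum((w - 3.0) ** 2)
34 | 
35 | 
36 | grad_loss = jax.jit(jax.grad(loss))  # compose JIT compilation with autodiff
37 | print(grad_loss(jnp.zeros(3)))       # gradient of the loss -> [-6. -6. -6.]
38 | ```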
39 | 
-------------------------------------------------------------------------------- /demos/jax/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/jax/requirements.txt"
6 | --8<-- "demos/jax/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/jax/requirements.txt: --------------------------------------------------------------------------------
1 | flax==0.6.1
2 | jax==0.3.24
3 | jaxlib==0.3.24
4 | matplotlib==3.6.2
5 | numpy==1.23.1
6 | pandas==1.4.3
7 | seaborn==0.11.2
8 | tqdm==4.64.0
9 | torch==1.13.0
10 | torchvision==0.14.0
11 | 
-------------------------------------------------------------------------------- /demos/mlflow/README.md: --------------------------------------------------------------------------------
1 | # Managing the ML Lifecycle using MLflow
2 | 
3 | The ML lifecycle covers the following stages of a ML engineering project:
4 | 
5 | * data preparation
6 | * training
7 | * deployment
8 | 
9 | [MLflow](https://mlflow.org) is an open-source framework that supports the ML lifecycle by tracking training metrics, storing trained models and deploying models.
10 | 
11 | ## Demo Objectives
12 | 
13 | * How to set up and configure MLflow.
14 | * How to track metrics.
15 | * How to track metrics during hyper-parameter optimisation.
16 | * How to save a model to the MLflow registry.
17 | * How to retrieve the latest model tagged for production.
18 | * How to serve model predictions via a REST API.
19 | 
20 | ## Running the Demo
21 | 
22 | This demo is contained within a single Jupyter notebook - `demos/mlflow/mlflow_basics.ipynb`. Make sure you have the necessary Python package requirements installed into a Jupyter kernel for it to run successfully.
23 | 
-------------------------------------------------------------------------------- /demos/mlflow/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/mlflow/requirements.txt"
6 | --8<-- "demos/mlflow/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/mlflow/requirements.txt: --------------------------------------------------------------------------------
1 | jupyter==1.0.0
2 | mlflow==1.12.1
3 | scikit-learn==1.0.0
4 | pandas==1.1.5
5 | tqdm==4.54.1
6 | numpy==1.19.4
7 | 
-------------------------------------------------------------------------------- /demos/pydantic/README.md: --------------------------------------------------------------------------------
1 | # Runtime Type Validation with Pydantic
2 | 
3 | [Pydantic](https://docs.pydantic.dev/latest/) is a framework for executing runtime type validation - e.g., for validating data sent to an API against a pre-defined data model.
4 | 
5 | This demo shows how Pydantic can be used to validate a config file (written in YAML) against a pre-defined schema.
6 | 
7 | ## Demo Objectives
8 | 
9 | * Define a data model.
10 | * Use the data model to validate config loaded from a YAML file.
11 | * Show the difference between validating data in a `dict` and as part of data model instantiation.
12 | * Demonstrate how validation errors can be handled and reported to users - see the sketch below.
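13 | 
14 | The error-handling pattern at the heart of the demo is sketched below - see `load_config.py` for the full version:
15 | 
16 | ```python
17 | import yaml
18 | from pydantic import ValidationError
19 | 
20 | from config_schema import ConfigV1
21 | 
22 | raw_config = yaml.safe_load(open("config.yaml"))  # parse first, validate second
23 | 
24 | try:
25 |     config = ConfigV1(**raw_config)  # raises if the config violates the schema
26 | except ValidationError as e:
27 |     print(e)  # one human-readable message per failed field
28 | ```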
29 | 
30 | ## Running the Demo
31 | 
32 | This demo is contained in a single Python module, `load_config.py`, that can be run using:
33 | 
34 | ```text
35 | $ python -m load_config
36 | ```
37 | 
-------------------------------------------------------------------------------- /demos/pydantic/config.yaml: --------------------------------------------------------------------------------
1 | # example config values that we will use Pydantic to validate
2 | 
3 | SCHEMA_VERSION: "0.1"
4 | 
5 | PROJECT_ID: 012345
6 | PROJECT_ENV: prod
7 | PROJECT_URL: http://foo.com/bar.html
8 | 
9 | USER_CERT:
10 |   secret_resource_name: http://foo.com/secrets/
11 |   filename: README.md
12 | 
13 | USERNAME:
14 |   env_var: foo-bar-1
15 | 
-------------------------------------------------------------------------------- /demos/pydantic/config_schema.py: --------------------------------------------------------------------------------
1 | """Config file schema defined using Pydantic."""
2 | 
3 | from typing import Literal, Optional
4 | 
5 | from pydantic import BaseModel, ConfigDict, FilePath, HttpUrl
6 | 
7 | 
8 | class _UserCert(BaseModel):
9 |     """Schema for User certificate config components."""
10 | 
11 |     secret_resource_name: HttpUrl
12 |     filename: FilePath
13 | 
14 | 
15 | class ConfigV1(BaseModel):
16 |     """Schema for PROJECT config values."""
17 | 
18 |     model_config = ConfigDict(
19 |         frozen=True, extra="allow"
20 |     )  # make immutable and allow extra fields
21 | 
22 |     SCHEMA_VERSION: Literal["0.1"]
23 |     PROJECT_ID: int
24 |     PROJECT_ENV: Literal["dev", "test", "prod"]
25 |     USER_CERT: _UserCert
26 |     USER_TAG: Optional[str] = None
27 | 
-------------------------------------------------------------------------------- /demos/pydantic/docs/define_data_model.md: --------------------------------------------------------------------------------
1 | # Defining a Data Model
2 | 
3 | A data model for a hypothetical config file can be defined as:
4 | 
5 | ```python title="demos/pydantic/config_schema.py"
6 | --8<-- "demos/pydantic/config_schema.py"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/pydantic/docs/demo_requirements.md: --------------------------------------------------------------------------------
1 | # Demo Requirements
2 | 
3 | This demo depends on the following Python packages:
4 | 
5 | ```text title="demos/pydantic/requirements.txt"
6 | --8<-- "demos/pydantic/requirements.txt"
7 | ```
8 | 
-------------------------------------------------------------------------------- /demos/pydantic/docs/validating_runtime_data.md: --------------------------------------------------------------------------------
1 | # Validating Runtime Data
2 | 
3 | The data model can be imported and used to validate config loaded from YAML files.
Two different approaches to runtime validation are demonstrated:
4 | 
5 | ```python title="demos/pydantic/load_config.py"
6 | --8<-- "demos/pydantic/load_config.py"
7 | ```
8 | 
9 | Given a config file,
10 | 
11 | ```yaml title="demos/pydantic/config.yaml"
12 | --8<-- "demos/pydantic/config.yaml"
13 | ```
14 | 
15 | The output from `load_config.py` is,
16 | 
17 | ```text
18 | (1) config as ConfigV1 object:
19 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
20 | { 'PROJECT_ENV': 'prod',
21 |   'PROJECT_ID': 5349,
22 |   'PROJECT_URL': 'http://foo.com/bar.html',
23 |   'SCHEMA_VERSION': '0.1',
24 |   'USERNAME': {'env_var': 'foo-bar-1'},
25 |   'USER_CERT': { 'filename': PosixPath('README.md'),
26 |                  'secret_resource_name': Url('http://foo.com/secrets/')},
27 |   'USER_TAG': None}
28 | 
29 | (2) config as dict:
30 | ~~~~~~~~~~~~~~~~~~~
31 | { 'PROJECT_ENV': 'prod',
32 |   'PROJECT_ID': 5349,
33 |   'PROJECT_URL': 'http://foo.com/bar.html',
34 |   'SCHEMA_VERSION': '0.1',
35 |   'USERNAME': {'env_var': 'foo-bar-1'},
36 |   'USER_CERT': { 'filename': 'README.md',
37 |                  'secret_resource_name': 'http://foo.com/secrets/'}}
38 | ```
39 | 
40 | Note how config not defined in the schema (e.g., `PROJECT_URL` and `USERNAME`) has been allowed to pass through in both cases, because the model is configured with `extra="allow"` - although only the fields defined in the schema are parsed into richer types, and `USER_TAG` is added with its default value when instantiating the data model.
41 | 
42 | If we manually invalidate a couple of the config values then we can also take a look at how errors are formatted:
43 | 
44 | ```text
45 | (1) config as ConfigV1 object:
46 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
47 | 2 validation errors for ConfigV1
48 | PROJECT_ENV
49 |   Input should be 'dev', 'test' or 'prod' [type=literal_error, input_value='run', input_type=str]
50 |     For further information visit https://errors.pydantic.dev/2.6/v/literal_error
51 | USER_CERT.filename
52 |   Path does not point to a file [type=path_not_file, input_value='README.mdz', input_type=str]
53 | ```
54 | 
-------------------------------------------------------------------------------- /demos/pydantic/load_config.py: --------------------------------------------------------------------------------
1 | """Demoing how to use Pydantic to get schema-valid config from a YAML file."""
2 | 
3 | from pathlib import Path
4 | from pprint import pprint
5 | from typing import Any
6 | 
7 | import yaml
8 | from pydantic import ValidationError
9 | 
10 | from config_schema import ConfigV1
11 | 
12 | 
13 | def get_config(file: Path = Path.cwd() / "config.yaml") -> ConfigV1:
14 |     """Get validated config as an instance of the data model."""
15 |     with open(file) as f:
16 |         raw_config: dict[str, Any] = yaml.safe_load(f)
17 |     return ConfigV1(**raw_config)
18 | 
19 | 
20 | def get_config_as_dict(file: Path = Path.cwd() / "config.yaml") -> dict[str, Any]:
21 |     """Get config as a dictionary that has been validated against the data model."""
22 |     with open(file) as f:
23 |         raw_config: dict[str, Any] = yaml.safe_load(f)
24 |     ConfigV1.model_validate(raw_config)
25 |     return raw_config
26 | 
27 | 
28 | if __name__ == "__main__":
29 |     try:
30 |         print("\n(1) config as ConfigV1 object:")
31 |         print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
32 |         config = get_config()
33 |         pprint(config.model_dump(), indent=2)
34 | 
35 |         print("\n(2) config as dict:")
36 |         print("~~~~~~~~~~~~~~~~~~~")
37 |         config_dict = get_config_as_dict()
38 |         pprint(config_dict, indent=2)
39 | 
40 |     except ValidationError as e:
41 |         print(e)
42 | 
-------------------------------------------------------------------------------- /demos/pydantic/requirements.txt:
-------------------------------------------------------------------------------- 1 | pydantic==2.6.* 2 | PyYAML==6.0.* 3 | -------------------------------------------------------------------------------- /demos/pytorch/README.md: -------------------------------------------------------------------------------- 1 | # Introduction to PyTorch 2 | 3 | [PyTorch](https://pytorch.org) is an ML framework that provides NumPy-like tensor computation together with the fundamental building blocks for constructing and training deep learning models. 4 | 5 | ## Demo Objectives 6 | 7 | * How to manipulate tensors - i.e., PyTorch as an alternative to NumPy. 8 | * How to use auto-differentiation and minimise arbitrary functions with gradient descent. 9 | * How to create custom data loaders for efficient model training. 10 | * How to build and train ML models from first principles - linear and logistic regression. 11 | * How to build and train a deep learning model for image classification. 12 | * How the PyTorch Lightning framework streamlines the deep learning workflow. 13 | 14 | ## Running the Demo 15 | 16 | This demo spans several Jupyter notebooks: 17 | 18 | * `demos/pytorch/tensors.ipynb`. 19 | * `demos/pytorch/datasets.ipynb`. 20 | * `demos/pytorch/linear_regression_sgd.ipynb`. 21 | * `demos/pytorch/logistic_regression_sgd.ipynb`. 22 | * `demos/pytorch/MNIST.ipynb`. 23 | * `demos/pytorch/MNIST_pytorch_lightning.ipynb`. 24 | 25 | Make sure you have the necessary Python package requirements installed into a Jupyter kernel for them to run successfully. 26 | -------------------------------------------------------------------------------- /demos/pytorch/datasets.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "detailed-skirt", 6 | "metadata": {}, 7 | "source": [ 8 | "# Datasets\n", 9 | "\n", 10 | "How to work with datasets used for training and testing models." 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "id": "adaptive-woman", 16 | "metadata": {}, 17 | "source": [ 18 | "## Imports" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "id": "nonprofit-lawsuit", 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "from typing import Callable, Tuple\n", 29 | "\n", 30 | "import numpy as np\n", 31 | "import torch\n", 32 | "from torch.utils.data import Dataset, DataLoader" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "id": "pregnant-colombia", 38 | "metadata": {}, 39 | "source": [ 40 | "## Custom Dataset Classes\n", 41 | "\n", 42 | "From the docs:\n", 43 | "\n", 44 | "> \"_All datasets that represent a map from keys to data samples should subclass\n", 45 | " it. All subclasses should overwrite :meth:`__getitem__`, supporting fetching a\n", 46 | " data sample for a given key. Subclasses could also optionally overwrite\n", 47 | " :meth:`__len__`, which is expected to return the size of the dataset by many\n", 48 | " :class:`~torch.utils.data.Sampler` implementations and the default options\n", 49 | " of :class:`~torch.utils.data.DataLoader`._\"\n", 50 | " \n", 51 | "See the docs for more and for subclasses of `Dataset` - e.g. `IterableDataset`."
52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 2, 57 | "id": "acceptable-providence", 58 | "metadata": {}, 59 | "outputs": [ 60 | { 61 | "name": "stdout", 62 | "output_type": "stream", 63 | "text": [ 64 | "regression_data size = 100\n", 65 | "regression_data[5] = (tensor(-0.5618, dtype=torch.float64), tensor(-0.5182, dtype=torch.float64))\n" 66 | ] 67 | } 68 | ], 69 | "source": [ 70 | "Transformer = Callable[[Tuple[float, float]], Tuple[float, float]]\n", 71 | "\n", 72 | "\n", 73 | "class MyRandomRegressionDataset(Dataset):\n", 74 | " def __init__(self, n_samples: int, transform: Transformer = None):\n", 75 | " self.n_samples = n_samples\n", 76 | " self.transform = transform\n", 77 | " X = np.random.normal(0, 1, n_samples)\n", 78 | " y = 0.5 * X + np.sqrt(0.75) * np.random.normal(0, 1, n_samples)\n", 79 | " self.X = torch.from_numpy(X)\n", 80 | " self.y = torch.from_numpy(y)\n", 81 | "\n", 82 | " def __len__(self) -> int:\n", 83 | " return self.n_samples\n", 84 | "\n", 85 | " def __getitem__(self, idx) -> Tuple[float, float]:\n", 86 | " sample = (self.X[idx], self.y[idx])\n", 87 | " if self.transform:\n", 88 | " return self.transform(sample)\n", 89 | " else:\n", 90 | " return (self.X[idx], self.y[idx])\n", 91 | "\n", 92 | "\n", 93 | "regression_data = MyRandomRegressionDataset(100)\n", 94 | "print(f\"regression_data size = {len(regression_data)}\")\n", 95 | "print(f\"regression_data[5] = {regression_data[5]}\")" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "id": "pregnant-updating", 101 | "metadata": {}, 102 | "source": [ 103 | "### Transformers\n", 104 | "\n", 105 | "Data transformations can be handled at load-time on a sample-by-sample basis." 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": 3, 111 | "id": "opened-processor", 112 | "metadata": {}, 113 | "outputs": [ 114 | { 115 | "name": "stdout", 116 | "output_type": "stream", 117 | "text": [ 118 | "regression_data_scaled[5] = (tensor(2.3606, dtype=torch.float64), tensor(-1.7589, dtype=torch.float64))\n" 119 | ] 120 | } 121 | ], 122 | "source": [ 123 | "class ScalarTransform:\n", 124 | " def __init__(self, multiplier: float):\n", 125 | " self.multiplier = multiplier\n", 126 | "\n", 127 | " def __call__(self, sample: Tuple[float, float]) -> Tuple[float, float]:\n", 128 | " X_trans = self.multiplier * sample[0]\n", 129 | " y_trans = self.multiplier * sample[1]\n", 130 | " return (X_trans, y_trans)\n", 131 | "\n", 132 | "\n", 133 | "regression_data_scaled = MyRandomRegressionDataset(100, transform=ScalarTransform(2))\n", 134 | "print(f\"regression_data_scaled[5] = {regression_data_scaled[5]}\")" 135 | ] 136 | } 137 | ], 138 | "metadata": { 139 | "kernelspec": { 140 | "display_name": ".venv_pytorch", 141 | "language": "python", 142 | "name": "python3" 143 | }, 144 | "language_info": { 145 | "codemirror_mode": { 146 | "name": "ipython", 147 | "version": 3 148 | }, 149 | "file_extension": ".py", 150 | "mimetype": "text/x-python", 151 | "name": "python", 152 | "nbconvert_exporter": "python", 153 | "pygments_lexer": "ipython3", 154 | "version": "3.10.2" 155 | }, 156 | "vscode": { 157 | "interpreter": { 158 | "hash": "53195f2e71dcfe3ea716e20379841f9508c7537c854fc223ac4e4b5eed7dc1d5" 159 | } 160 | } 161 | }, 162 | "nbformat": 4, 163 | "nbformat_minor": 5 164 | } 165 | -------------------------------------------------------------------------------- /demos/pytorch/docs/demo_requirements.md: -------------------------------------------------------------------------------- 1 | # Demo 
Requirements 2 | 3 | This demo depends on the following Python packages: 4 | 5 | ```text title="demos/pytorch/requirements.txt" 6 | --8<-- "demos/pytorch/requirements.txt" 7 | ``` 8 | -------------------------------------------------------------------------------- /demos/pytorch/requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib==3.6.2 2 | numpy==1.23.5 3 | pandas==1.5.2 4 | pytorch-lightning==1.8.5 5 | seaborn==0.12.1 6 | torch==1.13.0 7 | torchvision==0.14.0 8 | tqdm==4.64.1 9 | -------------------------------------------------------------------------------- /demos/spacy/README.md: -------------------------------------------------------------------------------- 1 | # NLP Pipeline Basics with SpaCy 2 | 3 | From tokenising words to Named Entity Recognition (NER) and everything in-between, SpaCy provides all the NLP tools you are likely to need for basic text processing and for creating features for training ML models on text data. 4 | 5 | ## Demo Objectives 6 | 7 | * How to load a document and access sentences, tokens, and part-of-speech tags. 8 | * How to perform rule-based matching and phrase detection. 9 | * How to perform named entity recognition. 10 | 11 | ## Running the Demo 12 | 13 | This demo is contained within a single Jupyter notebook - `demos/spacy/spacy_101.ipynb`. Make sure you have the necessary Python package requirements installed into a Jupyter kernel for it to run successfully. 14 | -------------------------------------------------------------------------------- /demos/spacy/docs/demo_requirements.md: -------------------------------------------------------------------------------- 1 | # Demo Requirements 2 | 3 | This demo depends on the following Python packages: 4 | 5 | ```text title="demos/spacy/requirements.txt" 6 | --8<-- "demos/spacy/requirements.txt" 7 | ``` 8 | -------------------------------------------------------------------------------- /demos/spacy/requirements.txt: -------------------------------------------------------------------------------- 1 | en-core-web-sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.0.0/en_core_web_sm-3.0.0-py3-none-any.whl 2 | spacy==3.0.6 3 | -------------------------------------------------------------------------------- /demos/sqlalchemy/README.md: -------------------------------------------------------------------------------- 1 | # Object-Relational Mapping using SQL Alchemy 2 | 3 | [SQL Alchemy](https://docs.sqlalchemy.org/en/14/index.html) is a Python package providing tools that facilitate structured database interaction via Python objects - i.e., for [Object-Relational Mapping (ORM)](https://en.wikipedia.org/wiki/Object–relational_mapping). [Alembic](https://alembic.sqlalchemy.org/en/latest/) is a Python package for managing database schema migrations based on the data models defined with SQL Alchemy - i.e., Python classes. 4 | 5 | ## Demo Objectives 6 | 7 | * How to define a data model using SQL Alchemy. 8 | * How to interact with a database via SQL Alchemy's ORM. 9 | * How to use Alembic to manage database schema migrations. 10 | 11 | ## Running the Demo 12 | 13 | This demo is contained primarily within a single Jupyter notebook - `demos/sqlalchemy/sql-alchemy-basics.ipynb` - that calls upon the data models defined in `demos/sqlalchemy/models.py`. Make sure you have the necessary Python package requirements installed into a Jupyter kernel for it to run successfully.
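For orientation, the core ORM workflow that the notebook walks through looks roughly like this - a minimal sketch using the `Person` and `Address` models from `demos/sqlalchemy/models.py`, where the in-memory SQLite engine URL and the example data are assumptions for illustration (the demo's Alembic config targets `sqlite:///data.db`):

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from models import Address, Base, Person

# throwaway in-memory SQLite database, for illustration only
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)  # create tables from the model metadata

Session = sessionmaker(bind=engine)
session = Session()

# inserting rows is just creating objects and adding them to a session
address = Address(street="1 Main Street", city="London", postcode="N1 9GU")
session.add(Person(name="Alex", age=42.0, address=address))
session.commit()

# querying returns mapped objects - no SQL required
for person in session.query(Person).filter(Person.age > 21):
    print(person.name, person.address.city)
```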
14 | -------------------------------------------------------------------------------- /demos/sqlalchemy/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = migrations 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # sys.path path, will be prepended to sys.path if present. 11 | # defaults to the current working directory. 12 | prepend_sys_path = . 13 | 14 | # timezone to use when rendering the date within the migration file 15 | # as well as the filename. 16 | # If specified, requires the python-dateutil library that can be 17 | # installed by adding `alembic[tz]` to the pip requirements 18 | # string value is passed to dateutil.tz.gettz() 19 | # leave blank for localtime 20 | # timezone = 21 | 22 | # max length of characters to apply to the 23 | # "slug" field 24 | # truncate_slug_length = 40 25 | 26 | # set to 'true' to run the environment during 27 | # the 'revision' command, regardless of autogenerate 28 | # revision_environment = false 29 | 30 | # set to 'true' to allow .pyc and .pyo files without 31 | # a source .py file to be detected as revisions in the 32 | # versions/ directory 33 | # sourceless = false 34 | 35 | # version location specification; This defaults 36 | # to migrations/versions. When using multiple version 37 | # directories, initial revisions must be specified with --version-path. 38 | # The path separator used here should be the separator specified by "version_path_separator" 39 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 40 | 41 | # version path separator; As mentioned above, this is the character used to split 42 | # version_locations. Valid values are: 43 | # 44 | # version_path_separator = : 45 | # version_path_separator = ; 46 | # version_path_separator = space 47 | version_path_separator = os # default: use os.pathsep 48 | 49 | # the output encoding used when revision files 50 | # are written from script.py.mako 51 | # output_encoding = utf-8 52 | 53 | sqlalchemy.url = sqlite:///data.db 54 | 55 | 56 | [post_write_hooks] 57 | # post_write_hooks defines scripts or Python functions that are run 58 | # on newly generated revision scripts. 
See the documentation for further 59 | # detail and examples 60 | 61 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 62 | # hooks = black 63 | # black.type = console_scripts 64 | # black.entrypoint = black 65 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 66 | 67 | # Logging configuration 68 | [loggers] 69 | keys = root,sqlalchemy,alembic 70 | 71 | [handlers] 72 | keys = console 73 | 74 | [formatters] 75 | keys = generic 76 | 77 | [logger_root] 78 | level = WARN 79 | handlers = console 80 | qualname = 81 | 82 | [logger_sqlalchemy] 83 | level = WARN 84 | handlers = 85 | qualname = sqlalchemy.engine 86 | 87 | [logger_alembic] 88 | level = INFO 89 | handlers = 90 | qualname = alembic 91 | 92 | [handler_console] 93 | class = StreamHandler 94 | args = (sys.stderr,) 95 | level = NOTSET 96 | formatter = generic 97 | 98 | [formatter_generic] 99 | format = %(levelname)-5.5s [%(name)s] %(message)s 100 | datefmt = %H:%M:%S 101 | -------------------------------------------------------------------------------- /demos/sqlalchemy/docs/data_models.md: -------------------------------------------------------------------------------- 1 | # ORM Basics 2 | 3 | SQL Alchemy uses classes to define database table schemas, with class attributes defining the type of each column. These classes, and the objects created from them, can then be used to read from and write to the table without the need to write any SQL, as each object is mapped to a row in the table; this is object-relational mapping. 4 | 5 | The script below defines two classes that specify the schema for two tables - `Person` and `Address` - which are related to one another via a one-to-many relationship. 6 | 7 | ```python title="demos/sqlalchemy/models.py" 8 | --8<-- "demos/sqlalchemy/models.py" 9 | ``` 10 | -------------------------------------------------------------------------------- /demos/sqlalchemy/docs/demo_requirements.md: -------------------------------------------------------------------------------- 1 | # Demo Requirements 2 | 3 | This demo depends on the following Python packages: 4 | 5 | ```text title="demos/sqlalchemy/requirements.txt" 6 | --8<-- "demos/sqlalchemy/requirements.txt" 7 | ``` 8 | -------------------------------------------------------------------------------- /demos/sqlalchemy/migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /demos/sqlalchemy/migrations/env.py: -------------------------------------------------------------------------------- 1 | from logging.config import fileConfig 2 | 3 | from sqlalchemy import engine_from_config 4 | from sqlalchemy import pool 5 | 6 | from alembic import context 7 | 8 | # this is the Alembic Config object, which provides 9 | # access to the values within the .ini file in use. 10 | config = context.config 11 | 12 | # Interpret the config file for Python logging. 13 | # This line sets up loggers basically. 14 | fileConfig(config.config_file_name) 15 | 16 | # add your model's MetaData object here 17 | # for 'autogenerate' support 18 | from models import Base 19 | target_metadata = Base.metadata 20 | 21 | # other values from the config, defined by the needs of env.py, 22 | # can be acquired: 23 | # my_important_option = config.get_main_option("my_important_option") 24 | # ... etc. 25 | 26 | 27 | def run_migrations_offline(): 28 | """Run migrations in 'offline' mode.
29 | 30 | This configures the context with just a URL 31 | and not an Engine, though an Engine is acceptable 32 | here as well. By skipping the Engine creation 33 | we don't even need a DBAPI to be available. 34 | 35 | Calls to context.execute() here emit the given string to the 36 | script output. 37 | 38 | """ 39 | url = config.get_main_option("sqlalchemy.url") 40 | context.configure( 41 | url=url, 42 | target_metadata=target_metadata, 43 | literal_binds=True, 44 | dialect_opts={"paramstyle": "named"}, 45 | ) 46 | 47 | with context.begin_transaction(): 48 | context.run_migrations() 49 | 50 | 51 | def run_migrations_online(): 52 | """Run migrations in 'online' mode. 53 | 54 | In this scenario we need to create an Engine 55 | and associate a connection with the context. 56 | 57 | """ 58 | connectable = engine_from_config( 59 | config.get_section(config.config_ini_section), 60 | prefix="sqlalchemy.", 61 | poolclass=pool.NullPool, 62 | ) 63 | 64 | with connectable.connect() as connection: 65 | context.configure( 66 | connection=connection, target_metadata=target_metadata 67 | ) 68 | 69 | with context.begin_transaction(): 70 | context.run_migrations() 71 | 72 | 73 | if context.is_offline_mode(): 74 | run_migrations_offline() 75 | else: 76 | run_migrations_online() 77 | -------------------------------------------------------------------------------- /demos/sqlalchemy/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /demos/sqlalchemy/migrations/versions/c31efd831ee7_initial_migration.py: -------------------------------------------------------------------------------- 1 | """Initial Migration 2 | 3 | Revision ID: c31efd831ee7 4 | Revises: 5 | Create Date: 2021-09-15 19:37:00.295751 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'c31efd831ee7' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('address', 22 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), 23 | sa.Column('street', sa.String(), nullable=False), 24 | sa.Column('city', sa.String(), nullable=False), 25 | sa.Column('postcode', sa.String(), nullable=False), 26 | sa.PrimaryKeyConstraint('id') 27 | ) 28 | op.create_table('person', 29 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), 30 | sa.Column('address_id', sa.Integer(), nullable=True), 31 | sa.Column('name', sa.String(), nullable=False), 32 | sa.Column('age', sa.Float(), nullable=False), 33 | sa.ForeignKeyConstraint(['address_id'], ['address.id'], ), 34 | sa.PrimaryKeyConstraint('id') 35 | ) 36 | # ### end Alembic commands ### 37 | 38 | 39 | def downgrade(): 40 | # ### commands auto generated by Alembic - please adjust! ### 41 | op.drop_table('person') 42 | op.drop_table('address') 43 | # ### end Alembic commands ### 44 | -------------------------------------------------------------------------------- /demos/sqlalchemy/models.py: -------------------------------------------------------------------------------- 1 | """ 2 | All data models can be found here. 3 | """ 4 | from typing import Any, Dict 5 | 6 | from sqlalchemy import ( 7 | Column, 8 | Float, 9 | Integer, 10 | String, 11 | ForeignKey 12 | ) 13 | from sqlalchemy.ext.declarative import declarative_base 14 | from sqlalchemy.orm import relationship 15 | 16 | Base = declarative_base() 17 | 18 | 19 | class Person(Base): 20 | """Personal information.""" 21 | 22 | __tablename__ = "person" 23 | 24 | id = Column(Integer, primary_key=True, autoincrement=True) 25 | address_id = Column(Integer, ForeignKey("address.id")) 26 | name = Column(String, nullable=False) 27 | age = Column(Float, nullable=False) 28 | 29 | address = relationship("Address", back_populates="person") 30 | 31 | def dict(self) -> Dict[str, Any]: 32 | return { 33 | "id": self.id, 34 | "address": self.address_id, 35 | "name": self.name, 36 | "age": self.age 37 | } 38 | 39 | 40 | class Address(Base): 41 | """Address information.""" 42 | 43 | __tablename__ = "address" 44 | 45 | id = Column(Integer, primary_key=True, autoincrement=True) 46 | street = Column(String, nullable=False) 47 | city = Column(String, nullable=False) 48 | postcode = Column(String, nullable=False) 49 | 50 | person = relationship("Person", back_populates="address") 51 | 52 | def dict(self) -> Dict[str, Any]: 53 | return { 54 | "id": self.id, 55 | "street": self.street, 56 | "city": self.city, 57 | "postcode": self.postcode 58 | } 59 | 60 | -------------------------------------------------------------------------------- /demos/sqlalchemy/requirements.txt: -------------------------------------------------------------------------------- 1 | SQLAlchemy==1.4.23 2 | alembic==1.7.1 3 | -------------------------------------------------------------------------------- /docs/assets/images/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/docs/assets/images/favicon.png -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.da.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Danish` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*!
9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.da=function(){this.pipeline.reset(),this.pipeline.add(e.da.trimmer,e.da.stopWordFilter,e.da.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.da.stemmer))},e.da.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.da.trimmer=e.trimmerSupport.generateTrimmer(e.da.wordCharacters),e.Pipeline.registerFunction(e.da.trimmer,"trimmer-da"),e.da.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){var e,r=f.cursor+3;if(d=f.limit,0<=r&&r<=f.limit){for(a=r;;){if(e=f.cursor,f.in_grouping(w,97,248)){f.cursor=e;break}if(f.cursor=e,e>=f.limit)return;f.cursor++}for(;!f.out_grouping(w,97,248);){if(f.cursor>=f.limit)return;f.cursor++}d=f.cursor,d=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(c,32),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del();break;case 2:f.in_grouping_b(p,97,229)&&f.slice_del()}}function t(){var e,r=f.limit-f.cursor;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.find_among_b(l,4)?(f.bra=f.cursor,f.limit_backward=e,f.cursor=f.limit-r,f.cursor>f.limit_backward&&(f.cursor--,f.bra=f.cursor,f.slice_del())):f.limit_backward=e)}function s(){var e,r,i,n=f.limit-f.cursor;if(f.ket=f.cursor,f.eq_s_b(2,"st")&&(f.bra=f.cursor,f.eq_s_b(2,"ig")&&f.slice_del()),f.cursor=f.limit-n,f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(m,5),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del(),i=f.limit-f.cursor,t(),f.cursor=f.limit-i;break;case 2:f.slice_from("løs")}}function o(){var e;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.out_grouping_b(w,97,248)?(f.bra=f.cursor,u=f.slice_to(u),f.limit_backward=e,f.eq_v_b(u)&&f.slice_del()):f.limit_backward=e)}var a,d,u,c=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],l=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],w=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],p=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],f=new i;this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var r=f.cursor;return 
e(),f.limit_backward=r,f.cursor=f.limit,n(),f.cursor=f.limit,t(),f.cursor=f.limit,s(),f.cursor=f.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.de.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `German` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.de=function(){this.pipeline.reset(),this.pipeline.add(e.de.trimmer,e.de.stopWordFilter,e.de.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.de.stemmer))},e.de.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.de.trimmer=e.trimmerSupport.generateTrimmer(e.de.wordCharacters),e.Pipeline.registerFunction(e.de.trimmer,"trimmer-de"),e.de.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,n){return!(!v.eq_s(1,e)||(v.ket=v.cursor,!v.in_grouping(p,97,252)))&&(v.slice_from(r),v.cursor=n,!0)}function i(){for(var r,n,i,s,t=v.cursor;;)if(r=v.cursor,v.bra=r,v.eq_s(1,"ß"))v.ket=v.cursor,v.slice_from("ss");else{if(r>=v.limit)break;v.cursor=r+1}for(v.cursor=t;;)for(n=v.cursor;;){if(i=v.cursor,v.in_grouping(p,97,252)){if(s=v.cursor,v.bra=s,e("u","U",i))break;if(v.cursor=s,e("y","Y",i))break}if(i>=v.limit)return void(v.cursor=n);v.cursor=i+1}}function s(){for(;!v.in_grouping(p,97,252);){if(v.cursor>=v.limit)return!0;v.cursor++}for(;!v.out_grouping(p,97,252);){if(v.cursor>=v.limit)return!0;v.cursor++}return!1}function t(){m=v.limit,l=m;var e=v.cursor+3;0<=e&&e<=v.limit&&(d=e,s()||(m=v.cursor,m=v.limit)return;v.cursor++}}}function c(){return m<=v.cursor}function u(){return l<=v.cursor}function a(){var e,r,n,i,s=v.limit-v.cursor;if(v.ket=v.cursor,(e=v.find_among_b(w,7))&&(v.bra=v.cursor,c()))switch(e){case 1:v.slice_del();break;case 2:v.slice_del(),v.ket=v.cursor,v.eq_s_b(1,"s")&&(v.bra=v.cursor,v.eq_s_b(3,"nis")&&v.slice_del());break;case 3:v.in_grouping_b(g,98,116)&&v.slice_del()}if(v.cursor=v.limit-s,v.ket=v.cursor,(e=v.find_among_b(f,4))&&(v.bra=v.cursor,c()))switch(e){case 1:v.slice_del();break;case 2:if(v.in_grouping_b(k,98,116)){var t=v.cursor-3;v.limit_backward<=t&&t<=v.limit&&(v.cursor=t,v.slice_del())}}if(v.cursor=v.limit-s,v.ket=v.cursor,(e=v.find_among_b(_,8))&&(v.bra=v.cursor,u()))switch(e){case 1:v.slice_del(),v.ket=v.cursor,v.eq_s_b(2,"ig")&&(v.bra=v.cursor,r=v.limit-v.cursor,v.eq_s_b(1,"e")||(v.cursor=v.limit-r,u()&&v.slice_del()));break;case 2:n=v.limit-v.cursor,v.eq_s_b(1,"e")||(v.cursor=v.limit-n,v.slice_del());break;case 3:if(v.slice_del(),v.ket=v.cursor,i=v.limit-v.cursor,!v.eq_s_b(2,"er")&&(v.cursor=v.limit-i,!v.eq_s_b(2,"en")))break;v.bra=v.cursor,c()&&v.slice_del();break;case 4:v.slice_del(),v.ket=v.cursor,e=v.find_among_b(b,2),e&&(v.bra=v.cursor,u()&&1==e&&v.slice_del())}}var d,l,m,h=[new r("",-1,6),new r("U",0,2),new r("Y",0,1),new r("ä",0,3),new r("ö",0,4),new r("ü",0,5)],w=[new r("e",-1,2),new r("em",-1,1),new r("en",-1,2),new r("ern",-1,1),new r("er",-1,1),new r("s",-1,3),new r("es",5,2)],f=[new r("en",-1,1),new r("er",-1,1),new r("st",-1,2),new r("est",2,1)],b=[new r("ig",-1,1),new r("lich",-1,1)],_=[new r("end",-1,1),new r("ig",-1,2),new r("ung",-1,1),new r("lich",-1,3),new r("isch",-1,2),new r("ik",-1,2),new r("heit",-1,3),new r("keit",-1,4)],p=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32,8],g=[117,30,5],k=[117,30,4],v=new n;this.setCurrent=function(e){v.setCurrent(e)},this.getCurrent=function(){return v.getCurrent()},this.stem=function(){var e=v.cursor;return i(),v.cursor=e,t(),v.limit_backward=e,v.cursor=v.limit,a(),v.cursor=v.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return 
i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.de.stemmer,"stemmer-de"),e.de.stopWordFilter=e.generateStopWordFilter("aber alle allem allen aller alles als also am an ander andere anderem anderen anderer anderes anderm andern anderr anders auch auf aus bei bin bis bist da damit dann das dasselbe dazu daß dein deine deinem deinen deiner deines dem demselben den denn denselben der derer derselbe derselben des desselben dessen dich die dies diese dieselbe dieselben diesem diesen dieser dieses dir doch dort du durch ein eine einem einen einer eines einig einige einigem einigen einiger einiges einmal er es etwas euch euer eure eurem euren eurer eures für gegen gewesen hab habe haben hat hatte hatten hier hin hinter ich ihm ihn ihnen ihr ihre ihrem ihren ihrer ihres im in indem ins ist jede jedem jeden jeder jedes jene jenem jenen jener jenes jetzt kann kein keine keinem keinen keiner keines können könnte machen man manche manchem manchen mancher manches mein meine meinem meinen meiner meines mich mir mit muss musste nach nicht nichts noch nun nur ob oder ohne sehr sein seine seinem seinen seiner seines selbst sich sie sind so solche solchem solchen solcher solches soll sollte sondern sonst um und uns unse unsem unsen unser unses unter viel vom von vor war waren warst was weg weil weiter welche welchem welchen welcher welches wenn werde werden wie wieder will wir wird wirst wo wollen wollte während würde würden zu zum zur zwar zwischen über".split(" ")),e.Pipeline.registerFunction(e.de.stopWordFilter,"stopWordFilter-de")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.du.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Dutch` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");console.warn('[Lunr Languages] Please use the "nl" instead of the "du". 
The "nl" code is the standard code for Dutch language, and "du" will be removed in the next major versions.'),e.du=function(){this.pipeline.reset(),this.pipeline.add(e.du.trimmer,e.du.stopWordFilter,e.du.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.du.stemmer))},e.du.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.du.trimmer=e.trimmerSupport.generateTrimmer(e.du.wordCharacters),e.Pipeline.registerFunction(e.du.trimmer,"trimmer-du"),e.du.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){for(var e,r,i,o=C.cursor;;){if(C.bra=C.cursor,e=C.find_among(b,11))switch(C.ket=C.cursor,e){case 1:C.slice_from("a");continue;case 2:C.slice_from("e");continue;case 3:C.slice_from("i");continue;case 4:C.slice_from("o");continue;case 5:C.slice_from("u");continue;case 6:if(C.cursor>=C.limit)break;C.cursor++;continue}break}for(C.cursor=o,C.bra=o,C.eq_s(1,"y")?(C.ket=C.cursor,C.slice_from("Y")):C.cursor=o;;)if(r=C.cursor,C.in_grouping(q,97,232)){if(i=C.cursor,C.bra=i,C.eq_s(1,"i"))C.ket=C.cursor,C.in_grouping(q,97,232)&&(C.slice_from("I"),C.cursor=r);else if(C.cursor=i,C.eq_s(1,"y"))C.ket=C.cursor,C.slice_from("Y"),C.cursor=r;else if(n(r))break}else if(n(r))break}function n(e){return C.cursor=e,e>=C.limit||(C.cursor++,!1)}function o(){_=C.limit,f=_,t()||(_=C.cursor,_<3&&(_=3),t()||(f=C.cursor))}function t(){for(;!C.in_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}for(;!C.out_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}return!1}function s(){for(var e;;)if(C.bra=C.cursor,e=C.find_among(p,3))switch(C.ket=C.cursor,e){case 1:C.slice_from("y");break;case 2:C.slice_from("i");break;case 3:if(C.cursor>=C.limit)return;C.cursor++}}function u(){return _<=C.cursor}function c(){return f<=C.cursor}function a(){var e=C.limit-C.cursor;C.find_among_b(g,3)&&(C.cursor=C.limit-e,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del()))}function l(){var e;w=!1,C.ket=C.cursor,C.eq_s_b(1,"e")&&(C.bra=C.cursor,u()&&(e=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-e,C.slice_del(),w=!0,a())))}function m(){var e;u()&&(e=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-e,C.eq_s_b(3,"gem")||(C.cursor=C.limit-e,C.slice_del(),a())))}function d(){var e,r,i,n,o,t,s=C.limit-C.cursor;if(C.ket=C.cursor,e=C.find_among_b(h,5))switch(C.bra=C.cursor,e){case 1:u()&&C.slice_from("heid");break;case 2:m();break;case 3:u()&&C.out_grouping_b(z,97,232)&&C.slice_del()}if(C.cursor=C.limit-s,l(),C.cursor=C.limit-s,C.ket=C.cursor,C.eq_s_b(4,"heid")&&(C.bra=C.cursor,c()&&(r=C.limit-C.cursor,C.eq_s_b(1,"c")||(C.cursor=C.limit-r,C.slice_del(),C.ket=C.cursor,C.eq_s_b(2,"en")&&(C.bra=C.cursor,m())))),C.cursor=C.limit-s,C.ket=C.cursor,e=C.find_among_b(k,6))switch(C.bra=C.cursor,e){case 1:if(c()){if(C.slice_del(),i=C.limit-C.cursor,C.ket=C.cursor,C.eq_s_b(2,"ig")&&(C.bra=C.cursor,c()&&(n=C.limit-C.cursor,!C.eq_s_b(1,"e")))){C.cursor=C.limit-n,C.slice_del();break}C.cursor=C.limit-i,a()}break;case 2:c()&&(o=C.limit-C.cursor,C.eq_s_b(1,"e")||(C.cursor=C.limit-o,C.slice_del()));break;case 3:c()&&(C.slice_del(),l());break;case 4:c()&&C.slice_del();break;case 5:c()&&w&&C.slice_del()}C.cursor=C.limit-s,C.out_grouping_b(j,73,232)&&(t=C.limit-C.cursor,C.find_among_b(v,4)&&C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-t,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del())))}var f,_,w,b=[new r("",-1,6),new 
r("á",0,1),new r("ä",0,1),new r("é",0,2),new r("ë",0,2),new r("í",0,3),new r("ï",0,3),new r("ó",0,4),new r("ö",0,4),new r("ú",0,5),new r("ü",0,5)],p=[new r("",-1,3),new r("I",0,2),new r("Y",0,1)],g=[new r("dd",-1,-1),new r("kk",-1,-1),new r("tt",-1,-1)],h=[new r("ene",-1,2),new r("se",-1,3),new r("en",-1,2),new r("heden",2,1),new r("s",-1,3)],k=[new r("end",-1,1),new r("ig",-1,2),new r("ing",-1,1),new r("lijk",-1,3),new r("baar",-1,4),new r("bar",-1,5)],v=[new r("aa",-1,-1),new r("ee",-1,-1),new r("oo",-1,-1),new r("uu",-1,-1)],q=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],j=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],z=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],C=new i;this.setCurrent=function(e){C.setCurrent(e)},this.getCurrent=function(){return C.getCurrent()},this.stem=function(){var r=C.cursor;return e(),C.cursor=r,o(),C.limit_backward=r,C.cursor=C.limit,d(),C.cursor=C.limit_backward,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.du.stemmer,"stemmer-du"),e.du.stopWordFilter=e.generateStopWordFilter(" aan al alles als altijd andere ben bij daar dan dat de der deze die dit doch doen door dus een eens en er ge geen geweest haar had heb hebben heeft hem het hier hij hoe hun iemand iets ik in is ja je kan kon kunnen maar me meer men met mij mijn moet na naar niet niets nog nu of om omdat onder ons ook op over reeds te tegen toch toen tot u uit uw van veel voor want waren was wat werd wezen wie wil worden wordt zal ze zelf zich zij zijn zo zonder zou".split(" ")),e.Pipeline.registerFunction(e.du.stopWordFilter,"stopWordFilter-du")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.el.min.js: -------------------------------------------------------------------------------- 1 | !function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.el=function(){this.pipeline.reset(),void 0===this.searchPipeline&&this.pipeline.add(e.el.trimmer,e.el.normilizer),this.pipeline.add(e.el.stopWordFilter,e.el.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.el.stemmer))},e.el.wordCharacters="A-Za-zΑαΒβΓγΔδΕεΖζΗηΘθΙιΚκΛλΜμΝνΞξΟοΠπΡρΣσςΤτΥυΦφΧχΨψΩωΆάΈέΉήΊίΌόΎύΏώΪΐΫΰΐΰ",e.el.trimmer=e.trimmerSupport.generateTrimmer(e.el.wordCharacters),e.Pipeline.registerFunction(e.el.trimmer,"trimmer-el"),e.el.stemmer=function(){function e(e){return s.test(e)}function t(e){return/[ΑΕΗΙΟΥΩ]$/.test(e)}function r(e){return/[ΑΕΗΙΟΩ]$/.test(e)}function n(n){var s=n;if(n.length<3)return s;if(!e(n))return s;if(i.indexOf(n)>=0)return s;var u=new RegExp("(.*)("+Object.keys(l).join("|")+")$"),o=u.exec(s);return null!==o&&(s=o[1]+l[o[2]]),null!==(o=/^(.+?)(ΑΔΕΣ|ΑΔΩΝ)$/.exec(s))&&(s=o[1],/(ΟΚ|ΜΑΜ|ΜΑΝ|ΜΠΑΜΠ|ΠΑΤΕΡ|ΓΙΑΓΙ|ΝΤΑΝΤ|ΚΥΡ|ΘΕΙ|ΠΕΘΕΡ|ΜΟΥΣΑΜ|ΚΑΠΛΑΜ|ΠΑΡ|ΨΑΡ|ΤΖΟΥΡ|ΤΑΜΠΟΥΡ|ΓΑΛΑΤ|ΦΑΦΛΑΤ)$/.test(o[1])||(s+="ΑΔ")),null!==(o=/^(.+?)(ΕΔΕΣ|ΕΔΩΝ)$/.exec(s))&&(s=o[1],/(ΟΠ|ΙΠ|ΕΜΠ|ΥΠ|ΓΗΠ|ΔΑΠ|ΚΡΑΣΠ|ΜΙΛ)$/.test(o[1])&&(s+="ΕΔ")),null!==(o=/^(.+?)(ΟΥΔΕΣ|ΟΥΔΩΝ)$/.exec(s))&&(s=o[1],/(ΑΡΚ|ΚΑΛΙΑΚ|ΠΕΤΑΛ|ΛΙΧ|ΠΛΕΞ|ΣΚ|Σ|ΦΛ|ΦΡ|ΒΕΛ|ΛΟΥΛ|ΧΝ|ΣΠ|ΤΡΑΓ|ΦΕ)$/.test(o[1])&&(s+="ΟΥΔ")),null!==(o=/^(.+?)(ΕΩΣ|ΕΩΝ|ΕΑΣ|ΕΑ)$/.exec(s))&&(s=o[1],/^(Θ|Δ|ΕΛ|ΓΑΛ|Ν|Π|ΙΔ|ΠΑΡ|ΣΤΕΡ|ΟΡΦ|ΑΝΔΡ|ΑΝΤΡ)$/.test(o[1])&&(s+="Ε")),null!==(o=/^(.+?)(ΕΙΟ|ΕΙΟΣ|ΕΙΟΙ|ΕΙΑ|ΕΙΑΣ|ΕΙΕΣ|ΕΙΟΥ|ΕΙΟΥΣ|ΕΙΩΝ)$/.exec(s))&&o[1].length>4&&(s=o[1]),null!==(o=/^(.+?)(ΙΟΥΣ|ΙΑΣ|ΙΕΣ|ΙΟΣ|ΙΟΥ|ΙΟΙ|ΙΩΝ|ΙΟΝ|ΙΑ|ΙΟ)$/.exec(s))&&(s=o[1],(t(s)||s.length<2||/^(ΑΓ|ΑΓΓΕΛ|ΑΓΡ|ΑΕΡ|ΑΘΛ|ΑΚΟΥΣ|ΑΞ|ΑΣ|Β|ΒΙΒΛ|ΒΥΤ|Γ|ΓΙΑΓ|ΓΩΝ|Δ|ΔΑΝ|ΔΗΛ|ΔΗΜ|ΔΟΚΙΜ|ΕΛ|ΖΑΧΑΡ|ΗΛ|ΗΠ|ΙΔ|ΙΣΚ|ΙΣΤ|ΙΟΝ|ΙΩΝ|ΚΙΜΩΛ|ΚΟΛΟΝ|ΚΟΡ|ΚΤΗΡ|ΚΥΡ|ΛΑΓ|ΛΟΓ|ΜΑΓ|ΜΠΑΝ|ΜΠΡ|ΝΑΥΤ|ΝΟΤ|ΟΠΑΛ|ΟΞ|ΟΡ|ΟΣ|ΠΑΝΑΓ|ΠΑΤΡ|ΠΗΛ|ΠΗΝ|ΠΛΑΙΣ|ΠΟΝΤ|ΡΑΔ|ΡΟΔ|ΣΚ|ΣΚΟΡΠ|ΣΟΥΝ|ΣΠΑΝ|ΣΤΑΔ|ΣΥΡ|ΤΗΛ|ΤΙΜ|ΤΟΚ|ΤΟΠ|ΤΡΟΧ|ΦΙΛ|ΦΩΤ|Χ|ΧΙΛ|ΧΡΩΜ|ΧΩΡ)$/.test(o[1]))&&(s+="Ι"),/^(ΠΑΛ)$/.test(o[1])&&(s+="ΑΙ")),null!==(o=/^(.+?)(ΙΚΟΣ|ΙΚΟΝ|ΙΚΕΙΣ|ΙΚΟΙ|ΙΚΕΣ|ΙΚΟΥΣ|ΙΚΗ|ΙΚΗΣ|ΙΚΟ|ΙΚΑ|ΙΚΟΥ|ΙΚΩΝ|ΙΚΩΣ)$/.exec(s))&&(s=o[1],(t(s)||/^(ΑΔ|ΑΛ|ΑΜΑΝ|ΑΜΕΡ|ΑΜΜΟΧΑΛ|ΑΝΗΘ|ΑΝΤΙΔ|ΑΠΛ|ΑΤΤ|ΑΦΡ|ΒΑΣ|ΒΡΩΜ|ΓΕΝ|ΓΕΡ|Δ|ΔΙΚΑΝ|ΔΥΤ|ΕΙΔ|ΕΝΔ|ΕΞΩΔ|ΗΘ|ΘΕΤ|ΚΑΛΛΙΝ|ΚΑΛΠ|ΚΑΤΑΔ|ΚΟΥΖΙΝ|ΚΡ|ΚΩΔ|ΛΟΓ|Μ|ΜΕΡ|ΜΟΝΑΔ|ΜΟΥΛ|ΜΟΥΣ|ΜΠΑΓΙΑΤ|ΜΠΑΝ|ΜΠΟΛ|ΜΠΟΣ|ΜΥΣΤ|Ν|ΝΙΤ|ΞΙΚ|ΟΠΤ|ΠΑΝ|ΠΕΤΣ|ΠΙΚΑΝΤ|ΠΙΤΣ|ΠΛΑΣΤ|ΠΛΙΑΤΣ|ΠΟΝΤ|ΠΟΣΤΕΛΝ|ΠΡΩΤΟΔ|ΣΕΡΤ|ΣΗΜΑΝΤ|ΣΤΑΤ|ΣΥΝΑΔ|ΣΥΝΟΜΗΛ|ΤΕΛ|ΤΕΧΝ|ΤΡΟΠ|ΤΣΑΜ|ΥΠΟΔ|Φ|ΦΙΛΟΝ|ΦΥΛΟΔ|ΦΥΣ|ΧΑΣ)$/.test(o[1])||/(ΦΟΙΝ)$/.test(o[1]))&&(s+="ΙΚ")),"ΑΓΑΜΕ"===s&&(s="ΑΓΑΜ"),null!==(o=/^(.+?)(ΑΓΑΜΕ|ΗΣΑΜΕ|ΟΥΣΑΜΕ|ΗΚΑΜΕ|ΗΘΗΚΑΜΕ)$/.exec(s))&&(s=o[1]),null!==(o=/^(.+?)(ΑΜΕ)$/.exec(s))&&(s=o[1],/^(ΑΝΑΠ|ΑΠΟΘ|ΑΠΟΚ|ΑΠΟΣΤ|ΒΟΥΒ|ΞΕΘ|ΟΥΛ|ΠΕΘ|ΠΙΚΡ|ΠΟΤ|ΣΙΧ|Χ)$/.test(o[1])&&(s+="ΑΜ")),null!==(o=/^(.+?)(ΑΓΑΝΕ|ΗΣΑΝΕ|ΟΥΣΑΝΕ|ΙΟΝΤΑΝΕ|ΙΟΤΑΝΕ|ΙΟΥΝΤΑΝΕ|ΟΝΤΑΝΕ|ΟΤΑΝΕ|ΟΥΝΤΑΝΕ|ΗΚΑΝΕ|ΗΘΗΚΑΝΕ)$/.exec(s))&&(s=o[1],/^(ΤΡ|ΤΣ)$/.test(o[1])&&(s+="ΑΓΑΝ")),null!==(o=/^(.+?)(ΑΝΕ)$/.exec(s))&&(s=o[1],(r(s)||/^(ΒΕΤΕΡ|ΒΟΥΛΚ|ΒΡΑΧΜ|Γ|ΔΡΑΔΟΥΜ|Θ|ΚΑΛΠΟΥΖ|ΚΑΣΤΕΛ|ΚΟΡΜΟΡ|ΛΑΟΠΛ|ΜΩΑΜΕΘ|Μ|ΜΟΥΣΟΥΛΜΑΝ|ΟΥΛ|Π|ΠΕΛΕΚ|ΠΛ|ΠΟΛΙΣ|ΠΟΡΤΟΛ|ΣΑΡΑΚΑΤΣ|ΣΟΥΛΤ|ΤΣΑΡΛΑΤ|ΟΡΦ|ΤΣΙΓΓ|ΤΣΟΠ|ΦΩΤΟΣΤΕΦ|Χ|ΨΥΧΟΠΛ|ΑΓ|ΟΡΦ|ΓΑΛ|ΓΕΡ|ΔΕΚ|ΔΙΠΛ|ΑΜΕΡΙΚΑΝ|ΟΥΡ|ΠΙΘ|ΠΟΥΡΙΤ|Σ|ΖΩΝΤ|ΙΚ|ΚΑΣΤ|ΚΟΠ|ΛΙΧ|ΛΟΥΘΗΡ|ΜΑΙΝΤ|ΜΕΛ|ΣΙΓ|ΣΠ|ΣΤΕΓ|ΤΡΑΓ|ΤΣΑΓ|Φ|ΕΡ|ΑΔΑΠ|ΑΘΙΓΓ|ΑΜΗΧ|ΑΝΙΚ|ΑΝΟΡΓ|ΑΠΗΓ|ΑΠΙΘ|ΑΤΣΙΓΓ|ΒΑΣ|ΒΑΣΚ|ΒΑΘΥΓΑΛ|ΒΙΟΜΗΧ|ΒΡΑΧΥΚ|ΔΙΑΤ|ΔΙΑΦ|ΕΝΟΡΓ|ΘΥΣ|ΚΑΠΝΟΒΙΟΜΗΧ|ΚΑΤΑΓΑΛ|ΚΛΙΒ|ΚΟΙΛΑΡΦ|ΛΙΒ|ΜΕΓΛΟΒΙΟΜΗΧ|ΜΙΚΡΟΒΙΟΜΗΧ|ΝΤΑΒ|ΞΗΡΟΚΛΙΒ|ΟΛΙΓΟΔΑΜ|ΟΛΟΓΑΛ|ΠΕΝΤΑΡΦ|ΠΕΡΗΦ|ΠΕΡΙΤΡ|ΠΛΑΤ|ΠΟΛΥΔΑΠ|ΠΟΛΥΜΗΧ|ΣΤΕΦ|ΤΑΒ|ΤΕΤ|ΥΠΕΡΗΦ|ΥΠΟΚΟΠ|ΧΑΜΗΛΟΔΑΠ|ΨΗΛΟΤΑΒ)$/.test(o[1]))&&(s+="ΑΝ")),null!==(o=/^(.+?)(ΗΣΕΤΕ)$/.exec(s))&&(s=o[1]),null!==(o=/^(.+?)(ΕΤΕ)$/.exec(s))&&(s=o[1],(r(s)
||/(ΟΔ|ΑΙΡ|ΦΟΡ|ΤΑΘ|ΔΙΑΘ|ΣΧ|ΕΝΔ|ΕΥΡ|ΤΙΘ|ΥΠΕΡΘ|ΡΑΘ|ΕΝΘ|ΡΟΘ|ΣΘ|ΠΥΡ|ΑΙΝ|ΣΥΝΔ|ΣΥΝ|ΣΥΝΘ|ΧΩΡ|ΠΟΝ|ΒΡ|ΚΑΘ|ΕΥΘ|ΕΚΘ|ΝΕΤ|ΡΟΝ|ΑΡΚ|ΒΑΡ|ΒΟΛ|ΩΦΕΛ)$/.test(o[1])||/^(ΑΒΑΡ|ΒΕΝ|ΕΝΑΡ|ΑΒΡ|ΑΔ|ΑΘ|ΑΝ|ΑΠΛ|ΒΑΡΟΝ|ΝΤΡ|ΣΚ|ΚΟΠ|ΜΠΟΡ|ΝΙΦ|ΠΑΓ|ΠΑΡΑΚΑΛ|ΣΕΡΠ|ΣΚΕΛ|ΣΥΡΦ|ΤΟΚ|Υ|Δ|ΕΜ|ΘΑΡΡ|Θ)$/.test(o[1]))&&(s+="ΕΤ")),null!==(o=/^(.+?)(ΟΝΤΑΣ|ΩΝΤΑΣ)$/.exec(s))&&(s=o[1],/^ΑΡΧ$/.test(o[1])&&(s+="ΟΝΤ"),/ΚΡΕ$/.test(o[1])&&(s+="ΩΝΤ")),null!==(o=/^(.+?)(ΟΜΑΣΤΕ|ΙΟΜΑΣΤΕ)$/.exec(s))&&(s=o[1],/^ΟΝ$/.test(o[1])&&(s+="ΟΜΑΣΤ")),null!==(o=/^(.+?)(ΙΕΣΤΕ)$/.exec(s))&&(s=o[1],/^(Π|ΑΠ|ΣΥΜΠ|ΑΣΥΜΠ|ΑΚΑΤΑΠ|ΑΜΕΤΑΜΦ)$/.test(o[1])&&(s+="ΙΕΣΤ")),null!==(o=/^(.+?)(ΕΣΤΕ)$/.exec(s))&&(s=o[1],/^(ΑΛ|ΑΡ|ΕΚΤΕΛ|Ζ|Μ|Ξ|ΠΑΡΑΚΑΛ|ΠΡΟ|ΝΙΣ)$/.test(o[1])&&(s+="ΕΣΤ")),null!==(o=/^(.+?)(ΗΘΗΚΑ|ΗΘΗΚΕΣ|ΗΘΗΚΕ)$/.exec(s))&&(s=o[1]),null!==(o=/^(.+?)(ΗΚΑ|ΗΚΕΣ|ΗΚΕ)$/.exec(s))&&(s=o[1],(/(ΣΚΩΛ|ΣΚΟΥΛ|ΝΑΡΘ|ΣΦ|ΟΘ|ΠΙΘ)$/.test(o[1])||/^(ΔΙΑΘ|Θ|ΠΑΡΑΚΑΤΑΘ|ΠΡΟΣΘ|ΣΥΝΘ)$/.test(o[1]))&&(s+="ΗΚ")),null!==(o=/^(.+?)(ΟΥΣΑ|ΟΥΣΕΣ|ΟΥΣΕ)$/.exec(s))&&(s=o[1],(t(s)||/^(ΦΑΡΜΑΚ|ΧΑΔ|ΑΓΚ|ΑΝΑΡΡ|ΒΡΟΜ|ΕΚΛΙΠ|ΛΑΜΠΙΔ|ΛΕΧ|Μ|ΠΑΤ|Ρ|Λ|ΜΕΔ|ΜΕΣΑΖ|ΥΠΟΤΕΙΝ|ΑΜ|ΑΙΘ|ΑΝΗΚ|ΔΕΣΠΟΖ|ΕΝΔΙΑΦΕΡ)$/.test(o[1])||/(ΠΟΔΑΡ|ΒΛΕΠ|ΠΑΝΤΑΧ|ΦΡΥΔ|ΜΑΝΤΙΛ|ΜΑΛΛ|ΚΥΜΑΤ|ΛΑΧ|ΛΗΓ|ΦΑΓ|ΟΜ|ΠΡΩΤ)$/.test(o[1]))&&(s+="ΟΥΣ")),null!==(o=/^(.+?)(ΑΓΑ|ΑΓΕΣ|ΑΓΕ)$/.exec(s))&&(s=o[1],(/^(ΑΒΑΣΤ|ΠΟΛΥΦ|ΑΔΗΦ|ΠΑΜΦ|Ρ|ΑΣΠ|ΑΦ|ΑΜΑΛ|ΑΜΑΛΛΙ|ΑΝΥΣΤ|ΑΠΕΡ|ΑΣΠΑΡ|ΑΧΑΡ|ΔΕΡΒΕΝ|ΔΡΟΣΟΠ|ΞΕΦ|ΝΕΟΠ|ΝΟΜΟΤ|ΟΛΟΠ|ΟΜΟΤ|ΠΡΟΣΤ|ΠΡΟΣΩΠΟΠ|ΣΥΜΠ|ΣΥΝΤ|Τ|ΥΠΟΤ|ΧΑΡ|ΑΕΙΠ|ΑΙΜΟΣΤ|ΑΝΥΠ|ΑΠΟΤ|ΑΡΤΙΠ|ΔΙΑΤ|ΕΝ|ΕΠΙΤ|ΚΡΟΚΑΛΟΠ|ΣΙΔΗΡΟΠ|Λ|ΝΑΥ|ΟΥΛΑΜ|ΟΥΡ|Π|ΤΡ|Μ)$/.test(o[1])||/(ΟΦ|ΠΕΛ|ΧΟΡΤ|ΛΛ|ΣΦ|ΡΠ|ΦΡ|ΠΡ|ΛΟΧ|ΣΜΗΝ)$/.test(o[1])&&!/^(ΨΟΦ|ΝΑΥΛΟΧ)$/.test(o[1])||/(ΚΟΛΛ)$/.test(o[1]))&&(s+="ΑΓ")),null!==(o=/^(.+?)(ΗΣΕ|ΗΣΟΥ|ΗΣΑ)$/.exec(s))&&(s=o[1],/^(Ν|ΧΕΡΣΟΝ|ΔΩΔΕΚΑΝ|ΕΡΗΜΟΝ|ΜΕΓΑΛΟΝ|ΕΠΤΑΝ|Ι)$/.test(o[1])&&(s+="ΗΣ")),null!==(o=/^(.+?)(ΗΣΤΕ)$/.exec(s))&&(s=o[1],/^(ΑΣΒ|ΣΒ|ΑΧΡ|ΧΡ|ΑΠΛ|ΑΕΙΜΝ|ΔΥΣΧΡ|ΕΥΧΡ|ΚΟΙΝΟΧΡ|ΠΑΛΙΜΨ)$/.test(o[1])&&(s+="ΗΣΤ")),null!==(o=/^(.+?)(ΟΥΝΕ|ΗΣΟΥΝΕ|ΗΘΟΥΝΕ)$/.exec(s))&&(s=o[1],/^(Ν|Ρ|ΣΠΙ|ΣΤΡΑΒΟΜΟΥΤΣ|ΚΑΚΟΜΟΥΤΣ|ΕΞΩΝ)$/.test(o[1])&&(s+="ΟΥΝ")),null!==(o=/^(.+?)(ΟΥΜΕ|ΗΣΟΥΜΕ|ΗΘΟΥΜΕ)$/.exec(s))&&(s=o[1],/^(ΠΑΡΑΣΟΥΣ|Φ|Χ|ΩΡΙΟΠΛ|ΑΖ|ΑΛΛΟΣΟΥΣ|ΑΣΟΥΣ)$/.test(o[1])&&(s+="ΟΥΜ")),null!=(o=/^(.+?)(ΜΑΤΟΙ|ΜΑΤΟΥΣ|ΜΑΤΟ|ΜΑΤΑ|ΜΑΤΩΣ|ΜΑΤΩΝ|ΜΑΤΟΣ|ΜΑΤΕΣ|ΜΑΤΗ|ΜΑΤΗΣ|ΜΑΤΟΥ)$/.exec(s))&&(s=o[1]+"Μ",/^(ΓΡΑΜ)$/.test(o[1])?s+="Α":/^(ΓΕ|ΣΤΑ)$/.test(o[1])&&(s+="ΑΤ")),null!==(o=/^(.+?)(ΟΥΑ)$/.exec(s))&&(s=o[1]+"ΟΥ"),n.length===s.length&&null!==(o=/^(.+?)(Α|ΑΓΑΤΕ|ΑΓΑΝ|ΑΕΙ|ΑΜΑΙ|ΑΝ|ΑΣ|ΑΣΑΙ|ΑΤΑΙ|ΑΩ|Ε|ΕΙ|ΕΙΣ|ΕΙΤΕ|ΕΣΑΙ|ΕΣ|ΕΤΑΙ|Ι|ΙΕΜΑΙ|ΙΕΜΑΣΤΕ|ΙΕΤΑΙ|ΙΕΣΑΙ|ΙΕΣΑΣΤΕ|ΙΟΜΑΣΤΑΝ|ΙΟΜΟΥΝ|ΙΟΜΟΥΝΑ|ΙΟΝΤΑΝ|ΙΟΝΤΟΥΣΑΝ|ΙΟΣΑΣΤΑΝ|ΙΟΣΑΣΤΕ|ΙΟΣΟΥΝ|ΙΟΣΟΥΝΑ|ΙΟΤΑΝ|ΙΟΥΜΑ|ΙΟΥΜΑΣΤΕ|ΙΟΥΝΤΑΙ|ΙΟΥΝΤΑΝ|Η|ΗΔΕΣ|ΗΔΩΝ|ΗΘΕΙ|ΗΘΕΙΣ|ΗΘΕΙΤΕ|ΗΘΗΚΑΤΕ|ΗΘΗΚΑΝ|ΗΘΟΥΝ|ΗΘΩ|ΗΚΑΤΕ|ΗΚΑΝ|ΗΣ|ΗΣΑΝ|ΗΣΑΤΕ|ΗΣΕΙ|ΗΣΕΣ|ΗΣΟΥΝ|ΗΣΩ|Ο|ΟΙ|ΟΜΑΙ|ΟΜΑΣΤΑΝ|ΟΜΟΥΝ|ΟΜΟΥΝΑ|ΟΝΤΑΙ|ΟΝΤΑΝ|ΟΝΤΟΥΣΑΝ|ΟΣ|ΟΣΑΣΤΑΝ|ΟΣΑΣΤΕ|ΟΣΟΥΝ|ΟΣΟΥΝΑ|ΟΤΑΝ|ΟΥ|ΟΥΜΑΙ|ΟΥΜΑΣΤΕ|ΟΥΝ|ΟΥΝΤΑΙ|ΟΥΝΤΑΝ|ΟΥΣ|ΟΥΣΑΝ|ΟΥΣΑΤΕ|Υ||ΥΑ|ΥΣ|Ω|ΩΝ|ΟΙΣ)$/.exec(s))&&(s=o[1]),null!=(o=/^(.+?)(ΕΣΤΕΡ|ΕΣΤΑΤ|ΟΤΕΡ|ΟΤΑΤ|ΥΤΕΡ|ΥΤΑΤ|ΩΤΕΡ|ΩΤΑΤ)$/.exec(s))&&(/^(ΕΞ|ΕΣ|ΑΝ|ΚΑΤ|Κ|ΠΡ)$/.test(o[1])||(s=o[1]),/^(ΚΑ|Μ|ΕΛΕ|ΛΕ|ΔΕ)$/.test(o[1])&&(s+="ΥΤ")),s}var 
l={"ΦΑΓΙΑ":"ΦΑ","ΦΑΓΙΟΥ":"ΦΑ","ΦΑΓΙΩΝ":"ΦΑ","ΣΚΑΓΙΑ":"ΣΚΑ","ΣΚΑΓΙΟΥ":"ΣΚΑ","ΣΚΑΓΙΩΝ":"ΣΚΑ","ΣΟΓΙΟΥ":"ΣΟ","ΣΟΓΙΑ":"ΣΟ","ΣΟΓΙΩΝ":"ΣΟ","ΤΑΤΟΓΙΑ":"ΤΑΤΟ","ΤΑΤΟΓΙΟΥ":"ΤΑΤΟ","ΤΑΤΟΓΙΩΝ":"ΤΑΤΟ","ΚΡΕΑΣ":"ΚΡΕ","ΚΡΕΑΤΟΣ":"ΚΡΕ","ΚΡΕΑΤΑ":"ΚΡΕ","ΚΡΕΑΤΩΝ":"ΚΡΕ","ΠΕΡΑΣ":"ΠΕΡ","ΠΕΡΑΤΟΣ":"ΠΕΡ","ΠΕΡΑΤΑ":"ΠΕΡ","ΠΕΡΑΤΩΝ":"ΠΕΡ","ΤΕΡΑΣ":"ΤΕΡ","ΤΕΡΑΤΟΣ":"ΤΕΡ","ΤΕΡΑΤΑ":"ΤΕΡ","ΤΕΡΑΤΩΝ":"ΤΕΡ","ΦΩΣ":"ΦΩ","ΦΩΤΟΣ":"ΦΩ","ΦΩΤΑ":"ΦΩ","ΦΩΤΩΝ":"ΦΩ","ΚΑΘΕΣΤΩΣ":"ΚΑΘΕΣΤ","ΚΑΘΕΣΤΩΤΟΣ":"ΚΑΘΕΣΤ","ΚΑΘΕΣΤΩΤΑ":"ΚΑΘΕΣΤ","ΚΑΘΕΣΤΩΤΩΝ":"ΚΑΘΕΣΤ","ΓΕΓΟΝΟΣ":"ΓΕΓΟΝ","ΓΕΓΟΝΟΤΟΣ":"ΓΕΓΟΝ","ΓΕΓΟΝΟΤΑ":"ΓΕΓΟΝ","ΓΕΓΟΝΟΤΩΝ":"ΓΕΓΟΝ","ΕΥΑ":"ΕΥ"},i=["ΑΚΡΙΒΩΣ","ΑΛΑ","ΑΛΛΑ","ΑΛΛΙΩΣ","ΑΛΛΟΤΕ","ΑΜΑ","ΑΝΩ","ΑΝΑ","ΑΝΑΜΕΣΑ","ΑΝΑΜΕΤΑΞΥ","ΑΝΕΥ","ΑΝΤΙ","ΑΝΤΙΠΕΡΑ","ΑΝΤΙΟ","ΑΞΑΦΝΑ","ΑΠΟ","ΑΠΟΨΕ","ΑΡΑ","ΑΡΑΓΕ","ΑΥΡΙΟ","ΑΦΟΙ","ΑΦΟΥ","ΑΦΟΤΟΥ","ΒΡΕ","ΓΕΙΑ","ΓΙΑ","ΓΙΑΤΙ","ΓΡΑΜΜΑ","ΔΕΗ","ΔΕΝ","ΔΗΛΑΔΗ","ΔΙΧΩΣ","ΔΥΟ","ΕΑΝ","ΕΓΩ","ΕΔΩ","ΕΔΑ","ΕΙΘΕ","ΕΙΜΑΙ","ΕΙΜΑΣΤΕ","ΕΙΣΑΙ","ΕΙΣΑΣΤΕ","ΕΙΝΑΙ","ΕΙΣΤΕ","ΕΙΤΕ","ΕΚΕΙ","ΕΚΟ","ΕΛΑ","ΕΜΑΣ","ΕΜΕΙΣ","ΕΝΤΕΛΩΣ","ΕΝΤΟΣ","ΕΝΤΩΜΕΤΑΞΥ","ΕΝΩ","ΕΞΙ","ΕΞΙΣΟΥ","ΕΞΗΣ","ΕΞΩ","ΕΟΚ","ΕΠΑΝΩ","ΕΠΕΙΔΗ","ΕΠΕΙΤΑ","ΕΠΙ","ΕΠΙΣΗΣ","ΕΠΟΜΕΝΩΣ","ΕΠΤΑ","ΕΣΑΣ","ΕΣΕΙΣ","ΕΣΤΩ","ΕΣΥ","ΕΣΩ","ΕΤΣΙ","ΕΥΓΕ","ΕΦΕ","ΕΦΕΞΗΣ","ΕΧΤΕΣ","ΕΩΣ","ΗΔΗ","ΗΜΙ","ΗΠΑ","ΗΤΟΙ","ΘΕΣ","ΙΔΙΩΣ","ΙΔΗ","ΙΚΑ","ΙΣΩΣ","ΚΑΘΕ","ΚΑΘΕΤΙ","ΚΑΘΟΛΟΥ","ΚΑΘΩΣ","ΚΑΙ","ΚΑΝ","ΚΑΠΟΤΕ","ΚΑΠΟΥ","ΚΑΤΑ","ΚΑΤΙ","ΚΑΤΟΠΙΝ","ΚΑΤΩ","ΚΕΙ","ΚΙΧ","ΚΚΕ","ΚΟΛΑΝ","ΚΥΡΙΩΣ","ΚΩΣ","ΜΑΚΑΡΙ","ΜΑΛΙΣΤΑ","ΜΑΛΛΟΝ","ΜΑΙ","ΜΑΟ","ΜΑΟΥΣ","ΜΑΣ","ΜΕΘΑΥΡΙΟ","ΜΕΣ","ΜΕΣΑ","ΜΕΤΑ","ΜΕΤΑΞΥ","ΜΕΧΡΙ","ΜΗΔΕ","ΜΗΝ","ΜΗΠΩΣ","ΜΗΤΕ","ΜΙΑ","ΜΙΑΣ","ΜΙΣ","ΜΜΕ","ΜΟΛΟΝΟΤΙ","ΜΟΥ","ΜΠΑ","ΜΠΑΣ","ΜΠΟΥΦΑΝ","ΜΠΡΟΣ","ΝΑΙ","ΝΕΣ","ΝΤΑ","ΝΤΕ","ΞΑΝΑ","ΟΗΕ","ΟΚΤΩ","ΟΜΩΣ","ΟΝΕ","ΟΠΑ","ΟΠΟΥ","ΟΠΩΣ","ΟΣΟ","ΟΤΑΝ","ΟΤΕ","ΟΤΙ","ΟΥΤΕ","ΟΧΙ","ΠΑΛΙ","ΠΑΝ","ΠΑΝΟ","ΠΑΝΤΟΤΕ","ΠΑΝΤΟΥ","ΠΑΝΤΩΣ","ΠΑΝΩ","ΠΑΡΑ","ΠΕΡΑ","ΠΕΡΙ","ΠΕΡΙΠΟΥ","ΠΙΑ","ΠΙΟ","ΠΙΣΩ","ΠΛΑΙ","ΠΛΕΟΝ","ΠΛΗΝ","ΠΟΤΕ","ΠΟΥ","ΠΡΟ","ΠΡΟΣ","ΠΡΟΧΤΕΣ","ΠΡΟΧΘΕΣ","ΡΟΔΙ","ΠΩΣ","ΣΑΙ","ΣΑΣ","ΣΑΝ","ΣΕΙΣ","ΣΙΑ","ΣΚΙ","ΣΟΙ","ΣΟΥ","ΣΡΙ","ΣΥΝ","ΣΥΝΑΜΑ","ΣΧΕΔΟΝ","ΤΑΔΕ","ΤΑΞΙ","ΤΑΧΑ","ΤΕΙ","ΤΗΝ","ΤΗΣ","ΤΙΠΟΤΑ","ΤΙΠΟΤΕ","ΤΙΣ","ΤΟΝ","ΤΟΤΕ","ΤΟΥ","ΤΟΥΣ","ΤΣΑ","ΤΣΕ","ΤΣΙ","ΤΣΟΥ","ΤΩΝ","ΥΠΟ","ΥΠΟΨΗ","ΥΠΟΨΙΝ","ΥΣΤΕΡΑ","ΦΕΤΟΣ","ΦΙΣ","ΦΠΑ","ΧΑΦ","ΧΘΕΣ","ΧΤΕΣ","ΧΩΡΙΣ","ΩΣ","ΩΣΑΝ","ΩΣΟΤΟΥ","ΩΣΠΟΥ","ΩΣΤΕ","ΩΣΤΟΣΟ"],s=new RegExp("^[ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ]+$");return function(e){return"function"==typeof e.update?e.update(function(e){return n(e.toUpperCase()).toLowerCase()}):n(e.toUpperCase()).toLowerCase()}}(),e.Pipeline.registerFunction(e.el.stemmer,"stemmer-el"),e.el.stopWordFilter=e.generateStopWordFilter("αλλα αν αντι απο αυτα αυτεσ αυτη αυτο αυτοι αυτοσ αυτουσ αυτων για δε δεν εαν ειμαι ειμαστε ειναι εισαι ειστε εκεινα εκεινεσ εκεινη εκεινο εκεινοι εκεινοσ εκεινουσ εκεινων ενω επι η θα ισωσ κ και κατα κι μα με μετα μη μην να ο οι ομωσ οπωσ οσο οτι παρα ποια ποιεσ ποιο ποιοι ποιοσ ποιουσ ποιων που προσ πωσ σε στη στην στο στον τα την τησ το τον τοτε του των ωσ".split(" ")),e.Pipeline.registerFunction(e.el.stopWordFilter,"stopWordFilter-el"),e.el.normilizer=function(){var e={"Ά":"Α","ά":"α","Έ":"Ε","έ":"ε","Ή":"Η","ή":"η","Ί":"Ι","ί":"ι","Ό":"Ο","ο":"ο","Ύ":"Υ","ύ":"υ","Ώ":"Ω","ώ":"ω","Ϊ":"Ι","ϊ":"ι","Ϋ":"Υ","ϋ":"υ","ΐ":"ι","ΰ":"υ"};return function(t){if("function"==typeof t.update)return t.update(function(t){for(var r="",n=0;n=A.limit)return!0;A.cursor++}for(A.cursor=i;!A.out_grouping(W,97,246);){if(A.cursor>=A.limit)return!0;A.cursor++}return!1}function t(){return d<=A.cursor}function s(){var i,e;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(h,10)){switch(A.bra=A.cursor,A.limit_backward=e,i){case 
1:if(!A.in_grouping_b(x,97,246))return;break;case 2:if(!t())return}A.slice_del()}else A.limit_backward=e}function o(){var i,e,r;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(v,9))switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:r=A.limit-A.cursor,A.eq_s_b(1,"k")||(A.cursor=A.limit-r,A.slice_del());break;case 2:A.slice_del(),A.ket=A.cursor,A.eq_s_b(3,"kse")&&(A.bra=A.cursor,A.slice_from("ksi"));break;case 3:A.slice_del();break;case 4:A.find_among_b(p,6)&&A.slice_del();break;case 5:A.find_among_b(g,6)&&A.slice_del();break;case 6:A.find_among_b(j,2)&&A.slice_del()}else A.limit_backward=e}function l(){return A.find_among_b(q,7)}function a(){return A.eq_s_b(1,"i")&&A.in_grouping_b(L,97,246)}function u(){var i,e,r;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(C,30)){switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:if(!A.eq_s_b(1,"a"))return;break;case 2:case 9:if(!A.eq_s_b(1,"e"))return;break;case 3:if(!A.eq_s_b(1,"i"))return;break;case 4:if(!A.eq_s_b(1,"o"))return;break;case 5:if(!A.eq_s_b(1,"ä"))return;break;case 6:if(!A.eq_s_b(1,"ö"))return;break;case 7:if(r=A.limit-A.cursor,!l()&&(A.cursor=A.limit-r,!A.eq_s_b(2,"ie"))){A.cursor=A.limit-r;break}if(A.cursor=A.limit-r,A.cursor<=A.limit_backward){A.cursor=A.limit-r;break}A.cursor--,A.bra=A.cursor;break;case 8:if(!A.in_grouping_b(W,97,246)||!A.out_grouping_b(W,97,246))return}A.slice_del(),k=!0}else A.limit_backward=e}function c(){var i,e,r;if(A.cursor>=d)if(e=A.limit_backward,A.limit_backward=d,A.ket=A.cursor,i=A.find_among_b(P,14)){if(A.bra=A.cursor,A.limit_backward=e,1==i){if(r=A.limit-A.cursor,A.eq_s_b(2,"po"))return;A.cursor=A.limit-r}A.slice_del()}else A.limit_backward=e}function m(){var i;A.cursor>=f&&(i=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,A.find_among_b(F,2)?(A.bra=A.cursor,A.limit_backward=i,A.slice_del()):A.limit_backward=i)}function w(){var i,e,r,n,t,s;if(A.cursor>=f){if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,A.eq_s_b(1,"t")&&(A.bra=A.cursor,r=A.limit-A.cursor,A.in_grouping_b(W,97,246)&&(A.cursor=A.limit-r,A.slice_del(),A.limit_backward=e,n=A.limit-A.cursor,A.cursor>=d&&(A.cursor=d,t=A.limit_backward,A.limit_backward=A.cursor,A.cursor=A.limit-n,A.ket=A.cursor,i=A.find_among_b(S,2))))){if(A.bra=A.cursor,A.limit_backward=t,1==i){if(s=A.limit-A.cursor,A.eq_s_b(2,"po"))return;A.cursor=A.limit-s}return void A.slice_del()}A.limit_backward=e}}function _(){var i,e,r,n;if(A.cursor>=f){for(i=A.limit_backward,A.limit_backward=f,e=A.limit-A.cursor,l()&&(A.cursor=A.limit-e,A.ket=A.cursor,A.cursor>A.limit_backward&&(A.cursor--,A.bra=A.cursor,A.slice_del())),A.cursor=A.limit-e,A.ket=A.cursor,A.in_grouping_b(y,97,228)&&(A.bra=A.cursor,A.out_grouping_b(W,97,246)&&A.slice_del()),A.cursor=A.limit-e,A.ket=A.cursor,A.eq_s_b(1,"j")&&(A.bra=A.cursor,r=A.limit-A.cursor,A.eq_s_b(1,"o")?A.slice_del():(A.cursor=A.limit-r,A.eq_s_b(1,"u")&&A.slice_del())),A.cursor=A.limit-e,A.ket=A.cursor,A.eq_s_b(1,"o")&&(A.bra=A.cursor,A.eq_s_b(1,"j")&&A.slice_del()),A.cursor=A.limit-e,A.limit_backward=i;;){if(n=A.limit-A.cursor,A.out_grouping_b(W,97,246)){A.cursor=A.limit-n;break}if(A.cursor=A.limit-n,A.cursor<=A.limit_backward)return;A.cursor--}A.ket=A.cursor,A.cursor>A.limit_backward&&(A.cursor--,A.bra=A.cursor,b=A.slice_to(),A.eq_v_b(b)&&A.slice_del())}}var k,b,d,f,h=[new e("pa",-1,1),new e("sti",-1,2),new e("kaan",-1,1),new e("han",-1,1),new e("kin",-1,1),new e("hän",-1,1),new e("kään",-1,1),new e("ko",-1,1),new e("pä",-1,1),new e("kö",-1,1)],p=[new 
e("lla",-1,-1),new e("na",-1,-1),new e("ssa",-1,-1),new e("ta",-1,-1),new e("lta",3,-1),new e("sta",3,-1)],g=[new e("llä",-1,-1),new e("nä",-1,-1),new e("ssä",-1,-1),new e("tä",-1,-1),new e("ltä",3,-1),new e("stä",3,-1)],j=[new e("lle",-1,-1),new e("ine",-1,-1)],v=[new e("nsa",-1,3),new e("mme",-1,3),new e("nne",-1,3),new e("ni",-1,2),new e("si",-1,1),new e("an",-1,4),new e("en",-1,6),new e("än",-1,5),new e("nsä",-1,3)],q=[new e("aa",-1,-1),new e("ee",-1,-1),new e("ii",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1),new e("ää",-1,-1),new e("öö",-1,-1)],C=[new e("a",-1,8),new e("lla",0,-1),new e("na",0,-1),new e("ssa",0,-1),new e("ta",0,-1),new e("lta",4,-1),new e("sta",4,-1),new e("tta",4,9),new e("lle",-1,-1),new e("ine",-1,-1),new e("ksi",-1,-1),new e("n",-1,7),new e("han",11,1),new e("den",11,-1,a),new e("seen",11,-1,l),new e("hen",11,2),new e("tten",11,-1,a),new e("hin",11,3),new e("siin",11,-1,a),new e("hon",11,4),new e("hän",11,5),new e("hön",11,6),new e("ä",-1,8),new e("llä",22,-1),new e("nä",22,-1),new e("ssä",22,-1),new e("tä",22,-1),new e("ltä",26,-1),new e("stä",26,-1),new e("ttä",26,9)],P=[new e("eja",-1,-1),new e("mma",-1,1),new e("imma",1,-1),new e("mpa",-1,1),new e("impa",3,-1),new e("mmi",-1,1),new e("immi",5,-1),new e("mpi",-1,1),new e("impi",7,-1),new e("ejä",-1,-1),new e("mmä",-1,1),new e("immä",10,-1),new e("mpä",-1,1),new e("impä",12,-1)],F=[new e("i",-1,-1),new e("j",-1,-1)],S=[new e("mma",-1,1),new e("imma",0,-1)],y=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8],W=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],L=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],x=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],A=new r;this.setCurrent=function(i){A.setCurrent(i)},this.getCurrent=function(){return A.getCurrent()},this.stem=function(){var e=A.cursor;return i(),k=!1,A.limit_backward=e,A.cursor=A.limit,s(),A.cursor=A.limit,o(),A.cursor=A.limit,u(),A.cursor=A.limit,c(),A.cursor=A.limit,k?(m(),A.cursor=A.limit):(A.cursor=A.limit,w(),A.cursor=A.limit),_(),!0}};return function(i){return"function"==typeof i.update?i.update(function(i){return n.setCurrent(i),n.stem(),n.getCurrent()}):(n.setCurrent(i),n.stem(),n.getCurrent())}}(),i.Pipeline.registerFunction(i.fi.stemmer,"stemmer-fi"),i.fi.stopWordFilter=i.generateStopWordFilter("ei eivät emme en et ette että he heidän heidät heihin heille heillä heiltä heissä heistä heitä hän häneen hänelle hänellä häneltä hänen hänessä hänestä hänet häntä itse ja johon joiden joihin joiksi joilla joille joilta joina joissa joista joita joka joksi jolla jolle jolta jona jonka jos jossa josta jota jotka kanssa keiden keihin keiksi keille keillä keiltä keinä keissä keistä keitä keneen keneksi kenelle kenellä keneltä kenen kenenä kenessä kenestä kenet ketkä ketkä ketä koska kuin kuka kun me meidän meidät meihin meille meillä meiltä meissä meistä meitä mihin miksi mikä mille millä miltä minkä minkä minua minulla minulle minulta minun minussa minusta minut minuun minä minä missä mistä mitkä mitä mukaan mutta ne niiden niihin niiksi niille niillä niiltä niin niin niinä niissä niistä niitä noiden noihin noiksi noilla noille noilta noin noina noissa noista noita nuo nyt näiden näihin näiksi näille näillä näiltä näinä näissä näistä näitä nämä ole olemme olen olet olette oli olimme olin olisi olisimme olisin olisit olisitte olisivat olit olitte olivat olla olleet ollut on ovat poikki se sekä sen siihen siinä siitä siksi sille sillä sillä siltä sinua sinulla sinulle sinulta sinun sinussa sinusta sinut sinuun sinä sinä sitä tai te teidän teidät teihin teille teillä teiltä teissä 
teistä teitä tuo tuohon tuoksi tuolla tuolle tuolta tuon tuona tuossa tuosta tuota tähän täksi tälle tällä tältä tämä tämän tänä tässä tästä tätä vaan vai vaikka yli".split(" ")),i.Pipeline.registerFunction(i.fi.stopWordFilter,"stopWordFilter-fi")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.fr.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `French` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.fr=function(){this.pipeline.reset(),this.pipeline.add(e.fr.trimmer,e.fr.stopWordFilter,e.fr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.fr.stemmer))},e.fr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.fr.trimmer=e.trimmerSupport.generateTrimmer(e.fr.wordCharacters),e.Pipeline.registerFunction(e.fr.trimmer,"trimmer-fr"),e.fr.stemmer=function(){var r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,s){return!(!W.eq_s(1,e)||(W.ket=W.cursor,!W.in_grouping(F,97,251)))&&(W.slice_from(r),W.cursor=s,!0)}function i(e,r,s){return!!W.eq_s(1,e)&&(W.ket=W.cursor,W.slice_from(r),W.cursor=s,!0)}function n(){for(var r,s;;){if(r=W.cursor,W.in_grouping(F,97,251)){if(W.bra=W.cursor,s=W.cursor,e("u","U",r))continue;if(W.cursor=s,e("i","I",r))continue;if(W.cursor=s,i("y","Y",r))continue}if(W.cursor=r,W.bra=r,!e("y","Y",r)){if(W.cursor=r,W.eq_s(1,"q")&&(W.bra=W.cursor,i("u","U",r)))continue;if(W.cursor=r,r>=W.limit)return;W.cursor++}}}function t(){for(;!W.in_grouping(F,97,251);){if(W.cursor>=W.limit)return!0;W.cursor++}for(;!W.out_grouping(F,97,251);){if(W.cursor>=W.limit)return!0;W.cursor++}return!1}function u(){var e=W.cursor;if(q=W.limit,g=q,p=q,W.in_grouping(F,97,251)&&W.in_grouping(F,97,251)&&W.cursor=W.limit){W.cursor=q;break}W.cursor++}while(!W.in_grouping(F,97,251))}q=W.cursor,W.cursor=e,t()||(g=W.cursor,t()||(p=W.cursor))}function o(){for(var e,r;;){if(r=W.cursor,W.bra=r,!(e=W.find_among(h,4)))break;switch(W.ket=W.cursor,e){case 1:W.slice_from("i");break;case 2:W.slice_from("u");break;case 3:W.slice_from("y");break;case 4:if(W.cursor>=W.limit)return;W.cursor++}}}function c(){return q<=W.cursor}function a(){return g<=W.cursor}function l(){return p<=W.cursor}function w(){var e,r;if(W.ket=W.cursor,e=W.find_among_b(C,43)){switch(W.bra=W.cursor,e){case 1:if(!l())return!1;W.slice_del();break;case 2:if(!l())return!1;W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"ic")&&(W.bra=W.cursor,l()?W.slice_del():W.slice_from("iqU"));break;case 3:if(!l())return!1;W.slice_from("log");break;case 4:if(!l())return!1;W.slice_from("u");break;case 5:if(!l())return!1;W.slice_from("ent");break;case 
6:if(!c())return!1;if(W.slice_del(),W.ket=W.cursor,e=W.find_among_b(z,6))switch(W.bra=W.cursor,e){case 1:l()&&(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"at")&&(W.bra=W.cursor,l()&&W.slice_del()));break;case 2:l()?W.slice_del():a()&&W.slice_from("eux");break;case 3:l()&&W.slice_del();break;case 4:c()&&W.slice_from("i")}break;case 7:if(!l())return!1;if(W.slice_del(),W.ket=W.cursor,e=W.find_among_b(y,3))switch(W.bra=W.cursor,e){case 1:l()?W.slice_del():W.slice_from("abl");break;case 2:l()?W.slice_del():W.slice_from("iqU");break;case 3:l()&&W.slice_del()}break;case 8:if(!l())return!1;if(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"at")&&(W.bra=W.cursor,l()&&(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"ic")))){W.bra=W.cursor,l()?W.slice_del():W.slice_from("iqU");break}break;case 9:W.slice_from("eau");break;case 10:if(!a())return!1;W.slice_from("al");break;case 11:if(l())W.slice_del();else{if(!a())return!1;W.slice_from("eux")}break;case 12:if(!a()||!W.out_grouping_b(F,97,251))return!1;W.slice_del();break;case 13:return c()&&W.slice_from("ant"),!1;case 14:return c()&&W.slice_from("ent"),!1;case 15:return r=W.limit-W.cursor,W.in_grouping_b(F,97,251)&&c()&&(W.cursor=W.limit-r,W.slice_del()),!1}return!0}return!1}function f(){var e,r;if(W.cursor=q){if(s=W.limit_backward,W.limit_backward=q,W.ket=W.cursor,e=W.find_among_b(P,7))switch(W.bra=W.cursor,e){case 1:if(l()){if(i=W.limit-W.cursor,!W.eq_s_b(1,"s")&&(W.cursor=W.limit-i,!W.eq_s_b(1,"t")))break;W.slice_del()}break;case 2:W.slice_from("i");break;case 3:W.slice_del();break;case 4:W.eq_s_b(2,"gu")&&W.slice_del()}W.limit_backward=s}}function b(){var e=W.limit-W.cursor;W.find_among_b(U,5)&&(W.cursor=W.limit-e,W.ket=W.cursor,W.cursor>W.limit_backward&&(W.cursor--,W.bra=W.cursor,W.slice_del()))}function d(){for(var e,r=1;W.out_grouping_b(F,97,251);)r--;if(r<=0){if(W.ket=W.cursor,e=W.limit-W.cursor,!W.eq_s_b(1,"é")&&(W.cursor=W.limit-e,!W.eq_s_b(1,"è")))return;W.bra=W.cursor,W.slice_from("e")}}function k(){if(!w()&&(W.cursor=W.limit,!f()&&(W.cursor=W.limit,!m())))return W.cursor=W.limit,void _();W.cursor=W.limit,W.ket=W.cursor,W.eq_s_b(1,"Y")?(W.bra=W.cursor,W.slice_from("i")):(W.cursor=W.limit,W.eq_s_b(1,"ç")&&(W.bra=W.cursor,W.slice_from("c")))}var p,g,q,v=[new r("col",-1,-1),new r("par",-1,-1),new r("tap",-1,-1)],h=[new r("",-1,4),new r("I",0,1),new r("U",0,2),new r("Y",0,3)],z=[new r("iqU",-1,3),new r("abl",-1,3),new r("Ièr",-1,4),new r("ièr",-1,4),new r("eus",-1,2),new r("iv",-1,1)],y=[new r("ic",-1,2),new r("abil",-1,1),new r("iv",-1,3)],C=[new r("iqUe",-1,1),new r("atrice",-1,2),new r("ance",-1,1),new r("ence",-1,5),new r("logie",-1,3),new r("able",-1,1),new r("isme",-1,1),new r("euse",-1,11),new r("iste",-1,1),new r("ive",-1,8),new r("if",-1,8),new r("usion",-1,4),new r("ation",-1,2),new r("ution",-1,4),new r("ateur",-1,2),new r("iqUes",-1,1),new r("atrices",-1,2),new r("ances",-1,1),new r("ences",-1,5),new r("logies",-1,3),new r("ables",-1,1),new r("ismes",-1,1),new r("euses",-1,11),new r("istes",-1,1),new r("ives",-1,8),new r("ifs",-1,8),new r("usions",-1,4),new r("ations",-1,2),new r("utions",-1,4),new r("ateurs",-1,2),new r("ments",-1,15),new r("ements",30,6),new r("issements",31,12),new r("ités",-1,7),new r("ment",-1,15),new r("ement",34,6),new r("issement",35,12),new r("amment",34,13),new r("emment",34,14),new r("aux",-1,10),new r("eaux",39,9),new r("eux",-1,1),new r("ité",-1,7)],x=[new r("ira",-1,1),new r("ie",-1,1),new r("isse",-1,1),new r("issante",-1,1),new r("i",-1,1),new r("irai",4,1),new r("ir",-1,1),new r("iras",-1,1),new r("ies",-1,1),new 
r("îmes",-1,1),new r("isses",-1,1),new r("issantes",-1,1),new r("îtes",-1,1),new r("is",-1,1),new r("irais",13,1),new r("issais",13,1),new r("irions",-1,1),new r("issions",-1,1),new r("irons",-1,1),new r("issons",-1,1),new r("issants",-1,1),new r("it",-1,1),new r("irait",21,1),new r("issait",21,1),new r("issant",-1,1),new r("iraIent",-1,1),new r("issaIent",-1,1),new r("irent",-1,1),new r("issent",-1,1),new r("iront",-1,1),new r("ît",-1,1),new r("iriez",-1,1),new r("issiez",-1,1),new r("irez",-1,1),new r("issez",-1,1)],I=[new r("a",-1,3),new r("era",0,2),new r("asse",-1,3),new r("ante",-1,3),new r("ée",-1,2),new r("ai",-1,3),new r("erai",5,2),new r("er",-1,2),new r("as",-1,3),new r("eras",8,2),new r("âmes",-1,3),new r("asses",-1,3),new r("antes",-1,3),new r("âtes",-1,3),new r("ées",-1,2),new r("ais",-1,3),new r("erais",15,2),new r("ions",-1,1),new r("erions",17,2),new r("assions",17,3),new r("erons",-1,2),new r("ants",-1,3),new r("és",-1,2),new r("ait",-1,3),new r("erait",23,2),new r("ant",-1,3),new r("aIent",-1,3),new r("eraIent",26,2),new r("èrent",-1,2),new r("assent",-1,3),new r("eront",-1,2),new r("ât",-1,3),new r("ez",-1,2),new r("iez",32,2),new r("eriez",33,2),new r("assiez",33,3),new r("erez",32,2),new r("é",-1,2)],P=[new r("e",-1,3),new r("Ière",0,2),new r("ière",0,2),new r("ion",-1,1),new r("Ier",-1,2),new r("ier",-1,2),new r("ë",-1,4)],U=[new r("ell",-1,-1),new r("eill",-1,-1),new r("enn",-1,-1),new r("onn",-1,-1),new r("ett",-1,-1)],F=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],S=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],W=new s;this.setCurrent=function(e){W.setCurrent(e)},this.getCurrent=function(){return W.getCurrent()},this.stem=function(){var e=W.cursor;return n(),W.cursor=e,u(),W.limit_backward=e,W.cursor=W.limit,k(),W.cursor=W.limit,b(),W.cursor=W.limit,d(),W.cursor=W.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.fr.stemmer,"stemmer-fr"),e.fr.stopWordFilter=e.generateStopWordFilter("ai aie aient aies ait as au aura aurai auraient aurais aurait auras aurez auriez aurions aurons auront aux avaient avais avait avec avez aviez avions avons ayant ayez ayons c ce ceci celà ces cet cette d dans de des du elle en es est et eu eue eues eurent eus eusse eussent eusses eussiez eussions eut eux eûmes eût eûtes furent fus fusse fussent fusses fussiez fussions fut fûmes fût fûtes ici il ils j je l la le les leur leurs lui m ma mais me mes moi mon même n ne nos notre nous on ont ou par pas pour qu que quel quelle quelles quels qui s sa sans se sera serai seraient serais serait seras serez seriez serions serons seront ses soi soient sois soit sommes son sont soyez soyons suis sur t ta te tes toi ton tu un une vos votre vous y à étaient étais était étant étiez étions été étée étées étés êtes".split(" ")),e.Pipeline.registerFunction(e.fr.stopWordFilter,"stopWordFilter-fr")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.he.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. 
Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.he=function(){this.pipeline.reset(),this.pipeline.add(e.he.trimmer,e.he.stopWordFilter,e.he.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.he.stemmer))},e.he.wordCharacters="֑-״א-תa-zA-Za-zA-Z0-90-9",e.he.trimmer=e.trimmerSupport.generateTrimmer(e.he.wordCharacters),e.Pipeline.registerFunction(e.he.trimmer,"trimmer-he"),e.he.stemmer=function(){var e=this;return e.result=!1,e.preRemoved=!1,e.sufRemoved=!1,e.pre={pre1:"ה ו י ת",pre2:"ב כ ל מ ש כש",pre3:"הב הכ הל המ הש בש לכ",pre4:"וב וכ ול ומ וש",pre5:"מה שה כל",pre6:"מב מכ מל ממ מש",pre7:"בה בו בי בת כה כו כי כת לה לו לי לת",pre8:"ובה ובו ובי ובת וכה וכו וכי וכת ולה ולו ולי ולת"},e.suf={suf1:"ך כ ם ן נ",suf2:"ים ות וך וכ ום ון ונ הם הן יכ יך ינ ים",suf3:"תי תך תכ תם תן תנ",suf4:"ותי ותך ותכ ותם ותן ותנ",suf5:"נו כם כן הם הן",suf6:"ונו וכם וכן והם והן",suf7:"תכם תכן תנו תהם תהן",suf8:"הוא היא הם הן אני אתה את אנו אתם אתן",suf9:"ני נו כי כו כם כן תי תך תכ תם תן",suf10:"י ך כ ם ן נ ת"},e.patterns=JSON.parse('{"hebrewPatterns": [{"pt1": [{"c": "ה", "l": 0}]}, {"pt2": [{"c": "ו", "l": 0}]}, {"pt3": [{"c": "י", "l": 0}]}, {"pt4": [{"c": "ת", "l": 0}]}, {"pt5": [{"c": "מ", "l": 0}]}, {"pt6": [{"c": "ל", "l": 0}]}, {"pt7": [{"c": "ב", "l": 0}]}, {"pt8": [{"c": "כ", "l": 0}]}, {"pt9": [{"c": "ש", "l": 0}]}, {"pt10": [{"c": "כש", "l": 0}]}, {"pt11": [{"c": "בה", "l": 0}]}, {"pt12": [{"c": "וב", "l": 0}]}, {"pt13": [{"c": "וכ", "l": 0}]}, {"pt14": [{"c": "ול", "l": 0}]}, {"pt15": [{"c": "ומ", "l": 0}]}, {"pt16": [{"c": "וש", "l": 0}]}, {"pt17": [{"c": "הב", "l": 0}]}, {"pt18": [{"c": "הכ", "l": 0}]}, {"pt19": [{"c": "הל", "l": 0}]}, {"pt20": [{"c": "המ", "l": 0}]}, {"pt21": [{"c": "הש", "l": 0}]}, {"pt22": [{"c": "מה", "l": 0}]}, {"pt23": [{"c": "שה", "l": 0}]}, {"pt24": [{"c": "כל", "l": 0}]}]}'),e.execArray=["cleanWord","removeDiacritics","removeStopWords","normalizeHebrewCharacters"],e.stem=function(){var r=0;for(e.result=!1,e.preRemoved=!1,e.sufRemoved=!1;r=0)return!0},e.normalizeHebrewCharacters=function(){return e.word=e.word.replace("ך","כ"),e.word=e.word.replace("ם","מ"),e.word=e.word.replace("ן","נ"),e.word=e.word.replace("ף","פ"),e.word=e.word.replace("ץ","צ"),!1},function(r){return"function"==typeof r.update?r.update(function(r){return e.setCurrent(r),e.stem(),e.getCurrent()}):(e.setCurrent(r),e.stem(),e.getCurrent())}}(),e.Pipeline.registerFunction(e.he.stemmer,"stemmer-he"),e.he.stopWordFilter=e.generateStopWordFilter("אבל או אולי אותו אותי אותך אותם אותן אותנו אז אחר אחרות אחרי אחריכן אחרים אחרת אי איזה איך אין איפה אל אלה אלו אם אנחנו אני אף אפשר את אתה אתכם אתכן אתם אתן באיזה באיזו בגלל בין בלבד בעבור בעזרת בכל בכן בלי במידה במקום שבו ברוב בשביל בשעה ש בתוך גם דרך הוא היא היה היי היכן היתה היתי הם הן הנה הסיבה שבגללה הרי ואילו ואת זאת זה זות יהיה יוכל יוכלו יותר מדי יכול יכולה יכולות יכולים יכל יכלה יכלו יש כאן כאשר כולם כולן כזה כי כיצד כך כל כלל כמו כן כפי כש לא לאו לאיזותך לאן לבין לה להיות להם להן לו לזה לזות לי לך לכם לכן למה למעלה למעלה מ למטה למטה מ למעט למקום שבו למרות לנו לעבר לעיכן לפיכך לפני מאד מאחורי מאיזו סיבה מאין מאיפה מבלי מבעד מדוע מה מהיכן מול מחוץ מי מידע מכאן מכל מכן מלבד מן מנין מסוגל מעט מעטים מעל מצד מקום בו מתחת מתי נגד נגר נו עד עז על עלי עליו עליה עליהם עליך עלינו עם עצמה עצמהם עצמהן עצמו עצמי עצמם עצמן עצמנו פה רק שוב של שלה שלהם שלהן שלו שלי שלך שלכה שלכם שלכן שלנו שם 
תהיה תחת".split(" ")),e.Pipeline.registerFunction(e.he.stopWordFilter,"stopWordFilter-he")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.hi.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hi=function(){this.pipeline.reset(),this.pipeline.add(e.hi.trimmer,e.hi.stopWordFilter,e.hi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hi.stemmer))},e.hi.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿa-zA-Za-zA-Z0-90-9",e.hi.trimmer=e.trimmerSupport.generateTrimmer(e.hi.wordCharacters),e.Pipeline.registerFunction(e.hi.trimmer,"trimmer-hi"),e.hi.stopWordFilter=e.generateStopWordFilter("अत अपना अपनी अपने अभी अंदर आदि आप इत्यादि इन इनका इन्हीं इन्हें इन्हों इस इसका इसकी इसके इसमें इसी इसे उन उनका उनकी उनके उनको उन्हीं उन्हें उन्हों उस उसके उसी उसे एक एवं एस ऐसे और कई कर करता करते करना करने करें कहते कहा का काफ़ी कि कितना किन्हें किन्हों किया किर किस किसी किसे की कुछ कुल के को कोई कौन कौनसा गया घर जब जहाँ जा जितना जिन जिन्हें जिन्हों जिस जिसे जीधर जैसा जैसे जो तक तब तरह तिन तिन्हें तिन्हों तिस तिसे तो था थी थे दबारा दिया दुसरा दूसरे दो द्वारा न नके नहीं ना निहायत नीचे ने पर पहले पूरा पे फिर बनी बही बहुत बाद बाला बिलकुल भी भीतर मगर मानो मे में यदि यह यहाँ यही या यिह ये रखें रहा रहे ऱ्वासा लिए लिये लेकिन व वग़ैरह वर्ग वह वहाँ वहीं वाले वुह वे वो सकता सकते सबसे सभी साथ साबुत साभ सारा से सो संग ही हुआ हुई हुए है हैं हो होता होती होते होना होने".split(" ")),e.hi.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.hi.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var t=i.toString().toLowerCase().replace(/^\s+/,"");return r.cut(t).split("|")},e.Pipeline.registerFunction(e.hi.stemmer,"stemmer-hi"),e.Pipeline.registerFunction(e.hi.stopWordFilter,"stopWordFilter-hi")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.hu.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Hungarian` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.hu=function(){this.pipeline.reset(),this.pipeline.add(e.hu.trimmer,e.hu.stopWordFilter,e.hu.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hu.stemmer))},e.hu.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.hu.trimmer=e.trimmerSupport.generateTrimmer(e.hu.wordCharacters),e.Pipeline.registerFunction(e.hu.trimmer,"trimmer-hu"),e.hu.stemmer=function(){var n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,n=L.cursor;if(d=L.limit,L.in_grouping(W,97,252))for(;;){if(e=L.cursor,L.out_grouping(W,97,252))return L.cursor=e,L.find_among(g,8)||(L.cursor=e,e=L.limit)return void(d=e);L.cursor++}if(L.cursor=n,L.out_grouping(W,97,252)){for(;!L.in_grouping(W,97,252);){if(L.cursor>=L.limit)return;L.cursor++}d=L.cursor}}function i(){return d<=L.cursor}function a(){var e;if(L.ket=L.cursor,(e=L.find_among_b(h,2))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("a");break;case 2:L.slice_from("e")}}function t(){var e=L.limit-L.cursor;return!!L.find_among_b(p,23)&&(L.cursor=L.limit-e,!0)}function s(){if(L.cursor>L.limit_backward){L.cursor--,L.ket=L.cursor;var e=L.cursor-1;L.limit_backward<=e&&e<=L.limit&&(L.cursor=e,L.bra=e,L.slice_del())}}function c(){var e;if(L.ket=L.cursor,(e=L.find_among_b(_,2))&&(L.bra=L.cursor,i())){if((1==e||2==e)&&!t())return;L.slice_del(),s()}}function o(){L.ket=L.cursor,L.find_among_b(v,44)&&(L.bra=L.cursor,i()&&(L.slice_del(),a()))}function w(){var e;if(L.ket=L.cursor,(e=L.find_among_b(z,3))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("e");break;case 2:case 3:L.slice_from("a")}}function l(){var e;if(L.ket=L.cursor,(e=L.find_among_b(y,6))&&(L.bra=L.cursor,i()))switch(e){case 1:case 2:L.slice_del();break;case 3:L.slice_from("a");break;case 4:L.slice_from("e")}}function u(){var e;if(L.ket=L.cursor,(e=L.find_among_b(j,2))&&(L.bra=L.cursor,i())){if((1==e||2==e)&&!t())return;L.slice_del(),s()}}function m(){var e;if(L.ket=L.cursor,(e=L.find_among_b(C,7))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("a");break;case 2:L.slice_from("e");break;case 3:case 4:case 5:case 6:case 7:L.slice_del()}}function k(){var e;if(L.ket=L.cursor,(e=L.find_among_b(P,12))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 7:case 9:L.slice_del();break;case 2:case 5:case 8:L.slice_from("e");break;case 3:case 6:L.slice_from("a")}}function f(){var e;if(L.ket=L.cursor,(e=L.find_among_b(F,31))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 7:case 8:case 9:case 12:case 13:case 16:case 17:case 18:L.slice_del();break;case 2:case 5:case 10:case 14:case 19:L.slice_from("a");break;case 3:case 6:case 11:case 15:case 20:L.slice_from("e")}}function b(){var e;if(L.ket=L.cursor,(e=L.find_among_b(S,42))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 5:case 6:case 9:case 10:case 11:case 14:case 15:case 16:case 17:case 20:case 21:case 24:case 25:case 26:case 29:L.slice_del();break;case 2:case 7:case 12:case 18:case 22:case 27:L.slice_from("a");break;case 3:case 8:case 13:case 19:case 23:case 28:L.slice_from("e")}}var d,g=[new n("cs",-1,-1),new n("dzs",-1,-1),new n("gy",-1,-1),new n("ly",-1,-1),new n("ny",-1,-1),new n("sz",-1,-1),new n("ty",-1,-1),new n("zs",-1,-1)],h=[new n("á",-1,1),new n("é",-1,2)],p=[new n("bb",-1,-1),new n("cc",-1,-1),new n("dd",-1,-1),new n("ff",-1,-1),new n("gg",-1,-1),new n("jj",-1,-1),new n("kk",-1,-1),new n("ll",-1,-1),new n("mm",-1,-1),new n("nn",-1,-1),new n("pp",-1,-1),new 
n("rr",-1,-1),new n("ccs",-1,-1),new n("ss",-1,-1),new n("zzs",-1,-1),new n("tt",-1,-1),new n("vv",-1,-1),new n("ggy",-1,-1),new n("lly",-1,-1),new n("nny",-1,-1),new n("tty",-1,-1),new n("ssz",-1,-1),new n("zz",-1,-1)],_=[new n("al",-1,1),new n("el",-1,2)],v=[new n("ba",-1,-1),new n("ra",-1,-1),new n("be",-1,-1),new n("re",-1,-1),new n("ig",-1,-1),new n("nak",-1,-1),new n("nek",-1,-1),new n("val",-1,-1),new n("vel",-1,-1),new n("ul",-1,-1),new n("nál",-1,-1),new n("nél",-1,-1),new n("ból",-1,-1),new n("ról",-1,-1),new n("tól",-1,-1),new n("bõl",-1,-1),new n("rõl",-1,-1),new n("tõl",-1,-1),new n("ül",-1,-1),new n("n",-1,-1),new n("an",19,-1),new n("ban",20,-1),new n("en",19,-1),new n("ben",22,-1),new n("képpen",22,-1),new n("on",19,-1),new n("ön",19,-1),new n("képp",-1,-1),new n("kor",-1,-1),new n("t",-1,-1),new n("at",29,-1),new n("et",29,-1),new n("ként",29,-1),new n("anként",32,-1),new n("enként",32,-1),new n("onként",32,-1),new n("ot",29,-1),new n("ért",29,-1),new n("öt",29,-1),new n("hez",-1,-1),new n("hoz",-1,-1),new n("höz",-1,-1),new n("vá",-1,-1),new n("vé",-1,-1)],z=[new n("án",-1,2),new n("én",-1,1),new n("ánként",-1,3)],y=[new n("stul",-1,2),new n("astul",0,1),new n("ástul",0,3),new n("stül",-1,2),new n("estül",3,1),new n("éstül",3,4)],j=[new n("á",-1,1),new n("é",-1,2)],C=[new n("k",-1,7),new n("ak",0,4),new n("ek",0,6),new n("ok",0,5),new n("ák",0,1),new n("ék",0,2),new n("ök",0,3)],P=[new n("éi",-1,7),new n("áéi",0,6),new n("ééi",0,5),new n("é",-1,9),new n("ké",3,4),new n("aké",4,1),new n("eké",4,1),new n("oké",4,1),new n("áké",4,3),new n("éké",4,2),new n("öké",4,1),new n("éé",3,8)],F=[new n("a",-1,18),new n("ja",0,17),new n("d",-1,16),new n("ad",2,13),new n("ed",2,13),new n("od",2,13),new n("ád",2,14),new n("éd",2,15),new n("öd",2,13),new n("e",-1,18),new n("je",9,17),new n("nk",-1,4),new n("unk",11,1),new n("ánk",11,2),new n("énk",11,3),new n("ünk",11,1),new n("uk",-1,8),new n("juk",16,7),new n("ájuk",17,5),new n("ük",-1,8),new n("jük",19,7),new n("éjük",20,6),new n("m",-1,12),new n("am",22,9),new n("em",22,9),new n("om",22,9),new n("ám",22,10),new n("ém",22,11),new n("o",-1,18),new n("á",-1,19),new n("é",-1,20)],S=[new n("id",-1,10),new n("aid",0,9),new n("jaid",1,6),new n("eid",0,9),new n("jeid",3,6),new n("áid",0,7),new n("éid",0,8),new n("i",-1,15),new n("ai",7,14),new n("jai",8,11),new n("ei",7,14),new n("jei",10,11),new n("ái",7,12),new n("éi",7,13),new n("itek",-1,24),new n("eitek",14,21),new n("jeitek",15,20),new n("éitek",14,23),new n("ik",-1,29),new n("aik",18,26),new n("jaik",19,25),new n("eik",18,26),new n("jeik",21,25),new n("áik",18,27),new n("éik",18,28),new n("ink",-1,20),new n("aink",25,17),new n("jaink",26,16),new n("eink",25,17),new n("jeink",28,16),new n("áink",25,18),new n("éink",25,19),new n("aitok",-1,21),new n("jaitok",32,20),new n("áitok",-1,22),new n("im",-1,5),new n("aim",35,4),new n("jaim",36,1),new n("eim",35,4),new n("jeim",38,1),new n("áim",35,2),new n("éim",35,3)],W=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,52,14],L=new r;this.setCurrent=function(e){L.setCurrent(e)},this.getCurrent=function(){return L.getCurrent()},this.stem=function(){var n=L.cursor;return e(),L.limit_backward=n,L.cursor=L.limit,c(),L.cursor=L.limit,o(),L.cursor=L.limit,w(),L.cursor=L.limit,l(),L.cursor=L.limit,u(),L.cursor=L.limit,k(),L.cursor=L.limit,f(),L.cursor=L.limit,b(),L.cursor=L.limit,m(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return 
i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.hu.stemmer,"stemmer-hu"),e.hu.stopWordFilter=e.generateStopWordFilter("a abban ahhoz ahogy ahol aki akik akkor alatt amely amelyek amelyekben amelyeket amelyet amelynek ami amikor amit amolyan amíg annak arra arról az azok azon azonban azt aztán azután azzal azért be belül benne bár cikk cikkek cikkeket csak de e ebben eddig egy egyes egyetlen egyik egyre egyéb egész ehhez ekkor el ellen elsõ elég elõ elõször elõtt emilyen ennek erre ez ezek ezen ezt ezzel ezért fel felé hanem hiszen hogy hogyan igen ill ill. illetve ilyen ilyenkor ismét ison itt jobban jó jól kell kellett keressünk keresztül ki kívül között közül legalább legyen lehet lehetett lenne lenni lesz lett maga magát majd majd meg mellett mely melyek mert mi mikor milyen minden mindenki mindent mindig mint mintha mit mivel miért most már más másik még míg nagy nagyobb nagyon ne nekem neki nem nincs néha néhány nélkül olyan ott pedig persze rá s saját sem semmi sok sokat sokkal szemben szerint szinte számára talán tehát teljes tovább továbbá több ugyanis utolsó után utána vagy vagyis vagyok valaki valami valamint való van vannak vele vissza viszont volna volt voltak voltam voltunk által általában át én éppen és így õ õk õket össze úgy új újabb újra".split(" ")),e.Pipeline.registerFunction(e.hu.stopWordFilter,"stopWordFilter-hu")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.hy.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hy=function(){this.pipeline.reset(),this.pipeline.add(e.hy.trimmer,e.hy.stopWordFilter)},e.hy.wordCharacters="[A-Za-z԰-֏ff-ﭏ]",e.hy.trimmer=e.trimmerSupport.generateTrimmer(e.hy.wordCharacters),e.Pipeline.registerFunction(e.hy.trimmer,"trimmer-hy"),e.hy.stopWordFilter=e.generateStopWordFilter("դու և եք էիր էիք հետո նաև նրանք որը վրա է որ պիտի են այս մեջ ն իր ու ի այդ որոնք այն կամ էր մի ես համար այլ իսկ էին ենք հետ ին թ էինք մենք նրա նա դուք եմ էի ըստ որպես ում".split(" ")),e.Pipeline.registerFunction(e.hy.stopWordFilter,"stopWordFilter-hy"),e.hy.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}(),e.Pipeline.registerFunction(e.hy.stemmer,"stemmer-hy")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.ja.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.ja=function(){this.pipeline.reset(),this.pipeline.add(e.ja.trimmer,e.ja.stopWordFilter,e.ja.stemmer),r?this.tokenizer=e.ja.tokenizer:(e.tokenizer&&(e.tokenizer=e.ja.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.ja.tokenizer))};var t=new e.TinySegmenter;e.ja.tokenizer=function(i){var n,o,s,p,a,u,m,l,c,f;if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(o=i.toString().toLowerCase().replace(/^\s+/,""),n=o.length-1;n>=0;n--)if(/\S/.test(o.charAt(n))){o=o.substring(0,n+1);break}for(a=[],s=o.length,c=0,l=0;c<=s;c++)if(u=o.charAt(c),m=c-l,u.match(/\s/)||c==s){if(m>0)for(p=t.segment(o.slice(l,c)).filter(function(e){return!!e}),f=l,n=0;n=C.limit)break;C.cursor++;continue}break}for(C.cursor=o,C.bra=o,C.eq_s(1,"y")?(C.ket=C.cursor,C.slice_from("Y")):C.cursor=o;;)if(e=C.cursor,C.in_grouping(q,97,232)){if(i=C.cursor,C.bra=i,C.eq_s(1,"i"))C.ket=C.cursor,C.in_grouping(q,97,232)&&(C.slice_from("I"),C.cursor=e);else if(C.cursor=i,C.eq_s(1,"y"))C.ket=C.cursor,C.slice_from("Y"),C.cursor=e;else if(n(e))break}else if(n(e))break}function n(r){return C.cursor=r,r>=C.limit||(C.cursor++,!1)}function o(){_=C.limit,d=_,t()||(_=C.cursor,_<3&&(_=3),t()||(d=C.cursor))}function t(){for(;!C.in_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}for(;!C.out_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}return!1}function s(){for(var r;;)if(C.bra=C.cursor,r=C.find_among(p,3))switch(C.ket=C.cursor,r){case 1:C.slice_from("y");break;case 2:C.slice_from("i");break;case 3:if(C.cursor>=C.limit)return;C.cursor++}}function u(){return _<=C.cursor}function c(){return d<=C.cursor}function a(){var r=C.limit-C.cursor;C.find_among_b(g,3)&&(C.cursor=C.limit-r,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del()))}function l(){var r;w=!1,C.ket=C.cursor,C.eq_s_b(1,"e")&&(C.bra=C.cursor,u()&&(r=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-r,C.slice_del(),w=!0,a())))}function m(){var r;u()&&(r=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-r,C.eq_s_b(3,"gem")||(C.cursor=C.limit-r,C.slice_del(),a())))}function f(){var r,e,i,n,o,t,s=C.limit-C.cursor;if(C.ket=C.cursor,r=C.find_among_b(h,5))switch(C.bra=C.cursor,r){case 1:u()&&C.slice_from("heid");break;case 2:m();break;case 3:u()&&C.out_grouping_b(j,97,232)&&C.slice_del()}if(C.cursor=C.limit-s,l(),C.cursor=C.limit-s,C.ket=C.cursor,C.eq_s_b(4,"heid")&&(C.bra=C.cursor,c()&&(e=C.limit-C.cursor,C.eq_s_b(1,"c")||(C.cursor=C.limit-e,C.slice_del(),C.ket=C.cursor,C.eq_s_b(2,"en")&&(C.bra=C.cursor,m())))),C.cursor=C.limit-s,C.ket=C.cursor,r=C.find_among_b(k,6))switch(C.bra=C.cursor,r){case 1:if(c()){if(C.slice_del(),i=C.limit-C.cursor,C.ket=C.cursor,C.eq_s_b(2,"ig")&&(C.bra=C.cursor,c()&&(n=C.limit-C.cursor,!C.eq_s_b(1,"e")))){C.cursor=C.limit-n,C.slice_del();break}C.cursor=C.limit-i,a()}break;case 2:c()&&(o=C.limit-C.cursor,C.eq_s_b(1,"e")||(C.cursor=C.limit-o,C.slice_del()));break;case 3:c()&&(C.slice_del(),l());break;case 4:c()&&C.slice_del();break;case 5:c()&&w&&C.slice_del()}C.cursor=C.limit-s,C.out_grouping_b(z,73,232)&&(t=C.limit-C.cursor,C.find_among_b(v,4)&&C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-t,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del())))}var d,_,w,b=[new e("",-1,6),new e("á",0,1),new e("ä",0,1),new e("é",0,2),new e("ë",0,2),new e("í",0,3),new e("ï",0,3),new 
e("ó",0,4),new e("ö",0,4),new e("ú",0,5),new e("ü",0,5)],p=[new e("",-1,3),new e("I",0,2),new e("Y",0,1)],g=[new e("dd",-1,-1),new e("kk",-1,-1),new e("tt",-1,-1)],h=[new e("ene",-1,2),new e("se",-1,3),new e("en",-1,2),new e("heden",2,1),new e("s",-1,3)],k=[new e("end",-1,1),new e("ig",-1,2),new e("ing",-1,1),new e("lijk",-1,3),new e("baar",-1,4),new e("bar",-1,5)],v=[new e("aa",-1,-1),new e("ee",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1)],q=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],z=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],j=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],C=new i;this.setCurrent=function(r){C.setCurrent(r)},this.getCurrent=function(){return C.getCurrent()},this.stem=function(){var e=C.cursor;return r(),C.cursor=e,o(),C.limit_backward=e,C.cursor=C.limit,f(),C.cursor=C.limit_backward,s(),!0}};return function(r){return"function"==typeof r.update?r.update(function(r){return n.setCurrent(r),n.stem(),n.getCurrent()}):(n.setCurrent(r),n.stem(),n.getCurrent())}}(),r.Pipeline.registerFunction(r.nl.stemmer,"stemmer-nl"),r.nl.stopWordFilter=r.generateStopWordFilter(" aan al alles als altijd andere ben bij daar dan dat de der deze die dit doch doen door dus een eens en er ge geen geweest haar had heb hebben heeft hem het hier hij hoe hun iemand iets ik in is ja je kan kon kunnen maar me meer men met mij mijn moet na naar niet niets nog nu of om omdat onder ons ook op over reeds te tegen toch toen tot u uit uw van veel voor want waren was wat werd wezen wie wil worden wordt zal ze zelf zich zij zijn zo zonder zou".split(" ")),r.Pipeline.registerFunction(r.nl.stopWordFilter,"stopWordFilter-nl")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.no.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Norwegian` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.no=function(){this.pipeline.reset(),this.pipeline.add(e.no.trimmer,e.no.stopWordFilter,e.no.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.no.stemmer))},e.no.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.no.trimmer=e.trimmerSupport.generateTrimmer(e.no.wordCharacters),e.Pipeline.registerFunction(e.no.trimmer,"trimmer-no"),e.no.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,r=w.cursor+3;if(a=w.limit,0<=r||r<=w.limit){for(s=r;;){if(e=w.cursor,w.in_grouping(d,97,248)){w.cursor=e;break}if(e>=w.limit)return;w.cursor=e+1}for(;!w.out_grouping(d,97,248);){if(w.cursor>=w.limit)return;w.cursor++}a=w.cursor,a=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(m,29),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:n=w.limit-w.cursor,w.in_grouping_b(c,98,122)?w.slice_del():(w.cursor=w.limit-n,w.eq_s_b(1,"k")&&w.out_grouping_b(d,97,248)&&w.slice_del());break;case 3:w.slice_from("er")}}function t(){var e,r=w.limit-w.cursor;w.cursor>=a&&(e=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,w.find_among_b(u,2)?(w.bra=w.cursor,w.limit_backward=e,w.cursor=w.limit-r,w.cursor>w.limit_backward&&(w.cursor--,w.bra=w.cursor,w.slice_del())):w.limit_backward=e)}function o(){var e,r;w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(l,11),e?(w.bra=w.cursor,w.limit_backward=r,1==e&&w.slice_del()):w.limit_backward=r)}var s,a,m=[new r("a",-1,1),new r("e",-1,1),new r("ede",1,1),new r("ande",1,1),new r("ende",1,1),new r("ane",1,1),new r("ene",1,1),new r("hetene",6,1),new r("erte",1,3),new r("en",-1,1),new r("heten",9,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",12,1),new r("s",-1,2),new r("as",14,1),new r("es",14,1),new r("edes",16,1),new r("endes",16,1),new r("enes",16,1),new r("hetenes",19,1),new r("ens",14,1),new r("hetens",21,1),new r("ers",14,1),new r("ets",14,1),new r("et",-1,1),new r("het",25,1),new r("ert",-1,3),new r("ast",-1,1)],u=[new r("dt",-1,-1),new r("vt",-1,-1)],l=[new r("leg",-1,1),new r("eleg",0,1),new r("ig",-1,1),new r("eig",2,1),new r("lig",2,1),new r("elig",4,1),new r("els",-1,1),new r("lov",-1,1),new r("elov",7,1),new r("slov",7,1),new r("hetslov",9,1)],d=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],c=[119,125,149,1],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,i(),w.cursor=w.limit,t(),w.cursor=w.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller elles en enn er et ett etter for fordi fra før ha hadde han hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen 
noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.pt.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Portuguese` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.pt=function(){this.pipeline.reset(),this.pipeline.add(e.pt.trimmer,e.pt.stopWordFilter,e.pt.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.pt.stemmer))},e.pt.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.pt.trimmer=e.trimmerSupport.generateTrimmer(e.pt.wordCharacters),e.Pipeline.registerFunction(e.pt.trimmer,"trimmer-pt"),e.pt.stemmer=function(){var r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,n=new function(){function e(){for(var e;;){if(z.bra=z.cursor,e=z.find_among(k,3))switch(z.ket=z.cursor,e){case 1:z.slice_from("a~");continue;case 2:z.slice_from("o~");continue;case 3:if(z.cursor>=z.limit)break;z.cursor++;continue}break}}function n(){if(z.out_grouping(y,97,250)){for(;!z.in_grouping(y,97,250);){if(z.cursor>=z.limit)return!0;z.cursor++}return!1}return!0}function i(){if(z.in_grouping(y,97,250))for(;!z.out_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}return g=z.cursor,!0}function o(){var e,r,s=z.cursor;if(z.in_grouping(y,97,250))if(e=z.cursor,n()){if(z.cursor=e,i())return}else g=z.cursor;if(z.cursor=s,z.out_grouping(y,97,250)){if(r=z.cursor,n()){if(z.cursor=r,!z.in_grouping(y,97,250)||z.cursor>=z.limit)return;z.cursor++}g=z.cursor}}function t(){for(;!z.in_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}for(;!z.out_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}return!0}function a(){var e=z.cursor;g=z.limit,b=g,h=g,o(),z.cursor=e,t()&&(b=z.cursor,t()&&(h=z.cursor))}function u(){for(var e;;){if(z.bra=z.cursor,e=z.find_among(q,3))switch(z.ket=z.cursor,e){case 1:z.slice_from("ã");continue;case 2:z.slice_from("õ");continue;case 3:if(z.cursor>=z.limit)break;z.cursor++;continue}break}}function w(){return g<=z.cursor}function m(){return b<=z.cursor}function c(){return h<=z.cursor}function l(){var e;if(z.ket=z.cursor,!(e=z.find_among_b(F,45)))return!1;switch(z.bra=z.cursor,e){case 1:if(!c())return!1;z.slice_del();break;case 2:if(!c())return!1;z.slice_from("log");break;case 3:if(!c())return!1;z.slice_from("u");break;case 4:if(!c())return!1;z.slice_from("ente");break;case 
5:if(!m())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(j,4),e&&(z.bra=z.cursor,c()&&(z.slice_del(),1==e&&(z.ket=z.cursor,z.eq_s_b(2,"at")&&(z.bra=z.cursor,c()&&z.slice_del()))));break;case 6:if(!c())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(C,3),e&&(z.bra=z.cursor,1==e&&c()&&z.slice_del());break;case 7:if(!c())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(P,3),e&&(z.bra=z.cursor,1==e&&c()&&z.slice_del());break;case 8:if(!c())return!1;z.slice_del(),z.ket=z.cursor,z.eq_s_b(2,"at")&&(z.bra=z.cursor,c()&&z.slice_del());break;case 9:if(!w()||!z.eq_s_b(1,"e"))return!1;z.slice_from("ir")}return!0}function f(){var e,r;if(z.cursor>=g){if(r=z.limit_backward,z.limit_backward=g,z.ket=z.cursor,e=z.find_among_b(S,120))return z.bra=z.cursor,1==e&&z.slice_del(),z.limit_backward=r,!0;z.limit_backward=r}return!1}function d(){var e;z.ket=z.cursor,(e=z.find_among_b(W,7))&&(z.bra=z.cursor,1==e&&w()&&z.slice_del())}function v(e,r){if(z.eq_s_b(1,e)){z.bra=z.cursor;var s=z.limit-z.cursor;if(z.eq_s_b(1,r))return z.cursor=z.limit-s,w()&&z.slice_del(),!1}return!0}function p(){var e;if(z.ket=z.cursor,e=z.find_among_b(L,4))switch(z.bra=z.cursor,e){case 1:w()&&(z.slice_del(),z.ket=z.cursor,z.limit-z.cursor,v("u","g")&&v("i","c"));break;case 2:z.slice_from("c")}}function _(){if(!l()&&(z.cursor=z.limit,!f()))return z.cursor=z.limit,void d();z.cursor=z.limit,z.ket=z.cursor,z.eq_s_b(1,"i")&&(z.bra=z.cursor,z.eq_s_b(1,"c")&&(z.cursor=z.limit,w()&&z.slice_del()))}var h,b,g,k=[new r("",-1,3),new r("ã",0,1),new r("õ",0,2)],q=[new r("",-1,3),new r("a~",0,1),new r("o~",0,2)],j=[new r("ic",-1,-1),new r("ad",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],C=[new r("ante",-1,1),new r("avel",-1,1),new r("ível",-1,1)],P=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],F=[new r("ica",-1,1),new r("ância",-1,1),new r("ência",-1,4),new r("ira",-1,9),new r("adora",-1,1),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,8),new r("eza",-1,1),new r("logía",-1,2),new r("idade",-1,7),new r("ante",-1,1),new r("mente",-1,6),new r("amente",12,5),new r("ável",-1,1),new r("ível",-1,1),new r("ución",-1,3),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,1),new r("imento",-1,1),new r("ivo",-1,8),new r("aça~o",-1,1),new r("ador",-1,1),new r("icas",-1,1),new r("ências",-1,4),new r("iras",-1,9),new r("adoras",-1,1),new r("osas",-1,1),new r("istas",-1,1),new r("ivas",-1,8),new r("ezas",-1,1),new r("logías",-1,2),new r("idades",-1,7),new r("uciones",-1,3),new r("adores",-1,1),new r("antes",-1,1),new r("aço~es",-1,1),new r("icos",-1,1),new r("ismos",-1,1),new r("osos",-1,1),new r("amentos",-1,1),new r("imentos",-1,1),new r("ivos",-1,8)],S=[new r("ada",-1,1),new r("ida",-1,1),new r("ia",-1,1),new r("aria",2,1),new r("eria",2,1),new r("iria",2,1),new r("ara",-1,1),new r("era",-1,1),new r("ira",-1,1),new r("ava",-1,1),new r("asse",-1,1),new r("esse",-1,1),new r("isse",-1,1),new r("aste",-1,1),new r("este",-1,1),new r("iste",-1,1),new r("ei",-1,1),new r("arei",16,1),new r("erei",16,1),new r("irei",16,1),new r("am",-1,1),new r("iam",20,1),new r("ariam",21,1),new r("eriam",21,1),new r("iriam",21,1),new r("aram",20,1),new r("eram",20,1),new r("iram",20,1),new r("avam",20,1),new r("em",-1,1),new r("arem",29,1),new r("erem",29,1),new r("irem",29,1),new r("assem",29,1),new r("essem",29,1),new r("issem",29,1),new r("ado",-1,1),new r("ido",-1,1),new r("ando",-1,1),new r("endo",-1,1),new r("indo",-1,1),new r("ara~o",-1,1),new r("era~o",-1,1),new r("ira~o",-1,1),new r("ar",-1,1),new r("er",-1,1),new 
r("ir",-1,1),new r("as",-1,1),new r("adas",47,1),new r("idas",47,1),new r("ias",47,1),new r("arias",50,1),new r("erias",50,1),new r("irias",50,1),new r("aras",47,1),new r("eras",47,1),new r("iras",47,1),new r("avas",47,1),new r("es",-1,1),new r("ardes",58,1),new r("erdes",58,1),new r("irdes",58,1),new r("ares",58,1),new r("eres",58,1),new r("ires",58,1),new r("asses",58,1),new r("esses",58,1),new r("isses",58,1),new r("astes",58,1),new r("estes",58,1),new r("istes",58,1),new r("is",-1,1),new r("ais",71,1),new r("eis",71,1),new r("areis",73,1),new r("ereis",73,1),new r("ireis",73,1),new r("áreis",73,1),new r("éreis",73,1),new r("íreis",73,1),new r("ásseis",73,1),new r("ésseis",73,1),new r("ísseis",73,1),new r("áveis",73,1),new r("íeis",73,1),new r("aríeis",84,1),new r("eríeis",84,1),new r("iríeis",84,1),new r("ados",-1,1),new r("idos",-1,1),new r("amos",-1,1),new r("áramos",90,1),new r("éramos",90,1),new r("íramos",90,1),new r("ávamos",90,1),new r("íamos",90,1),new r("aríamos",95,1),new r("eríamos",95,1),new r("iríamos",95,1),new r("emos",-1,1),new r("aremos",99,1),new r("eremos",99,1),new r("iremos",99,1),new r("ássemos",99,1),new r("êssemos",99,1),new r("íssemos",99,1),new r("imos",-1,1),new r("armos",-1,1),new r("ermos",-1,1),new r("irmos",-1,1),new r("ámos",-1,1),new r("arás",-1,1),new r("erás",-1,1),new r("irás",-1,1),new r("eu",-1,1),new r("iu",-1,1),new r("ou",-1,1),new r("ará",-1,1),new r("erá",-1,1),new r("irá",-1,1)],W=[new r("a",-1,1),new r("i",-1,1),new r("o",-1,1),new r("os",-1,1),new r("á",-1,1),new r("í",-1,1),new r("ó",-1,1)],L=[new r("e",-1,1),new r("ç",-1,2),new r("é",-1,1),new r("ê",-1,1)],y=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2],z=new s;this.setCurrent=function(e){z.setCurrent(e)},this.getCurrent=function(){return z.getCurrent()},this.stem=function(){var r=z.cursor;return e(),z.cursor=r,a(),z.limit_backward=r,z.cursor=z.limit,_(),z.cursor=z.limit,p(),z.cursor=z.limit_backward,u(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.pt.stemmer,"stemmer-pt"),e.pt.stopWordFilter=e.generateStopWordFilter("a ao aos aquela aquelas aquele aqueles aquilo as até com como da das de dela delas dele deles depois do dos e ela elas ele eles em entre era eram essa essas esse esses esta estamos estas estava estavam este esteja estejam estejamos estes esteve estive estivemos estiver estivera estiveram estiverem estivermos estivesse estivessem estivéramos estivéssemos estou está estávamos estão eu foi fomos for fora foram forem formos fosse fossem fui fôramos fôssemos haja hajam hajamos havemos hei houve houvemos houver houvera houveram houverei houverem houveremos houveria houveriam houvermos houverá houverão houveríamos houvesse houvessem houvéramos houvéssemos há hão isso isto já lhe lhes mais mas me mesmo meu meus minha minhas muito na nas nem no nos nossa nossas nosso nossos num numa não nós o os ou para pela pelas pelo pelos por qual quando que quem se seja sejam sejamos sem serei seremos seria seriam será serão seríamos seu seus somos sou sua suas são só também te tem temos tenha tenham tenhamos tenho terei teremos teria teriam terá terão teríamos teu teus teve tinha tinham tive tivemos tiver tivera tiveram tiverem tivermos tivesse tivessem tivéramos tivéssemos tu tua tuas tém tínhamos um uma você vocês vos à às éramos".split(" ")),e.Pipeline.registerFunction(e.pt.stopWordFilter,"stopWordFilter-pt")}}); 
-------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.ru.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Russian` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ru=function(){this.pipeline.reset(),this.pipeline.add(e.ru.trimmer,e.ru.stopWordFilter,e.ru.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ru.stemmer))},e.ru.wordCharacters="Ѐ-҄҇-ԯᴫᵸⷠ-ⷿꙀ-ꚟ︮︯",e.ru.trimmer=e.trimmerSupport.generateTrimmer(e.ru.wordCharacters),e.Pipeline.registerFunction(e.ru.trimmer,"trimmer-ru"),e.ru.stemmer=function(){var n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,t=new function(){function e(){for(;!W.in_grouping(S,1072,1103);){if(W.cursor>=W.limit)return!1;W.cursor++}return!0}function t(){for(;!W.out_grouping(S,1072,1103);){if(W.cursor>=W.limit)return!1;W.cursor++}return!0}function w(){b=W.limit,_=b,e()&&(b=W.cursor,t()&&e()&&t()&&(_=W.cursor))}function i(){return _<=W.cursor}function u(e,n){var r,t;if(W.ket=W.cursor,r=W.find_among_b(e,n)){switch(W.bra=W.cursor,r){case 1:if(t=W.limit-W.cursor,!W.eq_s_b(1,"а")&&(W.cursor=W.limit-t,!W.eq_s_b(1,"я")))return!1;case 2:W.slice_del()}return!0}return!1}function o(){return u(h,9)}function s(e,n){var r;return W.ket=W.cursor,!!(r=W.find_among_b(e,n))&&(W.bra=W.cursor,1==r&&W.slice_del(),!0)}function c(){return s(g,26)}function m(){return!!c()&&(u(C,8),!0)}function f(){return s(k,2)}function l(){return u(P,46)}function a(){s(v,36)}function p(){var e;W.ket=W.cursor,(e=W.find_among_b(F,2))&&(W.bra=W.cursor,i()&&1==e&&W.slice_del())}function d(){var e;if(W.ket=W.cursor,e=W.find_among_b(q,4))switch(W.bra=W.cursor,e){case 1:if(W.slice_del(),W.ket=W.cursor,!W.eq_s_b(1,"н"))break;W.bra=W.cursor;case 2:if(!W.eq_s_b(1,"н"))break;case 3:W.slice_del()}}var _,b,h=[new n("в",-1,1),new n("ив",0,2),new n("ыв",0,2),new n("вши",-1,1),new n("ивши",3,2),new n("ывши",3,2),new n("вшись",-1,1),new n("ившись",6,2),new n("ывшись",6,2)],g=[new n("ее",-1,1),new n("ие",-1,1),new n("ое",-1,1),new n("ые",-1,1),new n("ими",-1,1),new n("ыми",-1,1),new n("ей",-1,1),new n("ий",-1,1),new n("ой",-1,1),new n("ый",-1,1),new n("ем",-1,1),new n("им",-1,1),new n("ом",-1,1),new n("ым",-1,1),new n("его",-1,1),new n("ого",-1,1),new n("ему",-1,1),new n("ому",-1,1),new n("их",-1,1),new n("ых",-1,1),new n("ею",-1,1),new n("ою",-1,1),new n("ую",-1,1),new n("юю",-1,1),new n("ая",-1,1),new n("яя",-1,1)],C=[new n("ем",-1,1),new n("нн",-1,1),new n("вш",-1,1),new n("ивш",2,2),new n("ывш",2,2),new n("щ",-1,1),new n("ющ",5,1),new n("ующ",6,2)],k=[new n("сь",-1,1),new n("ся",-1,1)],P=[new n("ла",-1,1),new n("ила",0,2),new n("ыла",0,2),new n("на",-1,1),new n("ена",3,2),new n("ете",-1,1),new n("ите",-1,2),new 
n("йте",-1,1),new n("ейте",7,2),new n("уйте",7,2),new n("ли",-1,1),new n("или",10,2),new n("ыли",10,2),new n("й",-1,1),new n("ей",13,2),new n("уй",13,2),new n("л",-1,1),new n("ил",16,2),new n("ыл",16,2),new n("ем",-1,1),new n("им",-1,2),new n("ым",-1,2),new n("н",-1,1),new n("ен",22,2),new n("ло",-1,1),new n("ило",24,2),new n("ыло",24,2),new n("но",-1,1),new n("ено",27,2),new n("нно",27,1),new n("ет",-1,1),new n("ует",30,2),new n("ит",-1,2),new n("ыт",-1,2),new n("ют",-1,1),new n("уют",34,2),new n("ят",-1,2),new n("ны",-1,1),new n("ены",37,2),new n("ть",-1,1),new n("ить",39,2),new n("ыть",39,2),new n("ешь",-1,1),new n("ишь",-1,2),new n("ю",-1,2),new n("ую",44,2)],v=[new n("а",-1,1),new n("ев",-1,1),new n("ов",-1,1),new n("е",-1,1),new n("ие",3,1),new n("ье",3,1),new n("и",-1,1),new n("еи",6,1),new n("ии",6,1),new n("ами",6,1),new n("ями",6,1),new n("иями",10,1),new n("й",-1,1),new n("ей",12,1),new n("ией",13,1),new n("ий",12,1),new n("ой",12,1),new n("ам",-1,1),new n("ем",-1,1),new n("ием",18,1),new n("ом",-1,1),new n("ям",-1,1),new n("иям",21,1),new n("о",-1,1),new n("у",-1,1),new n("ах",-1,1),new n("ях",-1,1),new n("иях",26,1),new n("ы",-1,1),new n("ь",-1,1),new n("ю",-1,1),new n("ию",30,1),new n("ью",30,1),new n("я",-1,1),new n("ия",33,1),new n("ья",33,1)],F=[new n("ост",-1,1),new n("ость",-1,1)],q=[new n("ейше",-1,1),new n("н",-1,2),new n("ейш",-1,1),new n("ь",-1,3)],S=[33,65,8,232],W=new r;this.setCurrent=function(e){W.setCurrent(e)},this.getCurrent=function(){return W.getCurrent()},this.stem=function(){return w(),W.cursor=W.limit,!(W.cursor=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursors||e>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor>1),f=0,l=o0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o=0;m--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n-_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n-_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return 
function(r){return"function"==typeof r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.sv.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Lunr languages, `Swedish` language 3 | * https://github.com/MihaiValentin/lunr-languages 4 | * 5 | * Copyright 2014, Mihai Valentin 6 | * http://www.mozilla.org/MPL/ 7 | */ 8 | /*! 9 | * based on 10 | * Snowball JavaScript Library v0.3 11 | * http://code.google.com/p/urim/ 12 | * http://snowball.tartarus.org/ 13 | * 14 | * Copyright 2010, Oleg Mazko 15 | * http://www.mozilla.org/MPL/ 16 | */ 17 | 18 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){function e(){var e,r=w.cursor+3;if(o=w.limit,0<=r||r<=w.limit){for(a=r;;){if(e=w.cursor,w.in_grouping(l,97,246)){w.cursor=e;break}if(w.cursor=e,w.cursor>=w.limit)return;w.cursor++}for(;!w.out_grouping(l,97,246);){if(w.cursor>=w.limit)return;w.cursor++}o=w.cursor,o=o&&(w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(u,37),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.in_grouping_b(d,98,121)&&w.slice_del()}}function i(){var e=w.limit_backward;w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.find_among_b(c,7)&&(w.cursor=w.limit,w.ket=w.cursor,w.cursor>w.limit_backward&&(w.bra=--w.cursor,w.slice_del())),w.limit_backward=e)}function s(){var e,r;if(w.cursor>=o){if(r=w.limit_backward,w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(m,5))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.slice_from("lös");break;case 3:w.slice_from("full")}w.limit_backward=r}}var a,o,u=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],c=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new 
r("fullt",-1,3),new r("löst",-1,2)],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],d=[119,127,149],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,t(),w.cursor=w.limit,i(),w.cursor=w.limit,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}}(),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej eller en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" ")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.ta.min.js: -------------------------------------------------------------------------------- 1 | !function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.ta=function(){this.pipeline.reset(),this.pipeline.add(e.ta.trimmer,e.ta.stopWordFilter,e.ta.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ta.stemmer))},e.ta.wordCharacters="஀-உஊ-ஏஐ-ஙச-ட஠-னப-யர-ஹ஺-ிீ-௉ொ-௏ௐ-௙௚-௟௠-௩௪-௯௰-௹௺-௿a-zA-Za-zA-Z0-90-9",e.ta.trimmer=e.trimmerSupport.generateTrimmer(e.ta.wordCharacters),e.Pipeline.registerFunction(e.ta.trimmer,"trimmer-ta"),e.ta.stopWordFilter=e.generateStopWordFilter("அங்கு அங்கே அது அதை அந்த அவர் அவர்கள் அவள் அவன் அவை ஆக ஆகவே ஆகையால் ஆதலால் ஆதலினால் ஆனாலும் ஆனால் இங்கு இங்கே இது இதை இந்த இப்படி இவர் இவர்கள் இவள் இவன் இவை இவ்வளவு உனக்கு உனது உன் உன்னால் எங்கு எங்கே எது எதை எந்த எப்படி எவர் எவர்கள் எவள் எவன் எவை எவ்வளவு எனக்கு எனது எனவே என் என்ன என்னால் ஏது ஏன் தனது தன்னால் தானே தான் நாங்கள் நாம் நான் நீ நீங்கள்".split(" ")),e.ta.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.ta.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.ta.stemmer,"stemmer-ta"),e.Pipeline.registerFunction(e.ta.stopWordFilter,"stopWordFilter-ta")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.te.min.js: -------------------------------------------------------------------------------- 1 | !function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.te=function(){this.pipeline.reset(),this.pipeline.add(e.te.trimmer,e.te.stopWordFilter,e.te.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.te.stemmer))},e.te.wordCharacters="ఀ-ఄఅ-ఔక-హా-ౌౕ-ౖౘ-ౚౠ-ౡౢ-ౣ౦-౯౸-౿఼ఽ్ౝ౷౤౥",e.te.trimmer=e.trimmerSupport.generateTrimmer(e.te.wordCharacters),e.Pipeline.registerFunction(e.te.trimmer,"trimmer-te"),e.te.stopWordFilter=e.generateStopWordFilter("అందరూ అందుబాటులో అడగండి అడగడం అడ్డంగా అనుగుణంగా అనుమతించు అనుమతిస్తుంది అయితే ఇప్పటికే ఉన్నారు ఎక్కడైనా ఎప్పుడు ఎవరైనా ఎవరో ఏ ఏదైనా ఏమైనప్పటికి ఒక ఒకరు కనిపిస్తాయి కాదు కూడా గా గురించి చుట్టూ చేయగలిగింది తగిన తర్వాత దాదాపు దూరంగా నిజంగా పై ప్రకారం ప్రక్కన మధ్య మరియు మరొక మళ్ళీ మాత్రమే మెచ్చుకో వద్ద వెంట వేరుగా వ్యతిరేకంగా సంబంధం".split(" ")),e.te.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.te.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.te.stemmer,"stemmer-te"),e.Pipeline.registerFunction(e.te.stopWordFilter,"stopWordFilter-te")}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.th.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.th=function(){this.pipeline.reset(),this.pipeline.add(e.th.trimmer),r?this.tokenizer=e.th.tokenizer:(e.tokenizer&&(e.tokenizer=e.th.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.th.tokenizer))},e.th.wordCharacters="[฀-๿]",e.th.trimmer=e.trimmerSupport.generateTrimmer(e.th.wordCharacters),e.Pipeline.registerFunction(e.th.trimmer,"trimmer-th");var t=e.wordcut;t.init(),e.th.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t):t});var n=i.toString().replace(/^\s+/,"");return t.cut(n).split("|")}}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.vi.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.vi=function(){this.pipeline.reset(),this.pipeline.add(e.vi.stopWordFilter,e.vi.trimmer)},e.vi.wordCharacters="[A-Za-ẓ̀͐́͑̉̃̓ÂâÊêÔôĂ-ăĐ-đƠ-ơƯ-ư]",e.vi.trimmer=e.trimmerSupport.generateTrimmer(e.vi.wordCharacters),e.Pipeline.registerFunction(e.vi.trimmer,"trimmer-vi"),e.vi.stopWordFilter=e.generateStopWordFilter("là cái nhưng mà".split(" "))}}); -------------------------------------------------------------------------------- /docs/assets/javascripts/lunr/min/lunr.zh.min.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r(require("@node-rs/jieba")):r()(e.lunr)}(this,function(e){return function(r,t){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var i="2"==r.version[0];r.zh=function(){this.pipeline.reset(),this.pipeline.add(r.zh.trimmer,r.zh.stopWordFilter,r.zh.stemmer),i?this.tokenizer=r.zh.tokenizer:(r.tokenizer&&(r.tokenizer=r.zh.tokenizer),this.tokenizerFn&&(this.tokenizerFn=r.zh.tokenizer))},r.zh.tokenizer=function(n){if(!arguments.length||null==n||void 0==n)return[];if(Array.isArray(n))return n.map(function(e){return i?new r.Token(e.toLowerCase()):e.toLowerCase()});t&&e.load(t);var o=n.toString().trim().toLowerCase(),s=[];e.cut(o,!0).forEach(function(e){s=s.concat(e.split(" "))}),s=s.filter(function(e){return!!e});var u=0;return s.map(function(e,t){if(i){var n=o.indexOf(e,u),s={};return s.position=[n,e.length],s.index=t,u=n,new r.Token(e,s)}return e})},r.zh.wordCharacters="\\w一-龥",r.zh.trimmer=r.trimmerSupport.generateTrimmer(r.zh.wordCharacters),r.Pipeline.registerFunction(r.zh.trimmer,"trimmer-zh"),r.zh.stemmer=function(){return function(e){return e}}(),r.Pipeline.registerFunction(r.zh.stemmer,"stemmer-zh"),r.zh.stopWordFilter=r.generateStopWordFilter("的 一 不 在 人 有 是 为 為 以 于 於 上 他 而 后 後 之 来 來 及 了 因 下 可 到 由 这 這 与 與 也 此 但 并 並 个 個 其 已 无 無 小 我 们 們 起 最 再 今 去 好 只 又 或 很 亦 某 把 那 你 乃 它 吧 被 比 别 趁 当 當 从 從 得 打 凡 儿 兒 尔 爾 该 該 各 给 給 跟 和 何 还 還 即 几 幾 既 看 据 據 距 靠 啦 另 么 麽 每 嘛 拿 哪 您 凭 憑 且 却 卻 让 讓 仍 啥 如 若 使 谁 誰 虽 雖 随 隨 同 所 她 哇 嗡 往 些 向 沿 哟 喲 用 咱 则 則 怎 曾 至 致 着 著 诸 諸 自".split(" ")),r.Pipeline.registerFunction(r.zh.stopWordFilter,"stopWordFilter-zh")}}); -------------------------------------------------------------------------------- /docs/assets/stylesheets/palette.06af60db.min.css.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"sources":["src/templates/assets/stylesheets/palette/_scheme.scss","../../../../src/templates/assets/stylesheets/palette.scss","src/templates/assets/stylesheets/palette/_accent.scss","src/templates/assets/stylesheets/palette/_primary.scss","src/templates/assets/stylesheets/utilities/_break.scss"],"names":[],"mappings":"AA2BA,cAGE,6BAME,sDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CACA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CAGA,mDAAA,CACA,gDAAA,CAGA,0BAAA,CACA,mCAAA,CAGA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,uDAAA,CACA,6DAAA,CACA,2DAAA,CAGA,iCAAA,CAGA,yDAAA,CACA,iEAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,qDAAA,CACA,uDAAA,CAGA,8DAAA,CAKA,8DAAA,CAKA,0DAAA,CAvEA,iBCeF,CD6DE,kHAEE,YC3DJ,CDkFE,yDACE,4BChFJ,CD+EE,2DACE,4BC7EJ,CD4EE,gEACE,4BC1EJ,CDyEE,2DACE,4BCvEJ,CDsEE,yDACE,4BCpEJ,CDmEE,0DACE,4BCjEJ,CDgEE,gEACE,4BC9DJ,CD6DE,0DACE,4BC3DJ,CD0DE,2OACE,4BC/CJ,CDsDA,+FAGE,iCCpDF,CACF,CC/CE,2BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD2CN,CCrDE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDkDN,CC5DE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDyDN,CCnEE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDgEN,CC1EE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDuEN,CCjFE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD8EN,CCxFE,kCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDqFN,CC/FE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD4FN,CCtGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDmGN,CC7GE,6BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD0GN,CCpHE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDiHN,CC3HE,4BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD2HN,CClIE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDkIN,CCzIE,6BACE,yBAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDyIN,CChJE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDgJN,CCvJE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDoJN,CEzJE,4BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsJN,CEjKE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8JN,CEzKE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsKN,CEjLE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8KN,CEzLE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsLN,CEjME,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8LN,CEzME,mCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsMN,CEjNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8MN,CEzNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsNN,CEjOE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8NN,CEzOE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsON,CEjPE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFiPN,CEzPE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFyPN,CEjQE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFiQN,CEzQE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFyQN,CEjRE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8QN,CEzRE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsRN,CEjSE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BF0RN,CE1SE,kCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BFmSN,CEpRE,sEACE,4BFuRJ,CExRE,+DACE,4BF2RJ,CE5RE,iEACE,4BF+RJ,CEhSE,gEACE,4BFmSJ,CEpSE,iEACE,4BFuSJ,CE9RA,8BACE,mDAAA,CACA,4DAAA,CACA,0DAAA,CACA,oDAAA,CACA,2DAAA,CAGA,4BF+RF,CE5RE,yCACE,+BF8RJ,CE3RI,kDAEE,0CAAA,CACA,sCAAA,CAFA,mCF+RN,CG3MI,mCD1EA,+CACE,8CFwRJ,CErRI,qDACE,8CFuRN,CElRE,iEACE,mCFoRJ,CACF,CGtNI,sCDvDA,uCACE,oCFgRJ,CACF,CEvQA,8BACE,kDAAA,CACA,4DAAA,CACA,wDAAA,CACA,oDAAA,CACA,6DAAA,CAGA,4BFwQF,CErQE,yCACE,+BFuQJ,CEpQI,kDAEE,0CAAA,CACA,sCAAA,CAFA,mCFwQN,CEjQE,yCACE,6CFmQJ,CG5NI,0CDhCA,8CACE,gDF
+PJ,CACF,CGjOI,0CDvBA,iFACE,6CF2PJ,CACF,CGzPI,sCDKA,uCACE,6CFuPJ,CACF","file":"palette.css"} -------------------------------------------------------------------------------- /docs/dagster/pipelines/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/docs/dagster/pipelines/__init__.py -------------------------------------------------------------------------------- /docs/dagster/pipelines/example_pipeline.py: -------------------------------------------------------------------------------- 1 | """ 2 | Example Dagster solids and pipeline from the Dagster tutorial. 3 | """ 4 | import csv 5 | from typing import Any, Dict, List 6 | 7 | import requests 8 | from dagster import execute_pipeline, pipeline, solid 9 | from dagster.core.execution.context.compute import SolidExecutionContext 10 | 11 | 12 | @solid 13 | def download_data(context: SolidExecutionContext) -> List[Dict[str, Any]]: 14 | """Download dataset.""" 15 | response = requests.get("https://docs.dagster.io/assets/cereal.csv") 16 | lines = response.text.split("\n") 17 | cereals = [row for row in csv.DictReader(lines)] 18 | context.log.info(f"Found {len(cereals)} cereals") 19 | return cereals 20 | 21 | 22 | @solid 23 | def find_max_sugar_cereal( 24 | context: SolidExecutionContext, cereals: List[Dict[str, Any]] 25 | ) -> str: 26 | """Find the product that has the maximum value for sugar content.""" 27 | sorted_by_sugar = sorted(cereals, key=lambda cereal: int(cereal["sugars"]))  # csv.DictReader yields strings, so cast before sorting 28 | max_sugar_cereal = sorted_by_sugar[-1]["name"] 29 | context.log.info(f"{max_sugar_cereal} has the greatest amount of sugar.") 30 | return max_sugar_cereal 31 | 32 | 33 | @pipeline 34 | def cereal_data_pipeline() -> str: 35 | """Compose the end-to-end cereal data pipeline.""" 36 | return find_max_sugar_cereal(download_data()) 37 | 38 | 39 | if __name__ == "__main__": 40 | result = execute_pipeline(cereal_data_pipeline) 41 | -------------------------------------------------------------------------------- /docs/dagster/repository.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module defines the Dagster pipeline repository.
3 | """ 4 | from dagster import repository 5 | 6 | from pipelines.example_pipeline import cereal_data_pipeline 7 | 8 | 9 | @repository 10 | def team_one(): 11 | return { 12 | "pipelines": { 13 | "cereal_data_pipeline": lambda: cereal_data_pipeline 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /docs/dagster/requirements.txt: -------------------------------------------------------------------------------- 1 | dagit==0.12.10 2 | dagster==0.12.10 3 | requests>=2.26.0 4 | pytest==6.2.5 5 | -------------------------------------------------------------------------------- /docs/dagster/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/docs/dagster/tests/__init__.py -------------------------------------------------------------------------------- /docs/dagster/tests/test_data.csv: -------------------------------------------------------------------------------- 1 | name,mfr,type,calories,protein,fat,sodium,fiber,carbo,sugars,potass,vitamins,shelf,weight,cups,rating 2 | 100% Bran,N,C,70,4,1,130,10,5,6,280,25,3,1,0.33,68.402973 3 | 100% Natural Bran,Q,C,120,3,5,15,2,8,8,135,0,3,1,1,33.983679 4 | All-Bran,K,C,70,4,1,260,9,7,5,320,25,3,1,0.33,59.425505 5 | All-Bran with Extra Fiber,K,C,50,4,0,140,14,8,0,330,25,3,1,0.5,93.704912 6 | Almond Delight,R,C,110,2,2,200,1,14,8,-1,25,3,1,0.75,34.384843 7 | Apple Cinnamon Cheerios,G,C,110,2,2,180,1.5,10.5,10,70,25,1,1,0.75,29.509541 8 | Apple Jacks,K,C,110,2,0,125,1,11,14,30,25,2,1,1,33.174094 9 | Basic 4,G,C,130,3,2,210,2,18,8,100,25,3,1.33,0.75,37.038562 10 | Bran Chex,R,C,90,2,1,200,4,15,6,125,25,1,1,0.67,49.120253 11 | Bran Flakes,P,C,90,3,0,210,5,13,5,190,25,3,1,0.67,53.313813 12 | Cap'n'Crunch,Q,C,120,1,2,220,0,12,12,35,25,2,1,0.75,18.042851 13 | Cheerios,G,C,110,6,2,290,2,17,1,105,25,1,1,1.25,50.764999 14 | Cinnamon Toast Crunch,G,C,120,1,3,210,0,13,9,45,25,2,1,0.75,19.823573 15 | Clusters,G,C,110,3,2,140,2,13,7,105,25,3,1,0.5,40.400208 16 | Cocoa Puffs,G,C,110,1,1,180,0,12,13,55,25,2,1,1,22.736446 17 | Corn Chex,R,C,110,2,0,280,0,22,3,25,25,1,1,1,41.445019 18 | Corn Flakes,K,C,100,2,0,290,1,21,2,35,25,1,1,1,45.863324 19 | Corn Pops,K,C,110,1,0,90,1,13,12,20,25,2,1,1,35.782791 20 | Count Chocula,G,C,110,1,1,180,0,12,13,65,25,2,1,1,22.396513 21 | Cracklin' Oat Bran,K,C,110,3,3,140,4,10,7,160,25,3,1,0.5,40.448772 22 | Cream of Wheat (Quick),N,H,100,3,0,80,1,21,0,-1,0,2,1,1,64.533816 23 | Crispix,K,C,110,2,0,220,1,21,3,30,25,3,1,1,46.895644 24 | Crispy Wheat & Raisins,G,C,100,2,1,140,2,11,10,120,25,3,1,0.75,36.176196 25 | Double Chex,R,C,100,2,0,190,1,18,5,80,25,3,1,0.75,44.330856 26 | Froot Loops,K,C,110,2,1,125,1,11,13,30,25,2,1,1,32.207582 27 | Frosted Flakes,K,C,110,1,0,200,1,14,11,25,25,1,1,0.75,31.435973 28 | Frosted Mini-Wheats,K,C,100,3,0,0,3,14,7,100,25,2,1,0.8,58.345141 29 | Fruit & Fibre Dates; Walnuts; and Oats,P,C,120,3,2,160,5,12,10,200,25,3,1.25,0.67,40.917047 30 | Fruitful Bran,K,C,120,3,0,240,5,14,12,190,25,3,1.33,0.67,41.015492 31 | Fruity Pebbles,P,C,110,1,1,135,0,13,12,25,25,2,1,0.75,28.025765 32 | Golden Crisp,P,C,100,2,0,45,0,11,15,40,25,1,1,0.88,35.252444 33 | Golden Grahams,G,C,110,1,1,280,0,15,9,45,25,2,1,0.75,23.804043 34 | Grape Nuts Flakes,P,C,100,3,1,140,3,15,5,85,25,3,1,0.88,52.076897 35 | Grape-Nuts,P,C,110,3,0,170,3,17,3,90,25,3,1,0.25,53.371007 36 | Great Grains Pecan,P,C,120,3,3,75,3,13,4,100,25,3,1,0.33,45.811716 37 | 
Honey Graham Ohs,Q,C,120,1,2,220,1,12,11,45,25,2,1,1,21.871292 38 | Honey Nut Cheerios,G,C,110,3,1,250,1.5,11.5,10,90,25,1,1,0.75,31.072217 39 | Honey-comb,P,C,110,1,0,180,0,14,11,35,25,1,1,1.33,28.742414 40 | Just Right Crunchy Nuggets,K,C,110,2,1,170,1,17,6,60,100,3,1,1,36.523683 41 | Just Right Fruit & Nut,K,C,140,3,1,170,2,20,9,95,100,3,1.3,0.75,36.471512 42 | Kix,G,C,110,2,1,260,0,21,3,40,25,2,1,1.5,39.241114 43 | Life,Q,C,100,4,2,150,2,12,6,95,25,2,1,0.67,45.328074 44 | Lucky Charms,G,C,110,2,1,180,0,12,12,55,25,2,1,1,26.734515 45 | Maypo,A,H,100,4,1,0,0,16,3,95,25,2,1,1,54.850917 46 | Muesli Raisins; Dates; & Almonds,R,C,150,4,3,95,3,16,11,170,25,3,1,1,37.136863 47 | Muesli Raisins; Peaches; & Pecans,R,C,150,4,3,150,3,16,11,170,25,3,1,1,34.139765 48 | Mueslix Crispy Blend,K,C,160,3,2,150,3,17,13,160,25,3,1.5,0.67,30.313351 49 | Multi-Grain Cheerios,G,C,100,2,1,220,2,15,6,90,25,1,1,1,40.105965 50 | Nut&Honey Crunch,K,C,120,2,1,190,0,15,9,40,25,2,1,0.67,29.924285 51 | Nutri-Grain Almond-Raisin,K,C,140,3,2,220,3,21,7,130,25,3,1.33,0.67,40.692320 52 | Nutri-grain Wheat,K,C,90,3,0,170,3,18,2,90,25,3,1,1,59.642837 53 | Oatmeal Raisin Crisp,G,C,130,3,2,170,1.5,13.5,10,120,25,3,1.25,0.5,30.450843 54 | Post Nat. Raisin Bran,P,C,120,3,1,200,6,11,14,260,25,3,1.33,0.67,37.840594 55 | Product 19,K,C,100,3,0,320,1,20,3,45,100,3,1,1,41.503540 56 | Puffed Rice,Q,C,50,1,0,0,0,13,0,15,0,3,0.5,1,60.756112 57 | Puffed Wheat,Q,C,50,2,0,0,1,10,0,50,0,3,0.5,1,63.005645 58 | Quaker Oat Squares,Q,C,100,4,1,135,2,14,6,110,25,3,1,0.5,49.511874 59 | Quaker Oatmeal,Q,H,100,5,2,0,2.7,-1,-1,110,0,1,1,0.67,50.828392 60 | Raisin Bran,K,C,120,3,1,210,5,14,12,240,25,2,1.33,0.75,39.259197 61 | Raisin Nut Bran,G,C,100,3,2,140,2.5,10.5,8,140,25,3,1,0.5,39.703400 62 | Raisin Squares,K,C,90,2,0,0,2,15,6,110,25,3,1,0.5,55.333142 63 | Rice Chex,R,C,110,1,0,240,0,23,2,30,25,1,1,1.13,41.998933 64 | Rice Krispies,K,C,110,2,0,290,0,22,3,35,25,1,1,1,40.560159 65 | Shredded Wheat,N,C,80,2,0,0,3,16,0,95,0,1,0.83,1,68.235885 66 | Shredded Wheat 'n'Bran,N,C,90,3,0,0,4,19,0,140,0,1,1,0.67,74.472949 67 | Shredded Wheat spoon size,N,C,90,3,0,0,3,20,0,120,0,1,1,0.67,72.801787 68 | Smacks,K,C,110,2,1,70,1,9,15,40,25,2,1,0.75,31.230054 69 | Special K,K,C,110,6,0,230,1,16,3,55,25,1,1,1,53.131324 70 | Strawberry Fruit Wheats,N,C,90,2,0,15,3,15,5,90,25,2,1,1,59.363993 71 | Total Corn Flakes,G,C,110,2,1,200,0,21,3,35,100,3,1,1,38.839746 72 | Total Raisin Bran,G,C,140,3,1,190,4,15,14,230,100,3,1.5,1,28.592785 73 | Total Whole Grain,G,C,100,3,1,200,3,16,3,110,100,3,1,1,46.658844 74 | Triples,G,C,110,2,1,250,0,21,3,60,25,3,1,0.75,39.106174 75 | Trix,G,C,110,1,1,140,0,13,12,25,25,2,1,1,27.753301 76 | Wheat Chex,R,C,100,3,1,230,3,17,3,115,25,1,1,0.67,49.787445 77 | Wheaties,G,C,100,3,1,200,3,17,3,110,25,1,1,1,51.592193 78 | Wheaties Honey Gold,G,C,110,2,1,200,1,16,8,60,25,1,1,0.75,36.187559 79 | -------------------------------------------------------------------------------- /docs/dagster/tests/test_example_pipeline.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for the example pipeline. 
3 | """ 4 | from io import BytesIO 5 | from unittest.mock import MagicMock, patch 6 | 7 | from dagster import execute_pipeline, build_solid_context 8 | from dagster.core.execution.context.compute import SolidExecutionContext 9 | from pytest import fixture 10 | from requests import Response 11 | 12 | from pipelines.example_pipeline import cereal_data_pipeline, download_data 13 | 14 | 15 | @fixture(scope="session") 16 | def test_data() -> BytesIO: 17 | with open("tests/test_data.csv", "r+b") as f: 18 | file_bytes = f.read() 19 | return BytesIO(file_bytes) 20 | 21 | 22 | @fixture(scope="session") 23 | def context() -> SolidExecutionContext: 24 | return build_solid_context() 25 | 26 | 27 | @patch("pipelines.example_pipeline.requests") 28 | def test_download_data_downloads_data( 29 | mock_requests: MagicMock, test_data: BytesIO, context: SolidExecutionContext 30 | ): 31 | mock_response = Response() 32 | mock_response.raw = test_data 33 | mock_requests.get.return_value = mock_response 34 | dataset = download_data(context) 35 | assert len(dataset) == 77 36 | assert len(dataset[0].keys()) == 16 37 | 38 | 39 | def test_cereal_data_pipeline(): 40 | result = execute_pipeline(cereal_data_pipeline) 41 | assert result.success 42 | -------------------------------------------------------------------------------- /docs/dagster/workspace.yaml: -------------------------------------------------------------------------------- 1 | load_from: 2 | - python_file: 3 | relative_path: repository.py 4 | executable_path: ".venv/bin/python" 5 | -------------------------------------------------------------------------------- /docs/dvc-pipelines/artefacts/model.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/docs/dvc-pipelines/artefacts/model.joblib -------------------------------------------------------------------------------- /docs/dvc-pipelines/dvc.lock: -------------------------------------------------------------------------------- 1 | schema: '2.0' 2 | stages: 3 | get_data: 4 | cmd: python stages/get_data.py 5 | deps: 6 | - path: stages/get_data.py 7 | md5: 947cf9166abd9bb26357fa15a6e8c1d2 8 | size: 388 9 | outs: 10 | - path: artefacts/dataset.csv 11 | md5: 9ee47ada0122c0951fcc98bc1e26ca50 12 | size: 38932 13 | train_model: 14 | cmd: python stages/train_model.py 15 | deps: 16 | - path: artefacts/dataset.csv 17 | md5: 9ee47ada0122c0951fcc98bc1e26ca50 18 | size: 38932 19 | - path: stages/get_data.py 20 | md5: 947cf9166abd9bb26357fa15a6e8c1d2 21 | size: 388 22 | params: 23 | params.yaml: 24 | train.random_state: 42 25 | outs: 26 | - path: artefacts/model.joblib 27 | md5: 98f7f0dc59a7a416f004a31ab305f320 28 | size: 849 29 | - path: metrics/metrics.json 30 | md5: 45679f988a4725b9e860c5eb0323c4f3 31 | size: 34 32 | -------------------------------------------------------------------------------- /docs/dvc-pipelines/dvc.yaml: -------------------------------------------------------------------------------- 1 | stages: 2 | get_data: 3 | cmd: python stages/get_data.py 4 | deps: 5 | - stages/get_data.py 6 | outs: 7 | - artefacts/dataset.csv 8 | train_model: 9 | cmd: python stages/train_model.py 10 | deps: 11 | - artefacts/dataset.csv 12 | - stages/get_data.py 13 | params: 14 | - train.random_state 15 | outs: 16 | - artefacts/model.joblib 17 | metrics: 18 | - metrics/metrics.json: 19 | cache: false 20 | -------------------------------------------------------------------------------- 
/docs/dvc-pipelines/params.yaml: -------------------------------------------------------------------------------- 1 | train: 2 | random_state: 42 3 | -------------------------------------------------------------------------------- /docs/dvc-pipelines/requirements.txt: -------------------------------------------------------------------------------- 1 | dvc[s3]==2.9.5 2 | pandas==1.3.5 3 | scikit-learn==1.0.2 4 | joblib==1.1.0 5 | -------------------------------------------------------------------------------- /docs/dvc-pipelines/stages/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pipeline stage configuration. 3 | """ 4 | DATASET_FILENAME = "artefacts/dataset.csv" 5 | METRICS_FILENAME = "metrics/metrics.json" 6 | MODEL_FILENAME = "artefacts/model.joblib" 7 | -------------------------------------------------------------------------------- /docs/dvc-pipelines/stages/get_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Stage that gets a dataset for training an ML model. 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | 7 | from config import DATASET_FILENAME 8 | 9 | 10 | def run_stage() -> None: 11 | x = np.random.standard_normal(1000) 12 | y = 2.0 * x + 0.1 * np.random.standard_normal(1000) 13 | df = pd.DataFrame({"y": y, "x": x}) 14 | df.to_csv(DATASET_FILENAME, index=False) 15 | 16 | 17 | if __name__ == "__main__": 18 | run_stage() 19 | -------------------------------------------------------------------------------- /docs/dvc-pipelines/stages/train_model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Train a regression model on the dataset. 3 | """ 4 | import joblib 5 | import json 6 | import pandas as pd 7 | import yaml 8 | from sklearn.linear_model import LinearRegression 9 | from sklearn.metrics import mean_absolute_error 10 | from sklearn.model_selection import train_test_split 11 | 12 | from config import DATASET_FILENAME, METRICS_FILENAME, MODEL_FILENAME 13 | 14 | 15 | def run_stage() -> None: 16 | params = yaml.safe_load(open("params.yaml"))["train"] 17 | data = pd.read_csv(DATASET_FILENAME) 18 | X_train, X_test, y_train, y_test = train_test_split( 19 | data[["x"]], data["y"], random_state=params["random_state"] 20 | ) 21 | model = LinearRegression() 22 | model.fit(X_train, y_train) 23 | joblib.dump(model, MODEL_FILENAME) 24 | 25 | y_test_pred = model.predict(X_test) 26 | mae = mean_absolute_error(y_test, y_test_pred) 27 | with open(METRICS_FILENAME, "w") as metrics_file: 28 | json.dump({"MAE": mae}, metrics_file, indent=4) 29 | 30 | 31 | if __name__ == "__main__": 32 | run_stage() 33 | -------------------------------------------------------------------------------- /docs/dvc/datasets/example.csv.dvc: -------------------------------------------------------------------------------- 1 | outs: 2 | - md5: f08bbb05ad36122a35304ed5f7aa5212 3 | size: 35 4 | path: example.csv 5 | -------------------------------------------------------------------------------- /docs/dvc/requirements.txt: -------------------------------------------------------------------------------- 1 | dvc[s3]==2.9.5 2 | pandas==1.3.5 3 | -------------------------------------------------------------------------------- /docs/ibis/geography.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/docs/ibis/geography.sqlite
-------------------------------------------------------------------------------- /docs/ibis/requirements.txt: -------------------------------------------------------------------------------- 1 | ibis-framework[sqlite]==3.0.2 2 | pandas==1.4.3 3 | -------------------------------------------------------------------------------- /docs/jax/requirements.txt: -------------------------------------------------------------------------------- 1 | flax==0.6.1 2 | jax==0.3.24 3 | jaxlib==0.3.24 4 | matplotlib==3.6.2 5 | numpy==1.23.1 6 | pandas==1.4.3 7 | seaborn==0.11.2 8 | tqdm==4.64.0 9 | torch==1.13.0 10 | torchvision==0.14.0 11 | -------------------------------------------------------------------------------- /docs/mlflow/requirements.txt: -------------------------------------------------------------------------------- 1 | jupyter==1.0.0 2 | mlflow==1.12.1 3 | scikit-learn==1.0.0 4 | pandas==1.1.5 5 | tqdm==4.54.1 6 | numpy==1.19.4 7 | -------------------------------------------------------------------------------- /docs/pydantic/config.yaml: -------------------------------------------------------------------------------- 1 | # example config values that we will use Pydantic to validate 2 | 3 | SCHEMA_VERSION: "0.1" 4 | 5 | PROJECT_ID: 012345 6 | PROJECT_ENV: prod 7 | PROJECT_URL: http://foo.com/bar.html 8 | 9 | USER_CERT: 10 | secret_resource_name: http://foo.com/secrets/ 11 | filename: README.md 12 | 13 | USERNAME: 14 | env_var: foo-bar-1 15 | -------------------------------------------------------------------------------- /docs/pydantic/config_schema.py: -------------------------------------------------------------------------------- 1 | """Config file schema defined using Pydantic.""" 2 | 3 | from typing import Literal, Optional 4 | 5 | from pydantic import BaseModel, ConfigDict, FilePath, HttpUrl 6 | 7 | 8 | class _UserCert(BaseModel): 9 | """Schema for User certificate config components.""" 10 | 11 | secret_resource_name: HttpUrl 12 | filename: FilePath 13 | 14 | 15 | class ConfigV1(BaseModel): 16 | """Schema for PROJECT config values.""" 17 | 18 | model_config = ConfigDict( 19 | frozen=True, extra="allow" 20 | ) # make immutable and allow extra fields 21 | 22 | SCHEMA_VERSION: Literal["0.1"] 23 | PROJECT_ID: int 24 | PROJECT_ENV: Literal["dev", "test", "prod"] 25 | USER_CERT: _UserCert 26 | USER_TAG: Optional[str] = None 27 | -------------------------------------------------------------------------------- /docs/pydantic/load_config.py: -------------------------------------------------------------------------------- 1 | """Demoing how to use Pydantic to get schema-valid config from a YAML file.""" 2 | 3 | from pathlib import Path 4 | from pprint import pprint 5 | from typing import Any 6 | 7 | import yaml 8 | from pydantic import ValidationError 9 | 10 | from config_schema import ConfigV1 11 | 12 | 13 | def get_config(file: Path = Path.cwd() / "config.yaml") -> ConfigV1: 14 | """Get validated config as an instance of the data model.""" 15 | with open(file) as f: 16 | raw_config: dict[str, Any] = yaml.safe_load(f) 17 | return ConfigV1(**raw_config) 18 | 19 | 20 | def get_config_as_dict(file: Path = Path.cwd() / "config.yaml") -> dict[str, Any]: 21 | """Get config as a dictionary that has been validated against the data model.""" 22 | with open(file) as f: 23 | raw_config: dict[str, Any] = yaml.safe_load(f) 24 | ConfigV1.model_validate(raw_config) 25 | return raw_config 26 | 27 | 28 | if __name__ == "__main__": 29 | try: 30 | print("\n(1) config as ConfigV1 object:") 31 |
print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") 32 | config = get_config() 33 | pprint(config.model_dump(), indent=2) 34 | 35 | print("\n(2) config as dict:") 36 | print("~~~~~~~~~~~~~~~~~~~") 37 | config_dict = get_config_as_dict() 38 | pprint(config_dict, indent=2) 39 | 40 | except ValidationError as e: 41 | print(e) 42 | -------------------------------------------------------------------------------- /docs/pydantic/requirements.txt: -------------------------------------------------------------------------------- 1 | pydantic==2.6.* 2 | PyYAML==6.0.* 3 | -------------------------------------------------------------------------------- /docs/pytorch/requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib==3.6.2 2 | numpy==1.23.5 3 | pandas==1.5.2 4 | pytorch-lightning==1.8.5 5 | seaborn==0.12.1 6 | torch==1.13.0 7 | torchvision==0.14.0 8 | tqdm==4.64.1 9 | -------------------------------------------------------------------------------- /docs/sitemap.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /docs/sitemap.xml.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/notes-and-demos/535e9d35ac19395442e7f92feda10326f4a3b7ad/docs/sitemap.xml.gz -------------------------------------------------------------------------------- /docs/spacy/requirements.txt: -------------------------------------------------------------------------------- 1 | en-core-web-sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.0.0/en_core_web_sm-3.0.0-py3-none-any.whl 2 | spacy==3.0.6 3 | -------------------------------------------------------------------------------- /docs/sqlalchemy/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = migrations 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # sys.path path, will be prepended to sys.path if present. 11 | # defaults to the current working directory. 12 | prepend_sys_path = . 13 | 14 | # timezone to use when rendering the date within the migration file 15 | # as well as the filename. 16 | # If specified, requires the python-dateutil library that can be 17 | # installed by adding `alembic[tz]` to the pip requirements 18 | # string value is passed to dateutil.tz.gettz() 19 | # leave blank for localtime 20 | # timezone = 21 | 22 | # max length of characters to apply to the 23 | # "slug" field 24 | # truncate_slug_length = 40 25 | 26 | # set to 'true' to run the environment during 27 | # the 'revision' command, regardless of autogenerate 28 | # revision_environment = false 29 | 30 | # set to 'true' to allow .pyc and .pyo files without 31 | # a source .py file to be detected as revisions in the 32 | # versions/ directory 33 | # sourceless = false 34 | 35 | # version location specification; This defaults 36 | # to migrations/versions. When using multiple version 37 | # directories, initial revisions must be specified with --version-path. 
38 | # The path separator used here should be the separator specified by "version_path_separator" 39 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 40 | 41 | # version path separator; As mentioned above, this is the character used to split 42 | # version_locations. Valid values are: 43 | # 44 | # version_path_separator = : 45 | # version_path_separator = ; 46 | # version_path_separator = space 47 | version_path_separator = os # default: use os.pathsep 48 | 49 | # the output encoding used when revision files 50 | # are written from script.py.mako 51 | # output_encoding = utf-8 52 | 53 | sqlalchemy.url = sqlite:///data.db 54 | 55 | 56 | [post_write_hooks] 57 | # post_write_hooks defines scripts or Python functions that are run 58 | # on newly generated revision scripts. See the documentation for further 59 | # detail and examples 60 | 61 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 62 | # hooks = black 63 | # black.type = console_scripts 64 | # black.entrypoint = black 65 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 66 | 67 | # Logging configuration 68 | [loggers] 69 | keys = root,sqlalchemy,alembic 70 | 71 | [handlers] 72 | keys = console 73 | 74 | [formatters] 75 | keys = generic 76 | 77 | [logger_root] 78 | level = WARN 79 | handlers = console 80 | qualname = 81 | 82 | [logger_sqlalchemy] 83 | level = WARN 84 | handlers = 85 | qualname = sqlalchemy.engine 86 | 87 | [logger_alembic] 88 | level = INFO 89 | handlers = 90 | qualname = alembic 91 | 92 | [handler_console] 93 | class = StreamHandler 94 | args = (sys.stderr,) 95 | level = NOTSET 96 | formatter = generic 97 | 98 | [formatter_generic] 99 | format = %(levelname)-5.5s [%(name)s] %(message)s 100 | datefmt = %H:%M:%S 101 | -------------------------------------------------------------------------------- /docs/sqlalchemy/migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /docs/sqlalchemy/migrations/env.py: -------------------------------------------------------------------------------- 1 | from logging.config import fileConfig 2 | 3 | from sqlalchemy import engine_from_config 4 | from sqlalchemy import pool 5 | 6 | from alembic import context 7 | 8 | # this is the Alembic Config object, which provides 9 | # access to the values within the .ini file in use. 10 | config = context.config 11 | 12 | # Interpret the config file for Python logging. 13 | # This line sets up loggers basically. 14 | fileConfig(config.config_file_name) 15 | 16 | # add your model's MetaData object here 17 | # for 'autogenerate' support 18 | from models import Base 19 | target_metadata = Base.metadata 20 | 21 | # other values from the config, defined by the needs of env.py, 22 | # can be acquired: 23 | # my_important_option = config.get_main_option("my_important_option") 24 | # ... etc. 25 | 26 | 27 | def run_migrations_offline(): 28 | """Run migrations in 'offline' mode. 29 | 30 | This configures the context with just a URL 31 | and not an Engine, though an Engine is acceptable 32 | here as well. By skipping the Engine creation 33 | we don't even need a DBAPI to be available. 34 | 35 | Calls to context.execute() here emit the given string to the 36 | script output. 
37 | 38 | """ 39 | url = config.get_main_option("sqlalchemy.url") 40 | context.configure( 41 | url=url, 42 | target_metadata=target_metadata, 43 | literal_binds=True, 44 | dialect_opts={"paramstyle": "named"}, 45 | ) 46 | 47 | with context.begin_transaction(): 48 | context.run_migrations() 49 | 50 | 51 | def run_migrations_online(): 52 | """Run migrations in 'online' mode. 53 | 54 | In this scenario we need to create an Engine 55 | and associate a connection with the context. 56 | 57 | """ 58 | connectable = engine_from_config( 59 | config.get_section(config.config_ini_section), 60 | prefix="sqlalchemy.", 61 | poolclass=pool.NullPool, 62 | ) 63 | 64 | with connectable.connect() as connection: 65 | context.configure( 66 | connection=connection, target_metadata=target_metadata 67 | ) 68 | 69 | with context.begin_transaction(): 70 | context.run_migrations() 71 | 72 | 73 | if context.is_offline_mode(): 74 | run_migrations_offline() 75 | else: 76 | run_migrations_online() 77 | -------------------------------------------------------------------------------- /docs/sqlalchemy/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /docs/sqlalchemy/migrations/versions/c31efd831ee7_initial_migration.py: -------------------------------------------------------------------------------- 1 | """Initial Migration 2 | 3 | Revision ID: c31efd831ee7 4 | Revises: 5 | Create Date: 2021-09-15 19:37:00.295751 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'c31efd831ee7' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('address', 22 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), 23 | sa.Column('street', sa.String(), nullable=False), 24 | sa.Column('city', sa.String(), nullable=False), 25 | sa.Column('postcode', sa.String(), nullable=False), 26 | sa.PrimaryKeyConstraint('id') 27 | ) 28 | op.create_table('person', 29 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), 30 | sa.Column('address_id', sa.Integer(), nullable=True), 31 | sa.Column('name', sa.String(), nullable=False), 32 | sa.Column('age', sa.Float(), nullable=False), 33 | sa.ForeignKeyConstraint(['address_id'], ['address.id'], ), 34 | sa.PrimaryKeyConstraint('id') 35 | ) 36 | # ### end Alembic commands ### 37 | 38 | 39 | def downgrade(): 40 | # ### commands auto generated by Alembic - please adjust! 
### 41 | op.drop_table('person') 42 | op.drop_table('address') 43 | # ### end Alembic commands ### 44 | -------------------------------------------------------------------------------- /docs/sqlalchemy/models.py: -------------------------------------------------------------------------------- 1 | """ 2 | All data Models can be found here. 3 | """ 4 | from typing import Any, Dict 5 | 6 | from sqlalchemy import ( 7 | Column, 8 | Float, 9 | Integer, 10 | String, 11 | ForeignKey 12 | ) 13 | from sqlalchemy.ext.declarative import declarative_base 14 | from sqlalchemy.orm import relationship 15 | 16 | Base = declarative_base() 17 | 18 | 19 | class Person(Base): 20 | """Personal information.""" 21 | 22 | __tablename__ = "person" 23 | 24 | id = Column(Integer, primary_key=True, autoincrement=True) 25 | address_id = Column(Integer, ForeignKey("address.id")) 26 | name = Column(String, nullable=False) 27 | age = Column(Float, nullable=False) 28 | 29 | address = relationship("Address", back_populates="person") 30 | 31 | def dict(self) -> Dict[str, Any]: 32 | return { 33 | "id": self.id, 34 | "address": self.address_id, 35 | "name": self.name, 36 | "age": self.age 37 | } 38 | 39 | 40 | class Address(Base): 41 | """Address information.""" 42 | 43 | __tablename__ = "address" 44 | 45 | id = Column(Integer, primary_key=True, autoincrement=True) 46 | street = Column(String, nullable=False) 47 | city = Column(String, nullable=False) 48 | postcode = Column(String, nullable=False) 49 | 50 | person = relationship("Person", back_populates="address") 51 | 52 | def dict(self) -> Dict[str, Any]: 53 | return { 54 | "id": self.id, 55 | "street": self.street, 56 | "city": self.city, 57 | "postcode": self.postcode 58 | } 59 | 60 | -------------------------------------------------------------------------------- /docs/sqlalchemy/requirements.txt: -------------------------------------------------------------------------------- 1 | SQLAlchemy==1.4.23 2 | alembic==1.7.1 3 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Study Notes & Demos 2 | copyright: Copyright © 2021-2023 Alex Ioannides 3 | repo_url: https://github.com/AlexIoannides/notes-and-demos 4 | repo_name: alexioannides/notes-and-demos 5 | docs_dir: demos 6 | site_dir: docs 7 | 8 | nav: 9 | - About: README.md 10 | - Home @ alexioannides.com: 'https://alexioannides.com' 11 | - Data Science: 12 | - PyTorch: 13 | - pytorch/README.md 14 | - Demo Requirements: pytorch/docs/demo_requirements.md 15 | - Tensor Manipulation: pytorch/tensors.ipynb 16 | - Creating Custom Data Loaders: pytorch/datasets.ipynb 17 | - Linear Regression with SGD: pytorch/linear_regression_sgd.ipynb 18 | - Logistic Regression with SGD: pytorch/logistic_regression_sgd.ipynb 19 | - Deep Learning: pytorch/MNIST.ipynb 20 | - PyTorch Lightning: pytorch/MNIST_pytorch_lightning.ipynb 21 | - JAX: 22 | - jax/README.md 23 | - Demo Requirements: jax/docs/demo_requirements.md 24 | - Introduction to JAX: jax/introduction_to_jax.ipynb 25 | - Linear Regression with SGD: jax/linear_regression.ipynb 26 | - Deep Learning with Flax: jax/mnist_with_flax_and_optax.ipynb 27 | - SpaCy: 28 | - spacy/README.md 29 | - Demo Requirements: spacy/docs/demo_requirements.md 30 | - NLP Pipelines for Beginners: spacy/spacy_101.ipynb 31 | - ML Engineering: 32 | - DVC: 33 | - dvc/README.md 34 | - Demo Requirements: dvc/docs/demo_requirements.md 35 | - Dataset Versioning: 
dvc/data_and_model_versioning.ipynb 36 | - DVC Pipelines: 37 | - dvc-pipelines/README.md 38 | - Demo Requirements: dvc-pipelines/docs/demo_requirements.md 39 | - Example Pipeline: dvc-pipelines/docs/example_pipeline.md 40 | - Tracking Pipeline Artefacts: dvc-pipelines/dvc_pipelines.ipynb 41 | - MLflow: 42 | - mlflow/README.md 43 | - Demo Requirements: mlflow/docs/demo_requirements.md 44 | - Managing the ML Lifecycle: mlflow/mlflow_basics.ipynb 45 | - Data Engineering: 46 | - Dagster: 47 | - dagster/README.md 48 | - Demo Requirements: dagster/docs/demo_requirements.md 49 | - Example Pipeline: dagster/docs/example_pipeline.md 50 | - Testing Pipelines: dagster/docs/testing_pipelines.md 51 | - Repos & Workspaces: dagster/docs/repos_and_workspaces.md 52 | - Ibis: 53 | - ibis/README.md 54 | - Demo Requirements: ibis/docs/demo_requirements.md 55 | - Interacting with Data: ibis/ibis_introduction.ipynb 56 | - SQL Alchemy: 57 | - sqlalchemy/README.md 58 | - Demo Requirements: sqlalchemy/docs/demo_requirements.md 59 | - Defining Data Models: sqlalchemy/docs/data_models.md 60 | - ORM Basics: sqlalchemy/sql-alchemy-basics.ipynb 61 | - Software Engineering: 62 | - Pydantic: 63 | - pydantic/README.md 64 | - Demo Requirements: pydantic/docs/demo_requirements.md 65 | - Defining a Data Model: pydantic/docs/define_data_model.md 66 | - Validating Runtime Data: pydantic/docs/validating_runtime_data.md 67 | 68 | plugins: 69 | - mkdocs-jupyter: 70 | include: ["*.ipynb"] 71 | 72 | theme: 73 | name: material 74 | palette: 75 | scheme: slate 76 | primary: green 77 | features: 78 | - navigation.indexes 79 | - navigation.instant 80 | - navigation.sections 81 | - navigation.top 82 | icon: 83 | logo: octicons/terminal-24 84 | 85 | markdown_extensions: 86 | - admonition 87 | - attr_list 88 | - def_list 89 | - toc: 90 | permalink: true 91 | toc_depth: 2 92 | - pymdownx.arithmatex: 93 | generic: true 94 | - pymdownx.details 95 | - pymdownx.emoji: 96 | emoji_index: !!python/name:materialx.emoji.twemoji 97 | emoji_generator: !!python/name:materialx.emoji.to_svg 98 | - pymdownx.highlight 99 | - pymdownx.inlinehilite 100 | - pymdownx.snippets 101 | - pymdownx.superfences 102 | - pymdownx.tabbed 103 | - pymdownx.tasklist: 104 | custom_checkbox: true 105 | 106 | extra: 107 | social: 108 | - icon: fontawesome/brands/github 109 | link: https://github.com/AlexIoannides 110 | - icon: fontawesome/brands/twitter 111 | link: https://twitter.com/ioannides_alex 112 | - icon: fontawesome/brands/linkedin 113 | link: https://www.linkedin.com/in/alexioannides/ 114 | -------------------------------------------------------------------------------- /requirements_mkdocs.txt: -------------------------------------------------------------------------------- 1 | mkdocs==1.5.* 2 | mkdocs-jupyter==0.24.* 3 | mkdocs-material==9.5.* 4 | black[jupyter]==24.1.1 5 | --------------------------------------------------------------------------------
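As a closing illustration for the SQLAlchemy demo above, here is a minimal, hypothetical usage sketch for the Person and Address models defined in docs/sqlalchemy/models.py. It assumes a throwaway in-memory SQLite database in place of the sqlite:///data.db URL configured in alembic.ini, and all names and values are invented:

"""Hypothetical usage sketch for the Person/Address models (SQLAlchemy 1.4)."""
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from models import Address, Base, Person

engine = create_engine("sqlite://")  # in-memory DB, discarded on exit
Base.metadata.create_all(engine)     # stand-in for applying the Alembic migration

with Session(engine) as session:
    address = Address(street="1 Example Street", city="London", postcode="N1 1AA")
    person = Person(name="Alex", age=42.0, address=address)
    session.add(person)  # the related Address is saved via the relationship cascade
    session.commit()

    alex = session.query(Person).filter_by(name="Alex").one()
    print(alex.dict())          # {'id': 1, 'address': 1, 'name': 'Alex', 'age': 42.0}
    print(alex.address.dict())  # {'id': 1, 'street': '1 Example Street', ...}

Base.metadata.create_all is used here purely for convenience; in the demo proper the schema would be created by applying the c31efd831ee7 initial migration with Alembic.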