├── .coveragerc ├── .gitattributes ├── .github └── workflows │ └── main.yml ├── .gitignore ├── Makefile ├── README.rst ├── about-this-workflow.txt ├── afwf_fts_anything ├── __init__.py ├── _version.py ├── cache.py ├── compat.py ├── dataset.py ├── exc.py ├── handlers │ ├── __init__.py │ └── fts.py ├── helpers.py ├── paths.py ├── setting.py ├── tests │ ├── __init__.py │ └── helper.py └── workflow.py ├── bin ├── automation │ ├── alfred.py │ ├── config.py │ ├── deps.py │ ├── emoji.py │ ├── helpers.py │ ├── logger.py │ ├── paths.py │ ├── runtime.py │ ├── tests.py │ └── venv.py ├── debug_script_filter.py ├── s01_1_virtualenv_venv_create.py ├── s01_2_venv_remove.py ├── s02_1_pip_install.py ├── s02_2_pip_install_dev.py ├── s02_3_pip_install_test.py ├── s02_4_pip_install_doc.py ├── s02_5_pip_install_all.py ├── s02_6_poetry_export.py ├── s02_7_poetry_lock.py ├── s03_1_run_unit_test.py ├── s03_2_run_cov_test.py ├── s05_1_build_wf.py └── s05_2_refresh_code.py ├── codecov.yml ├── docs ├── developer-guide │ ├── 01-About-This-Best-Practice.rst │ ├── 02-Initialize-a-New-Project.rst │ ├── 03-Development-Guide.rst │ ├── 04-Debug-Guide.rst │ ├── 05-Unit-Test-Guide.rst │ └── 06-Release-Guide.rst └── user-guide │ ├── 01-How-it-Works.rst │ ├── 02-How-to-Use.rst │ └── images │ ├── alfred-item.png │ ├── alfred-workflow-configuration.png │ └── alfred-workflow-diagram.png ├── icon.png ├── info.plist ├── main.py ├── poetry.lock ├── poetry.toml ├── pyproject.toml ├── release-history.rst ├── requirements-automation.txt ├── requirements-dev.txt ├── requirements-doc.txt ├── requirements-main.txt ├── requirements-test.txt ├── setup.py └── tests ├── .gitignore ├── all.py ├── handlers ├── .gitignore ├── all.py └── test_handler_fts.py ├── movie-data.json ├── movie-icon └── movie-icon.png ├── movie-setting.json ├── test_dataset.py ├── test_helpers.py ├── test_import.py └── test_setting.py /.coveragerc: -------------------------------------------------------------------------------- 1 | # Coverage.py is a tool for measuring code coverage of Python programs. 2 | # for more info: https://coverage.readthedocs.io/en/latest/config.html 3 | [run] 4 | omit = 5 | afwf_fts_anything/tests/* 6 | afwf_fts_anything/_version.py 7 | afwf_fts_anything/compat.py 8 | 9 | [report] 10 | # Regexes for lines to exclude from consideration 11 | exclude_lines = 12 | # Have to re-enable the standard pragma 13 | pragma: no cover 14 | 15 | # Don't complain about missing debug-only code: 16 | def __repr__ 17 | if self\.debug 18 | 19 | # Don't complain if tests don't hit defensive assertion code: 20 | raise AssertionError 21 | raise NotImplementedError 22 | 23 | # Don't complain if non-runnable code isn't run: 24 | if 0: 25 | if __name__ == .__main__.: 26 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # comprehensive github action yml reference: https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions 2 | 3 | --- 4 | name: CI 5 | 6 | on: 7 | push: # any push event to master will trigger this 8 | branches: ["main"] 9 | pull_request: # any pull request to master will trigger this 10 | branches: ["main"] 11 | workflow_dispatch: # allows you to manually trigger run 12 | 13 | jobs: 14 | tests: 15 | name: "${{ matrix.os }} Python ${{ matrix.python-version }}" 16 | runs-on: "${{ matrix.os }}" # for all available VM runtime, see this: https://docs.github.com/en/free-pro-team@latest/actions/reference/specifications-for-github-hosted-runners 17 | env: # define environment variables 18 | USING_COVERAGE: "3.7,3.8,3.9,3.10,3.11" 19 | strategy: 20 | matrix: 21 | os: ["ubuntu-latest",] 22 | # os: ["ubuntu-latest", ] # for debug only 23 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] 24 | # python-version: ["3.7", ] 25 | steps: 26 | - uses: "actions/checkout@v3" # https://github.com/marketplace/actions/checkout 27 | - uses: "actions/setup-python@v4" # https://github.com/marketplace/actions/setup-python 28 | with: 29 | python-version: "${{ matrix.python-version }}" 30 | 31 | - name: "Install dependencies on Linux" 32 | run: | 33 | set -xe 34 | python -VV 35 | python -m site 36 | python -m pip install --upgrade pip setuptools wheel virtualenv codecov 37 | pip install . 38 | pip install -r requirements-test.txt 39 | 40 | - name: "Run pytest" 41 | run: "python -m pytest tests --cov=afwf_fts_anything" 42 | 43 | - name: "Upload coverage to Codecov" 44 | if: "contains(env.USING_COVERAGE, matrix.python-version)" 45 | uses: "codecov/codecov-action@v3" # https://github.com/marketplace/actions/codecov 46 | with: 47 | fail_ci_if_error: true -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # User Settings 2 | afwf_fts_anything.egg-info/ 3 | afwf_fts_anything-*/ 4 | tmp/ 5 | not-exists-data.json.temp.zip 6 | 7 | .poetry-lock-hash.json 8 | .current-env-name.json 9 | 10 | # Byte-compiled / optimized / DLL files 11 | __pycache__/ 12 | *.py[cod] 13 | *$py.class 14 | 15 | # C extensions 16 | *.so 17 | 18 | # Distribution / packaging 19 | .Python 20 | env/ 21 | build/ 22 | develop-eggs/ 23 | dist/ 24 | downloads/ 25 | eggs/ 26 | .eggs/ 27 | lib/ 28 | lib64/ 29 | parts/ 30 | sdist/ 31 | var/ 32 | *.egg-info/ 33 | .installed.cfg 34 | *.egg 35 | 36 | # PyCharm 37 | .idea/ 38 | 39 | # PyInstaller 40 | # Usually these files are written by a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .coverage 53 | .coverage.* 54 | .cache 55 | .pytest_cache/ 56 | nosetests.xml 57 | coverage.xml 58 | *,cover 59 | .hypothesis/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Django stuff: 66 | *.log 67 | local_settings.py 68 | 69 | # Flask instance folder 70 | instance/ 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/build/ 77 | 78 | # PyBuilder 79 | target/ 80 | 81 | # IPython Notebook 82 | .ipynb_checkpoints 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # celery beat schedule file 88 | celerybeat-schedule 89 | 90 | # dotenv 91 | .env 92 | 93 | # virtualenv 94 | .venv/ 95 | venv/ 96 | ENV/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # ========================= 105 | # Operating System Files 106 | # ========================= 107 | 108 | # OSX 109 | # ========================= 110 | 111 | .DS_Store 112 | .AppleDouble 113 | .LSOverride 114 | 115 | # Thumbnails 116 | ._* 117 | 118 | # Files that might appear in the root of a volume 119 | .DocumentRevisions-V100 120 | .fseventsd 121 | .Spotlight-V100 122 | .TemporaryItems 123 | .Trashes 124 | .VolumeIcon.icns 125 | 126 | # Directories potentially created on remote AFP share 127 | .AppleDB 128 | .AppleDesktop 129 | Network Trash Folder 130 | Temporary Items 131 | .apdisk 132 | 133 | # Windows 134 | # ========================= 135 | 136 | # Windows image file caches 137 | Thumbs.db 138 | ehthumbs.db 139 | 140 | # Folder config file 141 | Desktop.ini 142 | 143 | # Recycle Bin used on file shares 144 | $RECYCLE.BIN/ 145 | 146 | # Windows Installer files 147 | *.cab 148 | *.msi 149 | *.msm 150 | *.msp 151 | 152 | # Windows shortcuts 153 | *.lnk -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | help: ## ** Show this help message 4 | @perl -nle'print $& if m{^[a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-40s\033[0m %s\n", $$1, $$2}' 5 | 6 | 7 | venv-create: ## ** Create Virtual Environment 8 | python ./bin/s01_1_virtualenv_venv_create.py 9 | 10 | 11 | venv-remove: ## ** Remove Virtual Environment 12 | python ./bin/s01_2_venv_remove.py 13 | 14 | 15 | install: ## ** Install main dependencies and Package itself 16 | python ./bin/s02_1_pip_install.py 17 | 18 | 19 | install-dev: ## Install Development Dependencies 20 | python ./bin/s02_2_pip_install_dev.py 21 | 22 | 23 | install-test: ## Install Test Dependencies 24 | python ./bin/s02_3_pip_install_test.py 25 | 26 | 27 | install-doc: ## Install Document Dependencies 28 | python ./bin/s02_4_pip_install_doc.py 29 | 30 | 31 | install-all: ## Install All Dependencies 32 | python ./bin/s02_5_pip_install_all.py 33 | 34 | 35 | poetry-export: ## Export requirements-*.txt from poetry.lock file 36 | python ./bin/s02_6_poetry_export.py 37 | 38 | 39 | poetry-lock: ## Resolve dependencies using poetry, update poetry.lock file 40 | python ./bin/s02_7_poetry_lock.py 41 | 42 | 43 | test: install install-test test-only ## ** Run test 44 | 45 | 46 | test-only: ## Run test without checking test dependencies 47 | ./.venv/bin/python ./bin/s03_1_run_unit_test.py 48 | 49 | 50 | cov: install install-test cov-only ## ** Run code coverage test 51 | 52 | 53 | cov-only: ## Run code coverage test without checking test dependencies 54 | ./.venv/bin/python ./bin/s03_2_run_cov_test.py 55 | 56 | 57 | build-wf: ## ** Build Alfred Workflow release from source code 58 | python ./bin/s05_1_build_wf.py 59 | 60 | 61 | refresh-code: ## ** Refresh Alfred Workflow source code 62 | python ./bin/s05_2_refresh_code.py 63 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://github.com/MacHu-GWU/afwf_fts_anything-project/workflows/CI/badge.svg 2 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project/actions?query=workflow:CI 3 | 4 | .. image:: https://codecov.io/gh/MacHu-GWU/afwf_fts_anything-project/branch/main/graph/badge.svg 5 | :target: https://codecov.io/gh/MacHu-GWU/afwf_fts_anything-project 6 | 7 | .. image:: https://img.shields.io/badge/release_history--None.svg?style=social 8 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project/blob/main/release-history.rst 9 | 10 | .. image:: https://img.shields.io/badge/STAR_Me_on_GitHub!--None.svg?style=social 11 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project 12 | 13 | ------ 14 | 15 | .. image:: https://img.shields.io/badge/Link-GitHub-blue.svg 16 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project 17 | 18 | .. image:: https://img.shields.io/badge/Link-Submit_Issue-blue.svg 19 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project/issues 20 | 21 | .. image:: https://img.shields.io/badge/Link-Request_Feature-blue.svg 22 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project/issues 23 | 24 | .. image:: https://img.shields.io/badge/Link-Download-blue.svg 25 | :target: https://github.com/MacHu-GWU/afwf_fts_anything-project/releases 26 | 27 | 28 | The Alfred Workflow: Full Text Search Anything 29 | ============================================================================== 30 | .. contents:: 31 | :depth: 1 32 | :local: 33 | 34 | 35 | Introduction 36 | ------------------------------------------------------------------------------ 37 | ``afwf_fts_anything`` is an `Alfred Workflow `_ allows you to do full-text search on your own dataset, and use the result to open url, open file, run script, or basically do anything. Typically, you need to setup expansive `elasticsearch `_ server, learn how to do data ingestion, learn search API, and build your own Alfred workflow. ``afwf_fts_anything`` removes all the blockers and let you just focus on your dataset and search configuration. 38 | 39 | **Demo** 40 | 41 | .. image:: https://user-images.githubusercontent.com/6800411/50622795-1fc45580-0ede-11e9-878c-64e2ab6292b1.gif 42 | 43 | Sample Data Set, IMDB Top 250 movies (content of ``movie.json``): 44 | 45 | .. code-block:: javascript 46 | 47 | [ 48 | { 49 | "movie_id": 1, 50 | "title": "The Shawshank Redemption", 51 | "description": "Two imprisoned men bond over a number of years, finding solace and eventual redemption through acts of common decency.", 52 | "genres": "Drama", 53 | "rating": 9.2, 54 | "url": "https://www.imdb.com/title/tt0111161" 55 | }, 56 | { 57 | "movie_id": 2, 58 | "title": "The Godfather", 59 | "description": "The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.", 60 | "genres": "Crime, Drama", 61 | "rating": 9.2, 62 | "url": "https://www.imdb.com/title/tt0068646" 63 | }, 64 | { 65 | "movie_id": 3, 66 | "title": "The Dark Knight", 67 | "description": "When the menace known as the Joker wreaks havoc and chaos on the people of Gotham, Batman must accept one of the greatest psychological and physical tests of his ability to fight injustice.", 68 | "genres": "Action, Crime, Drama", 69 | "rating": 9.0, 70 | "url": "https://www.imdb.com/title/tt0468569" 71 | }, 72 | { 73 | "movie_id": 4, 74 | "title": "12 Angry Men", 75 | "description": "The jury in a New York City murder trial is frustrated by a single member whose skeptical caution forces them to more carefully consider the evidence before jumping to a hasty verdict.", 76 | "genres": "Crime, Drama", 77 | "rating": 9.0, 78 | "url": "https://www.imdb.com/title/tt0050083" 79 | }, 80 | { 81 | "movie_id": 5, 82 | "title": "Schindler's List", 83 | "description": "In German-occupied Poland during World War II, industrialist Oskar Schindler gradually becomes concerned for his Jewish workforce after witnessing their persecution by the Nazis.", 84 | "genres": "Biography, Drama, History", 85 | "rating": 8.9, 86 | "url": "https://www.imdb.com/title/tt0108052" 87 | }, 88 | { 89 | "movie_id": 6, 90 | "title": "The Lord of the Rings: The Return of the King", 91 | "description": "Gandalf and Aragorn lead the World of Men against Sauron's army to draw his gaze from Frodo and Sam as they approach Mount Doom with the One Ring.", 92 | "genres": "Action, Adventure, Drama", 93 | "rating": 8.9, 94 | "url": "https://www.imdb.com/title/tt0167260" 95 | }, 96 | { 97 | "movie_id": 7, 98 | "title": "Pulp Fiction", 99 | "description": "The lives of two mob hitmen, a boxer, a gangster and his wife, and a pair of diner bandits intertwine in four tales of violence and redemption.", 100 | "genres": "Crime, Drama", 101 | "rating": 8.8, 102 | "url": "https://www.imdb.com/title/tt0110912" 103 | }, 104 | { 105 | "movie_id": 8, 106 | "title": "Fight Club", 107 | "description": "An insomniac office worker and a devil-may-care soap maker form an underground fight club that evolves into much more.", 108 | "genres": "Drama", 109 | "rating": 8.7, 110 | "url": "https://www.imdb.com/title/tt0137523" 111 | }, 112 | { 113 | "movie_id": 9, 114 | "title": "Saving Private Ryan", 115 | "description": "Following the Normandy Landings, a group of U.S. soldiers go behind enemy lines to retrieve a paratrooper whose brothers have been killed in action.", 116 | "genres": "Drama, War", 117 | "rating": 8.6, 118 | "url": "https://www.imdb.com/title/tt0120815" 119 | } 120 | ] 121 | 122 | Sample search settings (content of ``movie-setting.json``): 123 | 124 | .. code-block:: javascript 125 | 126 | { 127 | // define how you want to search this dataset 128 | "fields": [ 129 | { 130 | "name": "movie_id", 131 | "type_is_store": true 132 | }, 133 | { 134 | "name": "title", 135 | "type_is_store": true, 136 | "type_is_ngram_words": true, 137 | "ngram_maxsize": 10, 138 | "ngram_minsize": 2, 139 | "weight": 2.0 140 | }, 141 | { 142 | "name": "description", 143 | "type_is_store": true, 144 | "type_is_phrase": true 145 | }, 146 | { 147 | "name": "genres", 148 | "type_is_store": true, 149 | "type_is_keyword": true, 150 | "keyword_lowercase": true, 151 | "weight": 1.5 152 | }, 153 | { 154 | "name": "rating", 155 | "type_is_store": true, 156 | "type_is_numeric": true, 157 | "is_sortable": true, 158 | "is_sort_ascending": false 159 | } 160 | ], 161 | "title_field": "{title} ({genres}) rate {rating}", // title on Alfred drop down menu 162 | "subtitle_field": "{description}", // subtitle on Alfred drop down menu 163 | "arg_field": "{url}", // argument for other workflow component 164 | "autocomplete_field": "{title}", // tab auto complete behavior 165 | "icon_field": "movie-icon.png" 166 | } 167 | 168 | Note: 169 | 170 | ``afwf_fts_anything`` support comments in json, you don't have to remove it to use. 171 | 172 | 173 | User Guide 174 | ------------------------------------------------------------------------------ 175 | - `How it Works <./docs/user-guide/01-How-it-Works.rst>`_ 176 | - `How to Use <./docs/user-guide/02-How-to-Use.rst>`_ 177 | 178 | 179 | Projects based on ``afwf_fts_anything`` 180 | ------------------------------------------------------------------------------ 181 | - Search `AWS CloudFormation Resource and Property Reference `_, quickly jump to Official AWS CloudFormation Resource and Property Document: https://github.com/MacHu-GWU/alfred-cloudformation-resource-property-ref 182 | - Search `Terraform AWS Resource Reference `_, quickly jump to Official Terraform AWS Resource Document: https://github.com/MacHu-GWU/alfred-terraform-resource-property-ref 183 | - Search `AWS Python Boto3 `_ API Reference: https://github.com/MacHu-GWU/alfred-python-boto3-ref 184 | -------------------------------------------------------------------------------- /about-this-workflow.txt: -------------------------------------------------------------------------------- 1 | afwf_fts_anything is an Alfred Workflow allows you to do full-text search on your own dataset, and use the result to open url, open file, run script, or basically do anything. Typically, you need to setup expansive elasticsearch server, learn how to do data ingestion, learn search API, and build your own Alfred workflow. afwf_fts_anything removes all the blockers and let you just focus on your dataset and search configuration. 2 | 3 | See more at: https://github.com/MacHu-GWU/afwf_fts_anything-project 4 | -------------------------------------------------------------------------------- /afwf_fts_anything/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from ._version import __version__ 4 | 5 | __license__ = "MIT" 6 | __author__ = "Sanhe Hu" 7 | __author_email__ = "husanhe@gmail.com" 8 | __github_username__ = "MacHu-GWU" 9 | __chore__ = "dc2ba0d33e28cbfd762ab8579bcb8483" 10 | 11 | try: 12 | from .workflow import wf 13 | except ImportError: # pragma: no cover 14 | pass -------------------------------------------------------------------------------- /afwf_fts_anything/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.2.1" 2 | 3 | if __name__ == "__main__": # pragma: no cover 4 | print(__version__) 5 | -------------------------------------------------------------------------------- /afwf_fts_anything/cache.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Disk cache for Alfred Workflow. 5 | """ 6 | 7 | from diskcache import Cache 8 | 9 | from .paths import dir_cache 10 | 11 | cache = Cache(dir_cache.abspath) 12 | -------------------------------------------------------------------------------- /afwf_fts_anything/compat.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | try: 4 | from cached_property import cached_property 5 | except ImportError: 6 | from functools import cached_property 7 | -------------------------------------------------------------------------------- /afwf_fts_anything/dataset.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import typing as T 4 | import os 5 | import json 6 | from zipfile import ZipFile 7 | 8 | import attr 9 | from attrs_mate import AttrsClass 10 | from pathlib_mate import Path 11 | import requests 12 | from whoosh import fields, qparser, query, sorting 13 | from whoosh.index import open_dir, create_in, FileIndex 14 | 15 | from .paths import dir_project_home, dir_cache 16 | from .compat import cached_property 17 | from .cache import cache 18 | from .setting import Setting 19 | 20 | 21 | @attr.s 22 | class Dataset(AttrsClass): 23 | """ 24 | A Dataset is a search scope of your full-text-search application. 25 | 26 | It has to have a unique name, which is used in the Alfred Workflow script filter 27 | command to locate the setting and the data. 28 | 29 | It has to have three files in the project home directory ``${HOME}/.alfred-afwf/afwf_fts_anything/``: 30 | 31 | - ``${name}-setting.json``: the setting file, which contains the search setting of this dataset. 32 | - ``${name}-data.json``: the data file, which contains the data you want to search. 33 | this file can be user generated, or downloaded from the internet 34 | - ``${name}-whoosh_index``: the index directory, which contains the whoosh index of this dataset. 35 | the folder is automatically generated based on your setting and data. 36 | - ``${name}-icon``: the icon directory, which contains the icon for Alfred. 37 | """ 38 | # fmt: off 39 | name: str = AttrsClass.ib_str() 40 | path_setting: T.Optional[Path] = AttrsClass.ib_generic(type_=Path, nullable=True, default=None) 41 | path_data: T.Optional[Path] = AttrsClass.ib_generic(type_=Path, nullable=True, default=None) 42 | dir_index: T.Optional[Path] = AttrsClass.ib_generic(type_=Path, nullable=True, default=None) 43 | dir_icon: T.Optional[Path] = AttrsClass.ib_generic(type_=Path, nullable=True, default=None) 44 | # fmt: on 45 | 46 | @property 47 | def _path_setting(self) -> Path: 48 | """ 49 | The path to the setting file. 50 | """ 51 | if self.path_setting is not None: 52 | return self.path_setting 53 | return dir_project_home / f"{self.name}-setting.json" 54 | 55 | @property 56 | def _path_data(self) -> Path: 57 | """ 58 | The path to the data file. 59 | """ 60 | if self.path_data is not None: 61 | return self.path_data 62 | return dir_project_home / f"{self.name}-data.json" 63 | 64 | @property 65 | def _dir_index(self) -> Path: 66 | """ 67 | The path to the whoosh index directory. 68 | """ 69 | if self.dir_index is not None: 70 | return self.dir_index 71 | return dir_project_home / f"{self.name}-whoosh_index" 72 | 73 | @property 74 | def _dir_icon(self) -> Path: 75 | """ 76 | The path to the icon directory. 77 | """ 78 | if self.dir_icon is not None: 79 | return self.dir_icon 80 | return dir_project_home / f"{self.name}-icon" 81 | 82 | @cached_property 83 | def setting(self) -> Setting: 84 | """ 85 | Access the setting object that is parsed from the setting file. 86 | """ 87 | return Setting.from_json_file(self._path_setting) 88 | 89 | @cached_property 90 | def schema(self) -> fields.Schema: 91 | """ 92 | Access the whoosh schema based on the setting. 93 | """ 94 | return self.setting.create_whoosh_schema() 95 | 96 | def download_data(self): # pragma: no cover 97 | """ 98 | Download the data from the internet if 99 | """ 100 | if self.setting.data_url is None: 101 | raise ValueError( 102 | "You cannot download data because 'data_url' " 103 | f"is not defined in the setting file '{self._path_setting}'." 104 | ) 105 | response = requests.get(self.setting.data_url) 106 | 107 | # write to temp file first, then move to the data file for atomic write 108 | if self.setting.data_url.endswith(".zip"): 109 | # download to *.temp.zip first 110 | path_temp = Path(str(self._path_data) + ".temp.zip") 111 | path_temp.write_bytes(response.content) 112 | # unzip to tmp/ folder 113 | dir_temp = path_temp.change(new_basename="tmp") 114 | dir_temp.mkdir_if_not_exists() 115 | with ZipFile(path_temp.abspath, "r") as zf: 116 | zf.extractall(dir_temp.abspath) 117 | # move the data file to the right location 118 | for path in dir_temp.select_by_ext(".json"): 119 | path.moveto( 120 | new_abspath=path_temp.parent.joinpath( 121 | path.relative_to(dir_temp) 122 | ).abspath, 123 | overwrite=True, 124 | ) 125 | # delete the tmp/ folder 126 | dir_temp.remove_if_exists() 127 | else: 128 | path_temp = Path(str(self._path_data) + ".temp") 129 | path_temp.write_text(response.text) 130 | path_temp.moveto(new_abspath=self._path_data, overwrite=True) 131 | 132 | def get_data(self) -> T.List[T.Dict[str, T.Any]]: 133 | """ 134 | Get the data from the data file. If data file does not exist, download it first. 135 | """ 136 | if not self._path_data.exists(): # pragma: no cover 137 | self.download_data() 138 | return json.loads(self._path_data.read_text()) 139 | 140 | def get_index(self) -> FileIndex: 141 | if self._dir_index.exists(): 142 | idx = open_dir(self._dir_index.abspath) 143 | else: 144 | self._dir_index.mkdir(parents=True, exist_ok=True) 145 | idx = create_in(dirname=self._dir_index.abspath, schema=self.schema) 146 | return idx 147 | 148 | def remove_index(self): 149 | """ 150 | Remove the whoosh index diretory. 151 | """ 152 | self._dir_index.remove_if_exists() 153 | 154 | def build_index( 155 | self, 156 | data: T.List[T.Dict[str, T.Any]], 157 | multi_thread: bool = False, 158 | rebuild: bool = False, 159 | ): 160 | if rebuild is True: 161 | self.remove_index() 162 | idx = self.get_index() 163 | if multi_thread: # pragma: no cover 164 | writer = idx.writer(procs=os.cpu_count()) 165 | else: 166 | writer = idx.writer() 167 | 168 | for row in data: 169 | doc = { 170 | field_name: row.get(field_name) 171 | for field_name in self.setting.field_names 172 | } 173 | writer.add_document(**doc) 174 | writer.commit() 175 | 176 | def clear_cache(self): # pragma: no cover 177 | dir_cache.remove_if_exists() 178 | 179 | @cache.memoize(expire=5) 180 | def search(self, query_str, limit=20) -> T.List[dict]: 181 | """ 182 | Use full-text search for result. 183 | """ 184 | idx = self.get_index() 185 | q = query.And( 186 | [ 187 | qparser.MultifieldParser( 188 | self.setting.searchable_fields, 189 | schema=self.schema, 190 | ).parse(query_str), 191 | ] 192 | ) 193 | search_kwargs = dict( 194 | q=q, 195 | limit=limit, 196 | ) 197 | if len(self.setting.sortable_fields): 198 | multi_facet = sorting.MultiFacet() 199 | for field_name in self.setting.sortable_fields: 200 | field = self.setting.fields_mapper[field_name] 201 | multi_facet.add_field(field_name, reverse=not field.is_sort_ascending) 202 | search_kwargs["sortedby"] = multi_facet 203 | 204 | with idx.searcher() as searcher: 205 | doc_list = [hit.fields() for hit in searcher.search(**search_kwargs)] 206 | return doc_list 207 | -------------------------------------------------------------------------------- /afwf_fts_anything/exc.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | class MalformedSettingError(ValueError): 4 | pass 5 | 6 | 7 | class GetDataError(Exception): 8 | pass 9 | 10 | 11 | class BuildIndexError(Exception): 12 | pass 13 | -------------------------------------------------------------------------------- /afwf_fts_anything/handlers/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | -------------------------------------------------------------------------------- /afwf_fts_anything/handlers/fts.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import typing as T 4 | import afwf 5 | import attr 6 | from pathlib_mate import Path 7 | 8 | from ..dataset import Dataset 9 | from ..exc import GetDataError, BuildIndexError 10 | 11 | 12 | @attr.define 13 | class Handler(afwf.Handler): 14 | def build_index(self, dataset: Dataset): 15 | """ 16 | Build whoosh index if not exists. 17 | """ 18 | # if index already exists skip it 19 | if dataset._dir_index.exists() is False: 20 | # try to build index, if anything wrong, clear the whoosh index 21 | try: 22 | data = dataset.get_data() 23 | except Exception as e: # pragma: no cover 24 | raise GetDataError(f"GetDataError, {e}") 25 | try: 26 | dataset.build_index(data) 27 | except Exception as e: # pragma: no cover 28 | dataset._dir_index.remove_if_exists() 29 | raise BuildIndexError(f"BuildIndexError, {e}") 30 | 31 | def main( 32 | self, 33 | dataset_name: str, 34 | query_str: str, 35 | path_setting: T.Optional[Path] = None, 36 | path_data: T.Optional[Path] = None, 37 | dir_index: T.Optional[Path] = None, 38 | ) -> afwf.ScriptFilter: 39 | sf = afwf.ScriptFilter() 40 | 41 | # prompt 42 | if len(query_str) == 0: 43 | item = afwf.Item( 44 | title=f"Full text search {dataset_name!r} dataset", 45 | subtitle=f"Please enter a query ...", 46 | ) 47 | sf.items.append(item) 48 | return sf 49 | 50 | kwargs = dict( 51 | name=dataset_name, 52 | path_setting=path_setting, 53 | path_data=path_data, 54 | dir_index=dir_index, 55 | ) 56 | cleaned_kwargs = {k: v for k, v in kwargs.items() if v is not None} 57 | dataset = Dataset(**cleaned_kwargs) 58 | 59 | if query_str == "?": 60 | item = afwf.Item( 61 | title=f"Open {dataset_name!r} dataset folder location", 62 | subtitle=f"hit 'Enter' to open folder location", 63 | ) 64 | item.set_icon(afwf.IconFileEnum.question) 65 | item.reveal_file_in_finder(dataset._path_setting.abspath) 66 | sf.items.append(item) 67 | return sf 68 | 69 | self.build_index(dataset) 70 | 71 | # happy path 72 | doc_list = dataset.search(query_str) 73 | setting = dataset.setting 74 | for doc in doc_list: 75 | arg = setting.format_arg(doc) 76 | item = afwf.Item( 77 | title=setting.format_title(doc), 78 | subtitle=setting.format_subtitle(doc), 79 | arg=arg, 80 | autocomplete=setting.format_autocomplete(doc), 81 | ) 82 | item.open_url(url=arg) 83 | icon = setting.format_icon(doc) 84 | if icon is not None: 85 | # use absolute path 86 | if icon.startswith("/"): 87 | item.set_icon(icon) 88 | # use relative path 89 | else: 90 | item.set_icon(dataset._dir_icon.joinpath(icon).abspath) 91 | sf.items.append(item) 92 | 93 | # found no result 94 | if len(sf.items) == 0: 95 | item = afwf.Item( 96 | title=f"No result found for query: {query_str!r}", 97 | subtitle="hit 'Tab' to enter a new query", 98 | autocomplete=" ", 99 | ) 100 | item.set_icon(afwf.IconFileEnum.error) 101 | sf.items.append(item) 102 | 103 | return sf 104 | 105 | def parse_query(self, query: str): 106 | afwf.log_debug_info(f"receive query: {query!r}") 107 | # strip all delimiter except ?, because ? is used for special action 108 | q = afwf.QueryParser(delimiter=list(" ~`!@#$%^&*()-_=+{}[]\\|:;\"'<>,./")).parse(query) 109 | afwf.log_debug_info(f"trimmed parts: {q.trimmed_parts}") 110 | dataset_name = q.trimmed_parts[0] 111 | query_str = " ".join(q.trimmed_parts[1:]) 112 | return dict( 113 | dataset_name=dataset_name, 114 | query_str=query_str, 115 | ) 116 | 117 | 118 | handler = Handler(id="fts") 119 | -------------------------------------------------------------------------------- /afwf_fts_anything/helpers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import typing as T 4 | 5 | 6 | def is_no_overlap(list_of_container: T.List[list]) -> bool: 7 | """ 8 | Test if there's no common item in several set. 9 | """ 10 | return ( 11 | sum([len(container) for container in list_of_container]) 12 | == len(set.union(*[set(container) for container in list_of_container])) 13 | ) 14 | -------------------------------------------------------------------------------- /afwf_fts_anything/paths.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from pathlib_mate import Path 4 | 5 | dir_python_lib = Path.dir_here(__file__) 6 | dir_project_root = dir_python_lib.parent 7 | 8 | PACKAGE_NAME = dir_python_lib.basename 9 | 10 | # ------------------------------------------------------------------------------ 11 | # Alfred Related 12 | # ------------------------------------------------------------------------------ 13 | dir_home = Path.home() 14 | dir_project_home = dir_home / ".alfred-afwf" / PACKAGE_NAME 15 | dir_project_home.mkdir_if_not_exists() 16 | 17 | dir_cache = dir_project_home / ".cache" 18 | 19 | # ------------------------------------------------------------------------------ 20 | # Virtual Environment Related 21 | # ------------------------------------------------------------------------------ 22 | dir_venv = dir_project_root / ".venv" 23 | dir_venv_bin = dir_venv / "bin" 24 | 25 | # virtualenv executable paths 26 | bin_pytest = dir_venv_bin / "pytest" 27 | 28 | # test related 29 | dir_htmlcov = dir_project_root / "htmlcov" 30 | path_cov_index_html = dir_htmlcov / "index.html" 31 | dir_unit_test = dir_project_root / "tests" 32 | dir_int_test = dir_project_root / "tests_int" 33 | 34 | path_setting = dir_unit_test / "movie-setting.json" 35 | path_data = dir_unit_test / "movie-data.json" 36 | dir_index = dir_unit_test / "movie-whoosh_index" 37 | dir_icon = dir_unit_test / "movie-icon" 38 | -------------------------------------------------------------------------------- /afwf_fts_anything/setting.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | This module implements the abstraction of the dataset settings, and convert the 5 | settings to ``whoosh.fields.Schema`` Class. 6 | """ 7 | 8 | import typing as T 9 | from collections import OrderedDict 10 | 11 | import re 12 | import afwf 13 | import attr 14 | from attrs_mate import AttrsClass 15 | from pathlib_mate import Path 16 | from superjson import json 17 | import whoosh.fields 18 | 19 | from .helpers import is_no_overlap 20 | from .compat import cached_property 21 | from .exc import MalformedSettingError 22 | 23 | 24 | @attr.s 25 | class Field(AttrsClass): 26 | """ 27 | Defines how do you want to store / index this field for full text search: 28 | 29 | :param name: the name of the field 30 | :param type_is_store: if True, the value is only stored but not indexed for 31 | search. Usually it can be used to dynamically construct value for argument 32 | (the action when you press enter), or for auto complete (the action 33 | when you press tab) 34 | :param type_is_ngram: if True, the value is index using ngram. It matches 35 | any character shorter than N characters. 36 | https://whoosh.readthedocs.io/en/latest/ngrams.html. 37 | :param type_is_ngram_words: similar to type_is_ngram, but it tokenizes 38 | text into words before index. It matches any character shorter than N characters. 39 | https://whoosh.readthedocs.io/en/latest/api/fields.html#whoosh.fields.NGRAMWORDS. 40 | :param type_is_phrase: if True, the value is indexed using phrase. Only 41 | case-insensitive phrase will be matched. See 42 | https://whoosh.readthedocs.io/en/latest/schema.html#built-in-field-types 43 | :param type_is_keyword: if True, the value is indexed using keyword. The 44 | keyword has to be exactly matched. See 45 | https://whoosh.readthedocs.io/en/latest/schema.html#built-in-field-types 46 | :param type_is_numeric: if True, the value is indexed using number. The 47 | number field is not used for searching, it is only used for sorting. See 48 | https://whoosh.readthedocs.io/en/latest/schema.html#built-in-field-types 49 | :param ngram_minsize: minimal number of character to match, default is 2. 50 | :param ngram_maxsize: maximum number of character to match, default is 10. 51 | :param keyword_lowercase: for keyword type field, is the match case-sensitive? 52 | default True (not sensitive). 53 | :param keyword_commas: is the delimiter of keyword is comma or space? 54 | :param weight: the weight of the field for sorting in the search result. 55 | default is 1.0. 56 | :param is_sortable: is the field will be used for sorting? If True, the field 57 | has to be stored. 58 | :param is_sort_ascending: is the field will be used for sort ascending? 59 | """ 60 | 61 | name: str = attr.ib() 62 | type_is_store: bool = attr.ib(default=False) 63 | type_is_ngram: bool = attr.ib(default=False) 64 | type_is_ngram_words: bool = attr.ib(default=False) 65 | type_is_phrase: bool = attr.ib(default=False) 66 | type_is_keyword: bool = attr.ib(default=False) 67 | type_is_numeric: bool = attr.ib(default=False) 68 | ngram_minsize: bool = attr.ib(default=2) 69 | ngram_maxsize: bool = attr.ib(default=10) 70 | keyword_lowercase: bool = attr.ib(default=True) 71 | keyword_commas: bool = attr.ib(default=True) 72 | weight: float = attr.ib(default=1.0) 73 | is_sortable: bool = attr.ib(default=False) 74 | is_sort_ascending: bool = attr.ib(default=True) 75 | 76 | def __attrs_post_init__(self): 77 | # do some validation 78 | flag = sum( 79 | [ 80 | self.type_is_ngram, 81 | self.type_is_ngram_words, 82 | self.type_is_phrase, 83 | self.type_is_keyword, 84 | self.type_is_numeric, 85 | ] 86 | ) 87 | if flag <= 1: 88 | pass 89 | else: 90 | msg = ( 91 | f"you have to specify one and only one index type for column {self.name!r}, " 92 | f"valid types are: ngram, ngram_words, phrase, keyword, numeric." 93 | ) 94 | raise MalformedSettingError(msg) 95 | 96 | if self.is_sortable is True and self.type_is_store is False: 97 | msg = f"you have to use store field for sorting by {self.name!r}!" 98 | raise MalformedSettingError(msg) 99 | 100 | 101 | p = re.compile(r"\{([A-Za-z0-9_]+)\}") 102 | 103 | 104 | @attr.s 105 | class Setting(AttrsClass): 106 | """ 107 | Defines how you want to index your dataset. 108 | 109 | :param fields: list of :class:`Field` objects, defines how you want to search. 110 | :param title_field: which field is used as ``WorkflowItem.title``. It displays 111 | as the big title in alfred drop down menu. 112 | :param subtitle_field: which field is used as ``WorkflowItem.subtitle``. 113 | :param arg_field: which field is used as ``WorkflowItem.arg``. 114 | :param autocomplete_field: which field is used as ``WorkflowItem.autocomplete``. 115 | :param icon_field: which field is used as ``WorkflowItem.icon``. 116 | 117 | :param data_url: the url of the data set json, it can be a local file path or 118 | :param skip_post_init: implementation reserved attribute. 119 | """ 120 | 121 | fields: T.List[Field] = Field.ib_list_of_nested() 122 | 123 | title_field: T.Optional[str] = AttrsClass.ib_str(default=None) 124 | subtitle_field: T.Optional[str] = AttrsClass.ib_str(default=None) 125 | arg_field: T.Optional[str] = AttrsClass.ib_str(default=None) 126 | autocomplete_field: T.Optional[str] = AttrsClass.ib_str(default=None) 127 | icon_field: T.Optional[str] = AttrsClass.ib_str(default=None) 128 | 129 | data_url: T.Optional[str] = AttrsClass.ib_str(default=None) 130 | 131 | skip_post_init = attr.ib(default=False) 132 | 133 | def _check_fields_name(self): 134 | if len(set(self.field_names)) != len(self.fields): 135 | msg = f"you have duplicate field names in your fields: {self.field_names}" 136 | raise MalformedSettingError(msg) 137 | 138 | def _check_fields_index_type(self): # pragma: no cover 139 | if not is_no_overlap( 140 | [ 141 | self.ngram_fields, 142 | self.phrase_fields, 143 | self.keyword_fields, 144 | ] 145 | ): 146 | msg = ( 147 | "`ngram_fields`, `phrase_fields` and `keyword_fields` " 148 | "should not have any overlaps!" 149 | ) 150 | raise MalformedSettingError(msg) 151 | 152 | def _check_title_field(self): 153 | if self.title_field is None: 154 | if "title" in self.field_names: 155 | if self.fields_mapper["title"].type_is_store is False: 156 | msg = "the title field is not a stored field!" 157 | raise MalformedSettingError(msg) 158 | else: 159 | msg = ( 160 | f"when title_field is not defined, " 161 | f"you have to have a field called 'title' in your data fields, " 162 | f"here's your data fields: {self.field_names}" 163 | ) 164 | raise MalformedSettingError(msg) 165 | else: 166 | for key in re.findall(p, self.title_field): 167 | if key in self.fields_mapper: 168 | if self.fields_mapper[key].type_is_store is False: 169 | msg = ( 170 | f"your title_field = {self.title_field!r} " 171 | f"contains a field name {key!r}, " 172 | f"but this field is not stored: {self.fields_mapper[key]}" 173 | ) 174 | raise MalformedSettingError(msg) 175 | else: 176 | msg = ( 177 | f"your title_field = {self.title_field!r} " 178 | f"contains a field name {key!r}, " 179 | f"but it is not defined in your fields: {self.field_names}" 180 | ) 181 | raise MalformedSettingError(msg) 182 | 183 | def __attrs_post_init__(self): 184 | # do some validation 185 | if self.skip_post_init is False: 186 | self._check_fields_name() 187 | self._check_fields_index_type() 188 | self._check_title_field() 189 | 190 | @cached_property 191 | def fields_mapper(self) -> T.Dict[str, Field]: 192 | return {field.name: field for field in self.fields} 193 | 194 | @cached_property 195 | def store_fields(self) -> T.List[str]: 196 | return [field.name for field in self.fields if field.type_is_store] 197 | 198 | @cached_property 199 | def ngram_fields(self) -> T.List[str]: 200 | return [field.name for field in self.fields if field.type_is_ngram or field.type_is_ngram_words] 201 | 202 | @cached_property 203 | def phrase_fields(self) -> T.List[str]: 204 | return [field.name for field in self.fields if field.type_is_phrase] 205 | 206 | @cached_property 207 | def keyword_fields(self) -> T.List[str]: 208 | return [field.name for field in self.fields if field.type_is_keyword] 209 | 210 | @cached_property 211 | def numeric_fields(self) -> T.List[str]: 212 | return [field.name for field in self.fields if field.type_is_numeric] 213 | 214 | @cached_property 215 | def searchable_fields(self) -> T.List[str]: 216 | return ( 217 | self.ngram_fields 218 | + self.phrase_fields 219 | + self.keyword_fields 220 | + self.numeric_fields 221 | ) 222 | 223 | @cached_property 224 | def sortable_fields(self) -> T.List[str]: 225 | return [field.name for field in self.fields if field.is_sortable] 226 | 227 | @property 228 | def field_names(self) -> T.List[str]: 229 | return [field.name for field in self.fields] 230 | 231 | @classmethod 232 | def from_json_file(cls, path: T.Union[str, Path]) -> "Setting": # pragma: no cover 233 | return cls.from_dict( 234 | json.loads( 235 | Path(path).read_text(), 236 | ignore_comments=True, 237 | ) 238 | ) 239 | 240 | def create_whoosh_schema(self) -> whoosh.fields.Schema: 241 | """ 242 | Dynamically create whoosh.fields.SchemaClass schema object. 243 | It defines how you index your dataset. 244 | """ 245 | schema_classname = "WhooshSchema" 246 | schema_classname = str(schema_classname) 247 | attrs = OrderedDict() 248 | for field in self.fields: 249 | if field.type_is_ngram: 250 | whoosh_field = whoosh.fields.NGRAM( 251 | stored=field.type_is_store, 252 | minsize=field.ngram_minsize, 253 | maxsize=field.ngram_maxsize, 254 | field_boost=field.weight, 255 | sortable=field.is_sortable, 256 | ) 257 | elif field.type_is_ngram_words: 258 | whoosh_field = whoosh.fields.NGRAMWORDS( 259 | stored=field.type_is_store, 260 | minsize=field.ngram_minsize, 261 | maxsize=field.ngram_maxsize, 262 | field_boost=field.weight, 263 | sortable=field.is_sortable, 264 | ) 265 | elif field.type_is_phrase: 266 | whoosh_field = whoosh.fields.TEXT( 267 | stored=field.type_is_store, 268 | field_boost=field.weight, 269 | sortable=field.is_sortable, 270 | ) 271 | elif field.type_is_keyword: 272 | whoosh_field = whoosh.fields.KEYWORD( 273 | stored=field.type_is_store, 274 | lowercase=field.keyword_lowercase, 275 | commas=field.keyword_commas, 276 | field_boost=field.weight, 277 | sortable=field.is_sortable, 278 | ) 279 | elif field.type_is_numeric: 280 | whoosh_field = whoosh.fields.NUMERIC( 281 | stored=field.type_is_store, 282 | field_boost=field.weight, 283 | sortable=field.is_sortable, 284 | ) 285 | elif field.type_is_store: 286 | whoosh_field = whoosh.fields.STORED() 287 | else: # pragma: no cover 288 | raise NotImplementedError 289 | attrs[field.name] = whoosh_field 290 | SchemaClass = type(schema_classname, (whoosh.fields.SchemaClass,), attrs) 291 | schema = SchemaClass() 292 | return schema 293 | 294 | def format_title(self, data: T.Dict[str, T.Any]) -> str: # pragma: no cover 295 | if self.title_field is None: 296 | return data.get("title") 297 | else: 298 | return self.title_field.format(**data) 299 | 300 | def format_subtitle( 301 | self, data: T.Dict[str, T.Any] 302 | ) -> T.Optional[str]: # pragma: no cover 303 | if self.subtitle_field is None: 304 | return data.get("subtitle") 305 | else: 306 | return self.subtitle_field.format(**data) 307 | 308 | def format_arg( 309 | self, data: T.Dict[str, T.Any] 310 | ) -> T.Optional[str]: # pragma: no cover 311 | if self.arg_field is None: 312 | return data.get("arg") 313 | else: 314 | return self.arg_field.format(**data) 315 | 316 | def format_autocomplete( 317 | self, data: T.Dict[str, T.Any] 318 | ) -> T.Optional[str]: # pragma: no cover 319 | if self.autocomplete_field is None: 320 | return data.get("autocomplete") 321 | else: 322 | return self.autocomplete_field.format(**data) 323 | 324 | def format_icon( 325 | self, data: T.Dict[str, T.Any] 326 | ) -> T.Optional[str]: # pragma: no cover 327 | if self.icon_field is None: 328 | return data.get("icon") 329 | else: 330 | return self.icon_field.format(**data) 331 | -------------------------------------------------------------------------------- /afwf_fts_anything/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from .helper import run_cov_test 4 | -------------------------------------------------------------------------------- /afwf_fts_anything/tests/helper.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import subprocess 4 | 5 | from ..paths import dir_project_root, dir_htmlcov, path_cov_index_html, bin_pytest 6 | 7 | 8 | def _run_cov_test( 9 | bin_pytest: str, 10 | script: str, 11 | module: str, 12 | root_dir: str, 13 | htmlcov_dir: str, 14 | ): 15 | """ 16 | A simple wrapper around pytest + coverage cli command. 17 | 18 | :param bin_pytest: the path to pytest executable 19 | :param script: the path to test script 20 | :param module: the dot notation to the python module you want to calculate 21 | coverage 22 | :param root_dir: the dir to dump coverage results binary file 23 | :param htmlcov_dir: the dir to dump HTML output 24 | """ 25 | args = [ 26 | bin_pytest, 27 | "-s", "--tb=native", 28 | f"--rootdir={root_dir}", 29 | f"--cov={module}", 30 | "--cov-report", "term-missing", 31 | "--cov-report", f"html:{htmlcov_dir}", 32 | script, 33 | ] 34 | subprocess.run(args) 35 | 36 | 37 | def run_cov_test(script: str, module: str, preview: bool=False): 38 | _run_cov_test( 39 | bin_pytest=f"{bin_pytest}", 40 | script=script, 41 | module=module, 42 | root_dir=f"{dir_project_root}", 43 | htmlcov_dir=f"{dir_htmlcov}", 44 | ) 45 | if preview: 46 | subprocess.run(["open", f"{path_cov_index_html}"]) 47 | -------------------------------------------------------------------------------- /afwf_fts_anything/workflow.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import afwf 4 | 5 | from .handlers import ( 6 | fts, 7 | ) 8 | 9 | wf = afwf.Workflow() 10 | wf.register(fts.handler) 11 | -------------------------------------------------------------------------------- /bin/automation/alfred.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import subprocess 4 | from .config import config 5 | from .paths import ( 6 | PACKAGE_NAME, 7 | bin_pip, 8 | dir_project_root, 9 | path_git_repo_main_py, 10 | path_git_repo_info_plist, 11 | ) 12 | from .deps import _try_poetry_export 13 | from .logger import logger 14 | 15 | path_workflow_info_plist = config.dir_workflow / "info.plist" 16 | path_workflow_main_py = config.dir_workflow / "main.py" 17 | dir_workflow_lib = config.dir_workflow / "lib" 18 | 19 | 20 | @logger.pretty_log() 21 | def build_wf(): 22 | """ 23 | Build Alfred Workflow release from source code. Basically it creates: 24 | 25 | - user.workflow.../main.py 26 | - user.workflow.../lib 27 | - ${dir_project_root}/info.plist 28 | """ 29 | # delete user.workflow.../main.py 30 | path_workflow_main_py.remove_if_exists() 31 | # delete user.workflow.../lib 32 | dir_workflow_lib.remove_if_exists() 33 | # delete ${dir_project_roo}/info.plist 34 | path_git_repo_info_plist.remove_if_exists() 35 | 36 | # create user.workflow.../main.py 37 | path_git_repo_main_py.copyto(path_workflow_main_py) 38 | 39 | # create user.workflow.../lib/ 40 | _try_poetry_export() 41 | with dir_project_root.temp_cwd(): 42 | args = [ 43 | f"{bin_pip}", 44 | "install", 45 | f"{dir_project_root}", 46 | f"--target={dir_workflow_lib}", 47 | ] 48 | subprocess.run(args) 49 | 50 | # create info.plist 51 | path_workflow_info_plist.copyto(path_git_repo_info_plist) 52 | 53 | 54 | @logger.pretty_log() 55 | def refresh_code(): 56 | """ 57 | This shell script only re-build the main.py and the source code 58 | to Alfred Workflow preference directory, without install any dependencies 59 | 60 | It allows developer to quickly test the latest code with real Alfred UI 61 | You should run this script everything you update your source code 62 | """ 63 | # delete user.workflow.../main.py 64 | path_workflow_main_py.remove_if_exists() 65 | # delete user.workflow.../lib/${PACKAGE_NAME}/ 66 | dir_workflow_lib.joinpath(PACKAGE_NAME).remove_if_exists() 67 | # delete user.workflow.../lib/${PACKAGE_NAME}-${VERSION}.dist-info/ 68 | for p in dir_workflow_lib.iterdir(): 69 | if p.basename.startswith(f"{PACKAGE_NAME}-") and p.basename.endswith( 70 | ".dist-info" 71 | ): 72 | p.remove_if_exists() 73 | 74 | # create user.workflow.../main.py 75 | path_git_repo_main_py.copyto(path_workflow_main_py) 76 | _try_poetry_export() 77 | with dir_project_root.temp_cwd(): 78 | args = [ 79 | f"{bin_pip}", 80 | "install", 81 | f"{dir_project_root}", 82 | "--no-dependencies", 83 | f"--target={dir_workflow_lib}", 84 | ] 85 | subprocess.run(args) 86 | 87 | 88 | @logger.pretty_log( 89 | start_msg="🪲 Debug Script Filter", 90 | end_msg="🪲 End 'Debug Script Filter', elapsed = {elapsed} sec", 91 | ) 92 | def debug( 93 | bin_python: str, 94 | handler_id: str, 95 | query: str, 96 | ): 97 | """ 98 | This is a utility function to debug an Alfred Workflow. 99 | 100 | :param bin_python: 101 | :param handler_id: 102 | :param query: 103 | """ 104 | args = [ 105 | bin_python, 106 | "main.py", 107 | f"{handler_id} {query}", 108 | ] 109 | cmd = " ".join(args) 110 | logger.info(f"run: {cmd}") 111 | with config.dir_workflow.temp_cwd(): 112 | subprocess.run(args) 113 | -------------------------------------------------------------------------------- /bin/automation/config.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Automation config management. 5 | """ 6 | 7 | import dataclasses 8 | from pathlib_mate import Path 9 | 10 | 11 | @dataclasses.dataclass 12 | class AutomationConfig: 13 | python_version: str = dataclasses.field() 14 | dir_workflow: Path = dataclasses.field() 15 | 16 | 17 | config = AutomationConfig( 18 | python_version="3.8", 19 | dir_workflow=Path( 20 | "/Users/sanhehu/Documents/Alfred-Setting/Alfred.alfredpreferences/workflows/user.workflow.029AD850-D41F-4B53-B495-35061A408298", 21 | ), 22 | ) 23 | -------------------------------------------------------------------------------- /bin/automation/deps.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Dependencies management 5 | """ 6 | 7 | import typing as T 8 | import json 9 | import subprocess 10 | from pathlib_mate import Path 11 | 12 | from .paths import ( 13 | dir_project_root, 14 | bin_pip, 15 | path_requirements_main, 16 | path_requirements_dev, 17 | path_requirements_test, 18 | path_requirements_doc, 19 | path_requirements_automation, 20 | path_poetry_lock, 21 | path_poetry_lock_hash_json, 22 | ) 23 | from .logger import logger 24 | from .helpers import sha256_of_bytes 25 | from .runtime import IS_CI 26 | 27 | 28 | @logger.pretty_log( 29 | start_msg="⏱ 💾 Install main dependencies and Package itself", 30 | end_msg="⏰ End 'Install main dependencies and Package itself', elapsed = {elapsed} sec", 31 | ) 32 | def poetry_install(): 33 | """ 34 | ``poetry install`` 35 | """ 36 | subprocess.run(["poetry", "install"], check=True) 37 | 38 | 39 | @logger.pretty_log( 40 | start_msg="⏱ 💾 Install dev dependencies", 41 | end_msg="⏰ End 'Install dev dependencies', elapsed = {elapsed} sec", 42 | ) 43 | def poetry_install_dev(): 44 | """ 45 | ``poetry install --with dev`` 46 | """ 47 | subprocess.run(["poetry", "install", "--with", "dev"], check=True) 48 | 49 | 50 | @logger.pretty_log( 51 | start_msg="⏱ 💾 Install test dependencies", 52 | end_msg="⏰ End 'Install test dependencies', elapsed = {elapsed} sec", 53 | ) 54 | def poetry_install_test(): 55 | """ 56 | ``poetry install --with test`` 57 | """ 58 | subprocess.run(["poetry", "install", "--with", "test"], check=True) 59 | 60 | 61 | @logger.pretty_log( 62 | start_msg="⏱ 💾 Install doc dependencies", 63 | end_msg="⏰ End 'Install doc dependencies', elapsed = {elapsed} sec", 64 | ) 65 | def poetry_install_doc(): 66 | """ 67 | ``poetry install --with doc`` 68 | """ 69 | subprocess.run(["poetry", "install", "--with", "doc"], check=True) 70 | 71 | 72 | @logger.pretty_log( 73 | start_msg="⏱ 💾 Install all dependencies for dev, test, doc", 74 | end_msg="⏰ End 'Install all dependencies for dev, test, doc', elapsed = {elapsed} sec", 75 | ) 76 | def poetry_install_all(): 77 | """ 78 | ``poetry install --with dev,test,doc`` 79 | """ 80 | subprocess.run(["poetry", "install", "--with", "dev,test,doc"], check=True) 81 | 82 | 83 | def do_we_need_poetry_export(poetry_lock_hash: str) -> bool: 84 | """ 85 | Compare the given poetry.lock file cache to the value stored in the 86 | ``poetry.lock.hash`` file. If matches, then we don't need to do poetry export. 87 | Otherwise, we should do poetry export. 88 | 89 | :param poetry_lock_hash: the sha256 hash of the ``poetry.lock`` file 90 | """ 91 | if path_poetry_lock_hash_json.exists(): 92 | cached_poetry_lock_hash = json.loads(path_poetry_lock_hash_json.read_text())[ 93 | "hash" 94 | ] 95 | return poetry_lock_hash != cached_poetry_lock_hash 96 | else: 97 | return True 98 | 99 | 100 | def _poetry_export_group(group: str, path: Path): 101 | """ 102 | Export dependency group to given path. Usually a requirements.txt file. 103 | """ 104 | subprocess.run( 105 | [ 106 | "poetry", 107 | "export", 108 | "--format", 109 | "requirements.txt", 110 | "--output", 111 | f"{path}", 112 | "--only", 113 | group, 114 | ], 115 | check=True, 116 | ) 117 | 118 | 119 | def _poetry_export(poetry_lock_hash: str): 120 | path_list = [ 121 | path_requirements_main, 122 | path_requirements_dev, 123 | path_requirements_test, 124 | path_requirements_doc, 125 | ] 126 | for path in path_list: 127 | path.remove_if_exists() 128 | 129 | logger.info(f"export to {path_requirements_main.name}") 130 | subprocess.run( 131 | [ 132 | "poetry", 133 | "export", 134 | "--format", 135 | "requirements.txt", 136 | "--output", 137 | f"{path_requirements_main}", 138 | ], 139 | check=True, 140 | ) 141 | 142 | for group, path in [ 143 | ("dev", path_requirements_dev), 144 | ("test", path_requirements_test), 145 | ("doc", path_requirements_doc), 146 | ]: 147 | logger.info(f"export to {path.name}") 148 | _poetry_export_group(group, path) 149 | 150 | path_poetry_lock_hash_json.write_text( 151 | json.dumps( 152 | { 153 | "hash": poetry_lock_hash, 154 | "description": "DON'T edit this file manually!", 155 | }, 156 | indent=4, 157 | ) 158 | ) 159 | 160 | 161 | @logger.pretty_log( 162 | start_msg="⏱ Export resolved dependency to requirements-***.txt file", 163 | end_msg="⏰ End 'Export resolved dependency to requirements-***.txt file', elapsed = {elapsed} sec", 164 | ) 165 | def poetry_export(): 166 | # calculate the poetry.lock file 167 | poetry_lock_hash = sha256_of_bytes(path_poetry_lock.read_bytes()) 168 | if do_we_need_poetry_export(poetry_lock_hash) is False: 169 | logger.info("already did, do nothing") 170 | return 171 | _poetry_export(poetry_lock_hash) 172 | 173 | 174 | def _try_poetry_export(): 175 | """ 176 | Run poetry export when needed. 177 | 178 | This function will be used by other functions to export the dependencies, 179 | then we can do pip install. 180 | """ 181 | poetry_lock_hash = sha256_of_bytes(path_poetry_lock.read_bytes()) 182 | if do_we_need_poetry_export(poetry_lock_hash): 183 | _poetry_export(poetry_lock_hash) 184 | 185 | 186 | @logger.pretty_log( 187 | start_msg="⏱ Resolve Dependencies Tree", 188 | end_msg="⏰ End 'Resolve Dependencies Tree', elapsed = {elapsed} sec", 189 | ) 190 | def poetry_lock(): 191 | """ 192 | cmd: ``poetry lock`` 193 | """ 194 | with dir_project_root.temp_cwd(): 195 | subprocess.run(["poetry", "lock"]) 196 | 197 | 198 | def _quite_pip_install_in_ci(args: T.List[str]): 199 | if IS_CI: 200 | args.append("--quiet") 201 | 202 | 203 | @logger.pretty_log( 204 | start_msg="⏱ 💾 Install main dependencies and Package itself", 205 | end_msg="⏰ End 'Install main dependencies and Package itself', elapsed = {elapsed} sec", 206 | ) 207 | def pip_install(): 208 | """ 209 | cmd: ``pip install -e . --no-deps`` 210 | """ 211 | _try_poetry_export() 212 | 213 | subprocess.run( 214 | [f"{bin_pip}", "install", "-e", f"{dir_project_root}", "--no-deps"], 215 | check=True, 216 | ) 217 | 218 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_main}"] 219 | _quite_pip_install_in_ci(args) 220 | subprocess.run( 221 | args, 222 | check=True, 223 | ) 224 | 225 | 226 | @logger.pretty_log( 227 | start_msg="⏱ 💾 Install dev dependencies", 228 | end_msg="⏰ End 'Install dev dependencies', elapsed = {elapsed} sec", 229 | ) 230 | def pip_install_dev(): 231 | """ 232 | cmd: ``pip install -r requirements-dev.txt`` 233 | """ 234 | _try_poetry_export() 235 | 236 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_dev}"] 237 | _quite_pip_install_in_ci(args) 238 | subprocess.run( 239 | args, 240 | check=True, 241 | ) 242 | 243 | 244 | @logger.pretty_log( 245 | start_msg="⏱ 💾 Install test dependencies", 246 | end_msg="⏰ End 'Install test dependencies', elapsed = {elapsed} sec", 247 | ) 248 | def pip_install_test(): 249 | """ 250 | cmd: ``pip install -r requirements-test.txt`` 251 | """ 252 | _try_poetry_export() 253 | 254 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_test}"] 255 | _quite_pip_install_in_ci(args) 256 | subprocess.run( 257 | args, 258 | check=True, 259 | ) 260 | 261 | 262 | @logger.pretty_log( 263 | start_msg="⏱ 💾 Install doc dependencies", 264 | end_msg="⏰ End 'Install doc dependencies', elapsed = {elapsed} sec", 265 | ) 266 | def pip_install_doc(): 267 | """ 268 | cmd: ``pip install -r requirements-doc.txt`` 269 | """ 270 | _try_poetry_export() 271 | 272 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_doc}"] 273 | _quite_pip_install_in_ci(args) 274 | subprocess.run( 275 | args, 276 | check=True, 277 | ) 278 | 279 | 280 | @logger.pretty_log( 281 | start_msg="⏱ 💾 Install automation dependencies", 282 | end_msg="⏰ End 'Install automation dependencies', elapsed = {elapsed} sec", 283 | ) 284 | def pip_install_automation(): 285 | """ 286 | cmd: ``pip install -r requirements-automation.txt`` 287 | """ 288 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_automation}"] 289 | _quite_pip_install_in_ci(args) 290 | subprocess.run( 291 | args, 292 | check=True, 293 | ) 294 | 295 | 296 | @logger.pretty_log( 297 | start_msg="⏱ 💾 Install all dependencies for dev, test, doc, automation", 298 | end_msg="⏰ End 'Install all dependencies for dev, test, doc, automation', elapsed = {elapsed} sec", 299 | ) 300 | def pip_install_all(): 301 | """ 302 | cmd: ``pip install -r requirements-dev.txt`` 303 | """ 304 | _try_poetry_export() 305 | 306 | subprocess.run( 307 | [f"{bin_pip}", "install", "-e", f"{dir_project_root}", "--no-deps"], 308 | check=True, 309 | ) 310 | 311 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_main}"] 312 | _quite_pip_install_in_ci(args) 313 | subprocess.run( 314 | args, 315 | check=True, 316 | ) 317 | 318 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_dev}"] 319 | _quite_pip_install_in_ci(args) 320 | subprocess.run( 321 | args, 322 | check=True, 323 | ) 324 | 325 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_test}"] 326 | _quite_pip_install_in_ci(args) 327 | subprocess.run( 328 | args, 329 | check=True, 330 | ) 331 | 332 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_doc}"] 333 | _quite_pip_install_in_ci(args) 334 | subprocess.run( 335 | args, 336 | check=True, 337 | ) 338 | 339 | args = [f"{bin_pip}", "install", "-r", f"{path_requirements_automation}"] 340 | _quite_pip_install_in_ci(args) 341 | subprocess.run( 342 | args, 343 | check=True, 344 | ) 345 | -------------------------------------------------------------------------------- /bin/automation/emoji.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Enumerate useful, UTF8 emoji characters. 5 | 6 | Full list is here: https://unicode.org/emoji/charts/full-emoji-list.html 7 | """ 8 | 9 | import enum 10 | 11 | 12 | class Emoji(str, enum.Enum): 13 | start_timer = "⏱" 14 | end_timer = "⏰" 15 | go = "▶️" 16 | stop = "⏹️" 17 | error = "🔥" 18 | 19 | relax = "🌴" 20 | python = "🐍" 21 | 22 | test = "🧪" 23 | install = "💾" 24 | build = "🪜" 25 | deploy = "🚀️" 26 | delete = "🗑️" 27 | tada = "🎉" 28 | 29 | cloudformation = "☁️" 30 | awslambda = "🟧" 31 | 32 | template = "📋" 33 | computer = "💻" 34 | package = "📦" 35 | 36 | factory = "🏭" 37 | no_entry = "🚫" 38 | warning = "⚠️" 39 | 40 | thumb_up = "👍" 41 | thumb_down = "👎" 42 | attention = "👉" 43 | 44 | happy_face = "😀" 45 | hot_face = "🥵" 46 | anger = "💢" 47 | 48 | red_circle = "🔴" 49 | green_circle = "🟢" 50 | yellow_circle = "🟡" 51 | blue_circle = "🔵" 52 | 53 | red_square = "🟥" 54 | green_square = "🟩" 55 | yellow_square = "🟨" 56 | blue_square = "🟦" 57 | 58 | succeeded = "✅" 59 | failed = "❌" 60 | 61 | arrow_up = "⬆️" 62 | arrow_down = "⬇️" 63 | arrow_left = "⬅️" 64 | arrow_right = "➡️" 65 | -------------------------------------------------------------------------------- /bin/automation/helpers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | .. note:: 5 | 6 | This module is "ZERO-DEPENDENCY". 7 | """ 8 | 9 | import hashlib 10 | 11 | 12 | def sha256_of_bytes(b: bytes) -> str: 13 | sha256 = hashlib.sha256() 14 | sha256.update(b) 15 | return sha256.hexdigest() 16 | -------------------------------------------------------------------------------- /bin/automation/logger.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Enhance the default logger, print visual ascii effect for better readability. 5 | 6 | Usage:: 7 | 8 | from logger import logger 9 | 10 | .. note:: 11 | 12 | This module is "ZERO-DEPENDENCY". 13 | """ 14 | 15 | import typing as T 16 | import sys 17 | import enum 18 | import logging 19 | import contextlib 20 | from functools import wraps 21 | from datetime import datetime 22 | 23 | _logger = logging.getLogger("automation") 24 | _logger.setLevel(logging.INFO) 25 | 26 | stream_handler = logging.StreamHandler(stream=sys.stdout) 27 | stream_handler.setLevel(logging.INFO) 28 | formatter = logging.Formatter( 29 | "[User] %(message)s", 30 | datefmt="%Y-%m-%d %H:%m:%S", 31 | ) 32 | stream_handler.setFormatter(formatter) 33 | 34 | _logger.addHandler(stream_handler) 35 | 36 | tab = " " * 2 37 | pipe = "| " 38 | 39 | 40 | def format_indent(msg: str, indent: int = 0, nest: int = 0) -> str: 41 | """ 42 | Format message with indentation and nesting. 43 | 44 | Example:: 45 | 46 | >>> format_indent("hello") 47 | '[User] | hello' 48 | >>> format_indent("hello", indent=1) 49 | '[User] | hello' 50 | >>> format_indent("hello", nest=1) 51 | '[User] | | hello' 52 | >>> format_indent("hello", indent=1, nest=1) 53 | '[User] | | hello' 54 | """ 55 | return f"{pipe * (nest + 1)}{tab * indent}{msg}" 56 | 57 | 58 | def debug(msg: str, indent: int = 0, nest: int = 0): 59 | _logger.debug(format_indent(msg, indent, nest)) 60 | 61 | 62 | def info(msg: str, indent: int = 0, nest: int = 0): 63 | _logger.info(format_indent(msg, indent, nest)) 64 | 65 | 66 | def warning(msg: str, indent: int = 0, nest: int = 0): 67 | _logger.warning(format_indent(msg, indent, nest)) 68 | 69 | 70 | def error(msg: str, indent: int = 0, nest: int = 0): 71 | _logger.error(format_indent(msg, indent, nest)) 72 | 73 | 74 | def critical(msg: str, indent: int = 0, nest: int = 0): 75 | _logger.critical(format_indent(msg, indent, nest)) 76 | 77 | 78 | # visual printer 79 | class AlignEnum(str, enum.Enum): 80 | left = "<" 81 | right = ">" 82 | middle = "^" 83 | 84 | 85 | def format_ruler( 86 | msg: str, 87 | char: str = "-", 88 | align: AlignEnum = AlignEnum.middle, 89 | length: int = 80, 90 | left_padding: int = 5, 91 | right_padding: int = 5, 92 | corner: str = "", 93 | nest: int = 0, 94 | ) -> str: 95 | """ 96 | Format message with a horizontal ruler. 97 | 98 | :param msg: the message to print 99 | :param char: the character to use as ruler 100 | :param align: left, middle, right alignment of the message 101 | :param length: the total number of character of the ruler 102 | :param left_padding: the number of ruler character to pad on the left 103 | :param right_padding: the number of ruler character to pad on the right 104 | :param corner: the character to use as corner 105 | :param nest: the number of pipe to print before the ruler 106 | 107 | Example:: 108 | 109 | >>> format_ruler("Hello") 110 | '[User] ------------------------------------ Hello -------------------------------------' 111 | >>> format_ruler("Hello", length=40) 112 | '[User] ---------------- Hello -----------------' 113 | >>> format_ruler("Hello", char="=") 114 | '[User] ==================================== Hello =====================================' 115 | >>> format_ruler("Hello", corner="+") 116 | '[User] +----------------------------------- Hello ------------------------------------+' 117 | >>> format_ruler("Hello", align=AlignEnum.left) 118 | '[User] ----- Hello --------------------------------------------------------------------' 119 | >>> format_ruler("Hello", align=AlignEnum.right) 120 | '[User] -------------------------------------------------------------------- Hello -----' 121 | >>> format_ruler("Hello", left_padding=10) 122 | '[User] --------------------------------------- Hello ----------------------------------' 123 | >>> format_ruler("Hello", right_padding=10) 124 | '[User] ---------------------------------- Hello ---------------------------------------' 125 | """ 126 | length = length - len(corner) * 2 - left_padding - right_padding - nest * 2 127 | msg = f" {msg} " 128 | left_pad = char * left_padding 129 | right_pad = char * right_padding 130 | s = f"{pipe * nest}{corner}{left_pad}{msg:{char}{align}{length}}{right_pad}{corner}" 131 | return s 132 | 133 | 134 | def ruler( 135 | msg: str, 136 | char: str = "-", 137 | align: AlignEnum = AlignEnum.middle, 138 | length: int = 80, 139 | left_padding: int = 5, 140 | right_padding: int = 5, 141 | corner: str = "", 142 | nest: int = 0, 143 | ): 144 | _logger.info( 145 | format_ruler( 146 | msg, char, align, length, left_padding, right_padding, corner, nest 147 | ) 148 | ) 149 | 150 | 151 | def decohints(decorator: T.Callable) -> T.Callable: 152 | """ 153 | fix pycharm type hint bug for decorator. 154 | """ 155 | return decorator 156 | 157 | 158 | class NestedLogger: 159 | def __init__(self): 160 | self.logger = _logger 161 | self.nest = 0 162 | 163 | def debug( 164 | self, 165 | msg: str, 166 | indent: int = 0, 167 | nest: T.Optional[int] = None, 168 | ): 169 | nest = self.nest if nest is None else nest 170 | self.logger.debug(format_indent(msg, indent, nest)) 171 | 172 | def info( 173 | self, 174 | msg: str, 175 | indent: int = 0, 176 | nest: T.Optional[int] = None, 177 | ): 178 | nest = self.nest if nest is None else nest 179 | self.logger.info(format_indent(msg, indent, nest)) 180 | 181 | def warning( 182 | self, 183 | msg: str, 184 | indent: int = 0, 185 | nest: T.Optional[int] = None, 186 | ): 187 | nest = self.nest if nest is None else nest 188 | self.logger.warning(format_indent(msg, indent, nest)) 189 | 190 | def error( 191 | self, 192 | msg: str, 193 | indent: int = 0, 194 | nest: T.Optional[int] = None, 195 | ): 196 | nest = self.nest if nest is None else nest 197 | self.logger.error(format_indent(msg, indent, nest)) 198 | 199 | def critical( 200 | self, 201 | msg: str, 202 | indent: int = 0, 203 | nest: T.Optional[int] = None, 204 | ): 205 | nest = self.nest if nest is None else nest 206 | self.logger.critical(format_indent(msg, indent, nest)) 207 | 208 | def ruler( 209 | self, 210 | msg: str, 211 | char: str = "-", 212 | align: AlignEnum = AlignEnum.left, 213 | length: int = 80, 214 | left_padding: int = 5, 215 | right_padding: int = 5, 216 | corner: str = "+", 217 | nest: T.Optional[int] = None, 218 | ): 219 | nest = self.nest if nest is None else nest 220 | self.logger.info( 221 | format_ruler( 222 | msg, char, align, length, left_padding, right_padding, corner, nest 223 | ) 224 | ) 225 | 226 | @contextlib.contextmanager 227 | def nested(self, nest: int): 228 | current_nest = self.nest 229 | self.nest = nest 230 | try: 231 | yield self 232 | finally: 233 | self.nest = current_nest 234 | 235 | def pretty_log( 236 | self, 237 | start_msg: str = "Start {func_name}()", 238 | error_msg: str = "Error, elapsed = {elapsed} sec", 239 | end_msg: str = "End {func_name}(), elapsed = {elapsed} sec", 240 | char: str = "-", 241 | align: AlignEnum = AlignEnum.left, 242 | length: int = 80, 243 | left_padding: int = 5, 244 | right_padding: int = 5, 245 | corner: str = "+", 246 | nest: int = 0, 247 | ): 248 | """ 249 | Pretty print ruler when a function start, error, end. 250 | 251 | Example: 252 | 253 | .. code-block:: python 254 | 255 | @nested_logger.pretty_log(nest=1) 256 | def my_func2(name: str): 257 | time.sleep(1) 258 | nested_logger.info(f"{name} do something in my func 2") 259 | 260 | @nested_logger.pretty_log() 261 | def my_func1(name: str): 262 | time.sleep(1) 263 | nested_logger.info(f"{name} do something in my func 1") 264 | my_func2(name="bob") 265 | 266 | my_func1(name="alice") 267 | 268 | The output looks like:: 269 | 270 | [User] +----- Start my_func1() ------------------------------------+ 271 | [User] | 272 | [User] | alice do something in my func 1 273 | [User] | +----- Start my_func2() ----------------------------------+ 274 | [User] | | 275 | [User] | | bob do something in my func 2 276 | [User] | | 277 | [User] | +----- End my_func2(), elapsed = 1 sec -------------------+ 278 | [User] | 279 | [User] +----- End my_func1(), elapsed = 2 sec ---------------------+ 280 | """ 281 | 282 | @decohints 283 | def deco(func): 284 | @wraps(func) 285 | def wrapper(*args, **kwargs): 286 | st = datetime.utcnow() 287 | self.ruler( 288 | msg=start_msg.format(func_name=func.__name__), 289 | char=char, 290 | align=align, 291 | length=length, 292 | left_padding=left_padding, 293 | right_padding=right_padding, 294 | corner=corner, 295 | nest=nest, 296 | ) 297 | self.info("", nest=nest) 298 | current_nest = self.nest 299 | self.nest = nest 300 | 301 | try: 302 | result = func(*args, **kwargs) 303 | except Exception as e: 304 | et = datetime.utcnow() 305 | elapsed = int((et - st).total_seconds()) 306 | self.info("", nest=nest) 307 | self.ruler( 308 | msg=error_msg.format(elapsed=elapsed), 309 | char=char, 310 | align=align, 311 | length=length, 312 | left_padding=left_padding, 313 | right_padding=right_padding, 314 | corner=corner, 315 | nest=nest, 316 | ) 317 | raise e 318 | 319 | et = datetime.utcnow() 320 | elapsed = int((et - st).total_seconds()) 321 | self.info("", nest=nest) 322 | self.ruler( 323 | msg=end_msg.format(func_name=func.__name__, elapsed=elapsed), 324 | char=char, 325 | align=align, 326 | length=length, 327 | left_padding=left_padding, 328 | right_padding=right_padding, 329 | corner=corner, 330 | nest=nest, 331 | ) 332 | self.nest = current_nest 333 | return result 334 | 335 | return wrapper 336 | 337 | return deco 338 | 339 | 340 | logger = NestedLogger() 341 | 342 | 343 | if __name__ == "__main__": 344 | import time 345 | 346 | def test_ruler(): 347 | ruler("Hello") 348 | ruler("Hello", length=40) 349 | ruler("Hello", char="=") 350 | ruler("Hello", corner="+") 351 | ruler("Hello", align=AlignEnum.left) 352 | ruler("Hello", align=AlignEnum.right) 353 | ruler("Hello", left_padding=10) 354 | ruler("Hello", right_padding=10) 355 | 356 | def test_nested_logger_nested_context_manager(): 357 | with logger.nested(0): 358 | logger.ruler("nested 0 start") 359 | logger.info("nested 0") 360 | 361 | with logger.nested(1): 362 | logger.ruler("nested 1 start") 363 | logger.info("nested 1") 364 | logger.ruler("nested 1 end") 365 | 366 | logger.ruler("nested 0 end") 367 | 368 | def test_nested_logger_pretty_log_decorator(): 369 | @logger.pretty_log(nest=1) 370 | def my_func2(name: str): 371 | time.sleep(1) 372 | logger.info(f"{name} do something in my func 2") 373 | 374 | @logger.pretty_log() 375 | def my_func1(name: str): 376 | time.sleep(1) 377 | logger.info(f"{name} do something in my func 1") 378 | my_func2(name="bob") 379 | 380 | my_func1(name="alice") 381 | 382 | # test_ruler() 383 | # test_nested_logger_nested_context_manager() 384 | # test_nested_logger_pretty_log_decorator() 385 | -------------------------------------------------------------------------------- /bin/automation/paths.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Enum important path on the local file systems for this project. 5 | 6 | .. note:: 7 | 8 | This module is "ZERO-DEPENDENCY". 9 | """ 10 | 11 | from pathlib_mate import Path 12 | 13 | dir_project_root = Path(__file__).absolute().parent.parent.parent 14 | 15 | assert dir_project_root.joinpath("Makefile").exists() is True 16 | 17 | dir_python_lib = None 18 | for p in dir_project_root.iterdir(): 19 | if p.joinpath("_version.py").exists(): 20 | dir_python_lib = p 21 | 22 | PACKAGE_NAME = dir_python_lib.basename 23 | 24 | # ------------------------------------------------------------------------------ 25 | # Virtual Environment Related 26 | # ------------------------------------------------------------------------------ 27 | dir_venv = dir_project_root / ".venv" 28 | dir_venv_bin = dir_venv / "bin" 29 | 30 | # virtualenv executable paths 31 | bin_python = dir_venv_bin / "python" 32 | bin_pip = dir_venv_bin / "pip" 33 | bin_pytest = dir_venv_bin / "pytest" 34 | 35 | # ------------------------------------------------------------------------------ 36 | # Test Related 37 | # ------------------------------------------------------------------------------ 38 | dir_tests = dir_project_root / "tests" 39 | 40 | dir_htmlcov = dir_project_root / "htmlcov" 41 | 42 | # ------------------------------------------------------------------------------ 43 | # Poetry Related 44 | # ------------------------------------------------------------------------------ 45 | path_requirements_main = dir_project_root / "requirements-main.txt" 46 | path_requirements_dev = dir_project_root / "requirements-dev.txt" 47 | path_requirements_test = dir_project_root / "requirements-test.txt" 48 | path_requirements_doc = dir_project_root / "requirements-doc.txt" 49 | path_requirements_automation = dir_project_root / "requirements-automation.txt" 50 | 51 | path_poetry_lock = dir_project_root / "poetry.lock" 52 | path_poetry_lock_hash_json = dir_project_root / ".poetry-lock-hash.json" 53 | 54 | # ------------------------------------------------------------------------------ 55 | # Env Related 56 | # ------------------------------------------------------------------------------ 57 | path_current_env_name_json = dir_project_root / ".current-env-name.json" 58 | 59 | # ------------------------------------------------------------------------------ 60 | # Build Related 61 | # ------------------------------------------------------------------------------ 62 | dir_build = dir_project_root / "build" 63 | dir_dist = dir_project_root / "dist" 64 | 65 | # ------------------------------------------------------------------------------ 66 | # Alfred Related 67 | # ------------------------------------------------------------------------------ 68 | path_git_repo_info_plist = dir_project_root / "info.plist" 69 | path_git_repo_main_py = dir_project_root / "main.py" 70 | -------------------------------------------------------------------------------- /bin/automation/runtime.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | **"Runtime" Definition** 5 | 6 | Runtime is where you execute your code. For example, if this code is running 7 | in a CI build environment, then the runtime is "ci". If this code is running 8 | on your local laptop, then the runtime is "local". If this code is running on 9 | AWS Lambda, then the runtime is "lbd" 10 | 11 | This module automatically detect what is the current runtime. 12 | 13 | .. note:: 14 | 15 | This module is "ZERO-DEPENDENCY". 16 | """ 17 | 18 | import os 19 | 20 | from .logger import logger 21 | 22 | 23 | class RuntimeEnum: 24 | """ 25 | This code will only be run either from local laptop or CI environment. 26 | It won't be run from Lambda Function. For EC2, it considers EC2 the 27 | same as your local laptop. 28 | """ 29 | 30 | local = "local" 31 | ci = "ci" 32 | 33 | 34 | emoji_mapper = { 35 | RuntimeEnum.local: "💻", 36 | RuntimeEnum.ci: "🔨", 37 | } 38 | 39 | 40 | # In alfred workflow project, we usually use GitHub Action as the CI build environment 41 | # See https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables 42 | if "CI" in os.environ: 43 | CURRENT_RUNTIME = RuntimeEnum.ci 44 | IS_CI = True 45 | IS_LOCAL = False 46 | else: 47 | CURRENT_RUNTIME = RuntimeEnum.local 48 | IS_CI = False 49 | IS_LOCAL = True 50 | 51 | 52 | def print_runtime_info(): 53 | logger.info(f"Current runtime is {emoji_mapper[CURRENT_RUNTIME]} {CURRENT_RUNTIME!r}") 54 | -------------------------------------------------------------------------------- /bin/automation/tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import subprocess 4 | 5 | from .paths import ( 6 | bin_pytest, 7 | dir_tests, 8 | dir_htmlcov, 9 | PACKAGE_NAME, 10 | ) 11 | from .config import config 12 | from .logger import logger 13 | 14 | 15 | @logger.pretty_log( 16 | start_msg="⏱ 🧪 Run Unit Test", 17 | end_msg="⏰ End 'Run Unit Test', elapsed = {elapsed} sec", 18 | ) 19 | def run_unit_test(): 20 | try: 21 | args = [ 22 | f"{bin_pytest}", 23 | f"{dir_tests}", 24 | "-s", 25 | ] 26 | subprocess.run(args, check=True) 27 | logger.info("✅ Unit Test Succeeded!") 28 | except Exception as e: 29 | logger.error("🔥 Unit Test Failed!") 30 | raise e 31 | 32 | 33 | @logger.pretty_log( 34 | start_msg="⏱ 🧪 Run Code Coverage Test", 35 | end_msg="⏰ End 'Run Code Coverage Test', elapsed = {elapsed} sec", 36 | ) 37 | def run_cov_test(): 38 | args = [ 39 | f"{bin_pytest}", 40 | f"{dir_tests}", 41 | "-s", 42 | f"--cov={PACKAGE_NAME}", 43 | "--cov-report", 44 | "term-missing", 45 | "--cov-report", 46 | f"html:{dir_htmlcov}", 47 | ] 48 | try: 49 | subprocess.run(args, check=True) 50 | logger.info("✅ Code Coverage Test Succeeded!") 51 | except Exception as e: 52 | logger.error("🔥 Code Coverage Test Failed!") 53 | raise e 54 | -------------------------------------------------------------------------------- /bin/automation/venv.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Virtualenv management. 5 | 6 | .. note:: 7 | 8 | This module is "ZERO-DEPENDENCY". 9 | """ 10 | 11 | import subprocess 12 | 13 | from .paths import dir_venv 14 | from .config import config 15 | from .logger import logger 16 | 17 | 18 | @logger.pretty_log( 19 | start_msg="Create 🐍 Virtual Environment", 20 | end_msg="End 'Create Virtual Environment', elapsed = {elapsed} sec", 21 | ) 22 | def poetry_venv_create(): 23 | """ 24 | .. code-block:: bash 25 | 26 | $ poetry config virtualenvs.in-project true --local 27 | $ poetry env use python${X}.${Y} 28 | """ 29 | if not dir_venv.exists(): 30 | subprocess.run( 31 | ["poetry", "config", "virtualenvs.in-project", "true", "--local"], 32 | check=True, 33 | ) 34 | subprocess.run( 35 | ["poetry", "env", "use", f"python{config.python_version}"], 36 | check=True, 37 | ) 38 | logger.info("done") 39 | else: 40 | logger(f"{dir_venv} already exists, do nothing.") 41 | 42 | 43 | @logger.pretty_log( 44 | start_msg="Create 🐍 Virtual Environment", 45 | end_msg="End 'Create Virtual Environment', elapsed = {elapsed} sec", 46 | ) 47 | def virtualenv_venv_create(): 48 | """ 49 | .. code-block:: bash 50 | 51 | $ virtualenv -p python${X}.${Y} ./.venv 52 | """ 53 | if not dir_venv.exists(): 54 | subprocess.run( 55 | ["virtualenv", "-p", f"python{config.python_version}", f"{dir_venv}"], 56 | check=True, 57 | ) 58 | logger.info("done") 59 | else: 60 | logger.info(f"{dir_venv} already exists, do nothing.") 61 | 62 | 63 | @logger.pretty_log( 64 | start_msg="Remove 🐍 Virtual Environment", 65 | end_msg="End 'Remove Virtual Environment', elapsed = {elapsed} sec", 66 | ) 67 | def venv_remove(): 68 | """ 69 | .. code-block:: bash 70 | 71 | $ rm -r ./.venv 72 | """ 73 | if dir_venv.exists(): 74 | subprocess.run(["rm", "-r", f"{dir_venv}"]) 75 | logger.info(f"done! {dir_venv} is removed.") 76 | else: 77 | logger.info(f"{dir_venv} doesn't exists, do nothing.") 78 | -------------------------------------------------------------------------------- /bin/debug_script_filter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from automation.alfred import refresh_code, debug 4 | 5 | bin_python = "/Users/sanhehu/.pyenv/versions/3.8.11/bin/python" 6 | handler_id = "view_settings" 7 | query = "" 8 | 9 | # handler_id = "set_settings" 10 | # query = "email my@email.com" 11 | 12 | # handler_id = "set_setting_value" 13 | # query = "email my@email.com" 14 | 15 | refresh_code() 16 | debug(bin_python, handler_id, query) 17 | -------------------------------------------------------------------------------- /bin/s01_1_virtualenv_venv_create.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.venv import virtualenv_venv_create 5 | 6 | virtualenv_venv_create() 7 | -------------------------------------------------------------------------------- /bin/s01_2_venv_remove.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.venv import venv_remove 5 | 6 | venv_remove() 7 | -------------------------------------------------------------------------------- /bin/s02_1_pip_install.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import pip_install 5 | 6 | pip_install() 7 | -------------------------------------------------------------------------------- /bin/s02_2_pip_install_dev.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import pip_install_dev 5 | 6 | pip_install_dev() 7 | -------------------------------------------------------------------------------- /bin/s02_3_pip_install_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import pip_install_test 5 | 6 | pip_install_test() 7 | -------------------------------------------------------------------------------- /bin/s02_4_pip_install_doc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import pip_install_doc 5 | 6 | pip_install_doc() 7 | -------------------------------------------------------------------------------- /bin/s02_5_pip_install_all.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import pip_install_all 5 | 6 | pip_install_all() 7 | -------------------------------------------------------------------------------- /bin/s02_6_poetry_export.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import poetry_export 5 | 6 | poetry_export() 7 | -------------------------------------------------------------------------------- /bin/s02_7_poetry_lock.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.deps import poetry_lock 5 | 6 | poetry_lock() 7 | -------------------------------------------------------------------------------- /bin/s03_1_run_unit_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.tests import run_unit_test 5 | 6 | run_unit_test() 7 | -------------------------------------------------------------------------------- /bin/s03_2_run_cov_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.tests import run_cov_test 5 | 6 | run_cov_test() 7 | -------------------------------------------------------------------------------- /bin/s05_1_build_wf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.alfred import build_wf 5 | 6 | build_wf() 7 | -------------------------------------------------------------------------------- /bin/s05_2_refresh_code.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from automation.alfred import refresh_code 5 | 6 | refresh_code() 7 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | branch: main 3 | require_ci_to_pass: yes 4 | 5 | coverage: 6 | precision: 2 7 | round: down 8 | range: "0...100" 9 | 10 | parsers: 11 | gcov: 12 | branch_detection: 13 | conditional: yes 14 | loop: yes 15 | method: no 16 | macro: no 17 | 18 | comment: 19 | layout: "reach,diff,flags,files,footer" 20 | behavior: default 21 | require_changes: no -------------------------------------------------------------------------------- /docs/developer-guide/01-About-This-Best-Practice.rst: -------------------------------------------------------------------------------- 1 | About This Best Practice 2 | ============================================================================== 3 | 4 | 5 | Overview 6 | ------------------------------------------------------------------------------ 7 | 该最佳实践是我个人在维护 10 多个 Alfred Workflow 的过程中总结出来的. 这当然不能算是 "最佳" 的方法, 但肯定是比较有效的方法. 该方法经过了大型项目的考验, 在非常复杂的项目中依然工作良好. 8 | 9 | 10 | Next 11 | ------------------------------------------------------------------------------ 12 | `Initialize a New Project <./02-Initialize-a-New-Project.rst>`_ 13 | -------------------------------------------------------------------------------- /docs/developer-guide/02-Initialize-a-New-Project.rst: -------------------------------------------------------------------------------- 1 | Initialize a New Project 2 | ============================================================================== 3 | 当你决定开始用 Python 开发一个属于你自己的 Alfred Workflow 的时候, 你可以参考本文来生成你的 Git 项目文件. 大大减少了你手动设置的工作量. 4 | 5 | 6 | Solution 7 | ------------------------------------------------------------------------------ 8 | **第一步, 创建一个新的 Git Repo** 9 | 10 | 1. 到这个开源 GitHub repo https://github.com/MacHu-GWU/cookiecutter-afwf, 这个 repo 是一个 Python Alfred Workflow 的模板. 将其 Clone 下来. 11 | 2. 进入这个配置文件 https://github.com/MacHu-GWU/cookiecutter-afwf/blob/main/cookiecutter-afwf.json, 填入你的项目相关的信息. 12 | 3. 给你的 Python 安装必要的依赖, 具体的值列在这里 https://github.com/MacHu-GWU/cookiecutter-afwf/blob/main/requirements.txt. 安装的命令是 ``pip install -r requirements.txt``. 13 | 4. 运行 https://github.com/MacHu-GWU/cookiecutter-afwf/blob/main/create_repo.py 脚本, 它会在 ``cookiecutter-afwf/tmp/`` 目录下生成你的项目文件夹. 运行的命令是 ``python create_repo.py``. 14 | 15 | **第二步, 对你的项目进行一些配置** 16 | 17 | 1. 到 ``pyproject.toml`` 中的 ``[tool.poetry.dependencies]`` 一栏中填入你的项目所需的依赖. 18 | 2. 没有了. 19 | 20 | 21 | Next 22 | ------------------------------------------------------------------------------ 23 | `Development Guide <./03-Development-Guide.rst>`_ 24 | -------------------------------------------------------------------------------- /docs/developer-guide/03-Development-Guide.rst: -------------------------------------------------------------------------------- 1 | Development Guide 2 | ============================================================================== 3 | 4 | 5 | Automation Scripts 6 | ------------------------------------------------------------------------------ 7 | 跟自动化脚本有关的文件有这些: 8 | 9 | - ``bin/``: 这个目录下存放的都是可执行的自动化脚本文件. 本质上都是用 Python 写的 shell script. 10 | - ``bin/automation``: 这是一个 Python 库, 按照模块化存放了一些自动化脚本的函数. 11 | - ``bin/s123_123_..._.py``: 一堆按照编号排序的自动化脚本, 都是开发工作流会用到的. 12 | - ``Makefile``: 项目的自动化脚本入口, 本质上是对 ``bin/s123_123_..._.py`` 的封装. 13 | 14 | 在项目根目录下打 ``make`` 命令就能弹出一堆命令: 15 | 16 | .. code-block:: make 17 | 18 | help ** Show this help message 19 | venv-create ** Create Virtual Environment 20 | venv-remove ** Remove Virtual Environment 21 | install ** Install main dependencies and Package itself 22 | install-dev Install Development Dependencies 23 | install-test Install Test Dependencies 24 | install-doc Install Document Dependencies 25 | install-all Install All Dependencies 26 | poetry-export Export requirements-*.txt from poetry.lock file 27 | poetry-lock Resolve dependencies using poetry, update poetry.lock file 28 | test ** Run test 29 | test-only Run test without checking test dependencies 30 | cov ** Run code coverage test 31 | cov-only Run code coverage test without checking test dependencies 32 | build-wf ** Build Alfred Workflow release from source code 33 | refresh-code ** Refresh Alfred Workflow source code 34 | 35 | 这里最常用到的几个是:: 36 | 37 | venv-create 38 | install-all 39 | poetry-lock 40 | cov 41 | build-wf 42 | refresh-code 43 | 44 | 45 | Automation Config 46 | ------------------------------------------------------------------------------ 47 | 在 ``bin/automation/config.py`` 文件中, 有一个配置对象 ``AutomationConfig``, 它记录了自动化脚本所需要的一些配置. 其中 ``dir_workflow`` 最为重要, 它定义了 Alfred Workflow 实际保存的位置. 这个位置你可以在 Alfred 中创建一个 Workflow, 然后右键点击, 选择 Open in Finder 就能找到了. 路径大该长这个样子 ``/path/to/Alfred.alfredpreferences/workflows/user.workflow.ABCD1234-A1B2-C3D4-E5F6-A1B2C3D4E5F6``. 这个路径会被构建的自动化脚本所使用, 将你的源代码拷贝到对应的位置, 以实现 "安装" 的效果. 48 | 49 | 50 | The Workflow Entry Point - main.py 51 | ------------------------------------------------------------------------------ 52 | 在项目的根目录下有一个 `main.py <../main.py>`_ 文件. 这个文件是 Alfred Workflow 的入口脚本. , 也是 Alfred Workflow 的主要逻辑. 请跳转到文件的内容查看注释了解它的功能. 53 | 54 | 55 | The Workflow Source Code 56 | ------------------------------------------------------------------------------ 57 | 在项目的根目录下有一个 `afwf_fts_anything <../afwf_fts_anything>`_ 目录. 这个目录就是你的 Alfred Workflow 的源码了. 其中 `workflow.py <../afwf_fts_anything/workflow.py>`_ 模块定义了 Workflow 的对象实例. 并且将所有需要用到的 ``Handler`` 都注册了. 而 `handlers <../afwf_fts_anything/handlers>`_ 子模块则包含了所有的 handlers 的实现. 我建议查看所有示例 handlers 的源码来了解如何编写常见的 handler 逻辑. 58 | 59 | - `error.py <../afwf_fts_anything/handlers/error.py>`_: 故意抛出异常, 用于测试异常处理逻辑. 60 | - `memorize_cache.py <../afwf_fts_anything/handlers/memorize_cache.py>`_: 一个带磁盘缓存的记忆函数. 61 | - `open_file.py <../afwf_fts_anything/handlers/open_file.py>`_: 可以用来选择并打开文件. 62 | - `open_url.py <../afwf_fts_anything/handlers/open_url.py>`_: 可以用来选择并在浏览器中打开网页. 63 | - `read_file.py <../afwf_fts_anything/handlers/read_file.py>`_: 对文件的内容进行读取. 64 | - `write_file.py <../afwf_fts_anything/handlers/write_file.py>`_: 对文件的内容进行写入. 65 | - `set_settings.py <../afwf_fts_anything/handlers/set_settings.py>`_: 对数据库进行写入. 66 | - `view_settings.py <../afwf_fts_anything/handlers/view_settings.py>`_: 对数据库进行读取. 67 | 68 | 69 | Next 70 | ------------------------------------------------------------------------------ 71 | `Debug Guide <./04-Debug-Guide.rst>`_ 72 | -------------------------------------------------------------------------------- /docs/developer-guide/04-Debug-Guide.rst: -------------------------------------------------------------------------------- 1 | Debug Guide 2 | ============================================================================== 3 | 4 | 5 | Summary 6 | ------------------------------------------------------------------------------ 7 | 在前一章节 Development Guide 中我们已经介绍了如何用单元测试来确保代码的正确性. 但是有一些功能是在你真正使用 Alfred Workflow 的时候才会遇到的. 而 Alfred Workflow 自带的 debug 工具过于简陋, 出错了也不会给我们任何视觉上的提示. 我们希望能在出错的时候显示我们平时运行 Python 程序时的 Traceback 信息. 下面我们来介绍这一问题的解决方案. 8 | 9 | 10 | The Solution 11 | ------------------------------------------------------------------------------ 12 | afwf 框架中的 `Workflow `_ 模块对主逻辑有封装. 这里有两种 debug 机制可以帮助我们排查问题: 13 | 14 | 1. 当主逻辑有错误时 (即 raise 了一个 Exception), 那么在 Python 的 traceback 日志则会被保存到 ``${HOME}/.alfred-afwf/last-error.txt`` 文件中, 每次出现错误都会覆盖这个文件. 15 | 2. 你的 `main.py `_ 文件中 ``wf.run(debug=True)`` 如果设置了 ``debug=True``, 那么你可以用 ``afwf.log_debug_info("message")`` 函数来记录一些调试信息. 这些 debug 日志会以 append only 的形式保存到 ``${HOME}/.alfred-afwf/debug.txt`` 中. 16 | 17 | 默认 afwf 框架会在出错时返回两个 Item, 一个可以一键打开 ``last-error.txt`` 文件, 一个可以一键打开 ``debug.txt`` 文件. 但是前提你要给你的每个 Script Filter 的后继 Action 连接一个 ``Utilities -> Conditional`` 的控件, 其中 if 的判断值设为 ``{var:_open_log_file}`` (注意下划线), 条件设置为 is equal to "y". 然后连接一个 ``Actions -> Open File`` 的控件, 其中 File 设为 ``{var:_open_log_file_path}`` (注意下划线). 这样就能使得的自动出现的两个用于 debug 的 items 能一键打开日志文件了. 18 | 19 | 20 | Next 21 | ------------------------------------------------------------------------------ 22 | `Unit Test Guide <./05-Unit-Test-Guide.rst>`_ -------------------------------------------------------------------------------- /docs/developer-guide/05-Unit-Test-Guide.rst: -------------------------------------------------------------------------------- 1 | Unit Test Guide 2 | ============================================================================== 3 | 4 | 5 | Overview 6 | ------------------------------------------------------------------------------ 7 | 对 Alfred Workflow 的源码进行单元测试一直是一个大难题. 这里我们利用了 `afwf `_ 框架中的 ``Handler`` 类. 每一个 ``Handler`` 类都有两个方法, ``def main(...)`` 和 ``def handler(...)``. 其中 ``main`` 实现了核心的业务逻辑, 其本质就是一个 Python 函数, 你就按照正常的方式进行输入输出的单元测试即可. 而 ``handler`` 只是对 ``main`` 的一层封装, 将 input box 中的 query str 解析成 arg 传给 ``main`` 方法而已. 因此我们可以将 parse query 的逻辑单独做单元测试. 这样单元测试就已经完成了 99% 的工作了. 剩下的 1% 就是对 Workflow 的 Integration Test 了. 8 | 9 | 10 | Example 11 | ------------------------------------------------------------------------------ 12 | 这里我们拿 `memorize_cache.py <../afwf_fts_anything/handlers/memorize_cache.py>`_ 为例. 它的核心逻辑是根据 Key 生成一个随机数, 并且缓存 5 秒. 返回的对象是一个 ``ScriptFilter`` 对象, 而这个生成的随机数则保存在 ``ScriptFilter.items`` 列表中. 13 | 14 | 我们再来看 `test_handler_memorize_cache.py <../tests/test_handler_memorize_cache.py>`_ 的测试用例. 测试用例仅仅是用同一个 key 调用了两次 ``main`` 函数, 然后比较 item 的 title 是不是一样. 是一样说明缓存起作用了. 15 | 16 | 17 | Next 18 | ------------------------------------------------------------------------------ 19 | `Release Guide <./06-Release-Guide.rst>`_ 20 | -------------------------------------------------------------------------------- /docs/developer-guide/06-Release-Guide.rst: -------------------------------------------------------------------------------- 1 | Release Guide 2 | ============================================================================== 3 | 4 | 5 | Summary 6 | ------------------------------------------------------------------------------ 7 | 我们希望能将开发好的 Workflow 发布出去供大家下载. 并且希望安装的过程尽可能的简单. 作为 Workflow 的开发者, 我们希望简化发布新版本的流程. 8 | 9 | 10 | Solution 11 | ------------------------------------------------------------------------------ 12 | 1. 首先运行一次 ``make cov`` 命令执行代码覆盖率单元测试, 确保 90% 以上的覆盖率. 不然你自己都说服不了自己你的 Workflow 会不会有严重的 Bug. 13 | 2. 然后运行一次 ``make build-wf`` 命令从源代码构建 Alfred Workflow. 保证你即将发布的 Workflow 的代码跟你的代码库中一致. 14 | 3. 在 Alfred 中输入 ``?Workflow`` 进入 Alfred Workflow 菜单. 15 | 4. 找到你的 Workflow, 点击右键呼出菜单, 然后点击 ``Export`` 进入导出页面. 16 | 5. 给你的 Workflow 加上一些 Metadata. 例如: 17 | - Bundle Id: 可以是 ``${GitHubUserName}-${ProjectName}`` 的格式 18 | - Version: 用 Semantic Version 的方式命名. 从 ``0.1.1`` 开始 19 | 6. 导出后将文件重命名为这种格式: ``${ProjectName}-${SemanticVersion}-${OS}_${PlatForm}.alfredworkflow``, 例如: ``afwf_fts_anything-0.1.1-macosx_arm64.alfredworkflow``. 其中 ``${Platform}`` 可以是 ``amd64`` 或是 ``arm64`` 或是 ``universal``. 20 | 7. 在 GitHub Repo 中的 Release 菜单里点击 ``Draft a new release``. 然后设置 Tag 为你的 Semantic Version, Release Title 也是一样. 然后在将你刚才创建的 ``.alfredworkflow`` 文件拖曳到 ``Attach binaries by dropping them here or selecting them.`` 区域上传. 21 | 8. 至此你的用户可以在 Release 中点击 ``.alfredworkflow`` 文件下载然后双击安装你的 Workflow 了. 22 | 23 | **全文完** 24 | -------------------------------------------------------------------------------- /docs/user-guide/01-How-it-Works.rst: -------------------------------------------------------------------------------- 1 | How it Works 2 | ============================================================================== 3 | 4 | .. contents:: 5 | :local: 6 | :depth: 1 7 | 8 | 9 | Core Concept 10 | ------------------------------------------------------------------------------ 11 | **Setting file** 12 | 13 | It is a JSON file the defines how you want your data been searched, and how to render the result in the Alfred drop down menu. We will explain the details later. You have to explicitly provide this file. 14 | 15 | **Data file** 16 | 17 | It is a JSON file that stores your searchable dataset. It has to be either explicitly provided, or it can be automatically downloaded from internet if the ``data_url`` is defined in the setting. It is an array of object. For example:: 18 | 19 | [ 20 | { 21 | "key1": "value1", 22 | "key2": "value2", 23 | ... 24 | }, 25 | { 26 | "key1": "value1", 27 | "key2": "value2", 28 | ... 29 | }, 30 | ... 31 | ] 32 | 33 | **Index directory** 34 | 35 | It is where the full-text search index is stored. The index will be automatically generated at the first time you search based on the data file and setting file. To reset the index, just delete the index directory. 36 | 37 | **Icon directory** 38 | 39 | It is an optional directory where you can put your ICON file to be used in the Alfred drop down menu. If the ICON is not defined, then Alfred Workflow icon will be used. 40 | 41 | **afwf_fts_anything home directory** 42 | 43 | It is a directory on your laptop that stores all your data files and setting files. It is at ``${HOME}/.alfred-afwf/afwf_fts_anything/``. 44 | 45 | **Dataset** 46 | 47 | It is an abstraction concept of your dataset. Each dataset has to have a name and ``afwf_fts_anything`` use the name to locate the data file and setting file. For example, if the dataset name is ``movie``, then: 48 | 49 | - the setting file is at ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-setting.json``. 50 | - the data file is at ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-data.json``. 51 | - the index directory is at ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-whoosh_index/``. 52 | - the icon directory is at ``${HOME}/.alfred-afwf/afwf_fts_anything/icon/``. 53 | 54 | 55 | Setting 56 | ------------------------------------------------------------------------------ 57 | Setting file defines how you want your data been searched, and how to render the result in the Alfred drop down menu. 58 | 59 | **Define how you want to search** 60 | 61 | Field is the basic unit of search. You can define how you want the data to be matched. Field has the following attributes: 62 | 63 | :name : the name of the field 64 | :type_is_store : if True, the value is only stored but not indexed for search. Usually it can be used to dynamically construct value for argument (the action when you press enter), or for auto complete (the action when you press tab) 65 | :type_is_ngram : if True, the value is index using ngram. It matches any character shorter than N characters. https://whoosh.readthedocs.io/en/latest/ngrams.html. 66 | :type_is_ngram_words : similar to type_is_ngram, but it tokenizes text into words before index. It matches any character shorter than N characters. https://whoosh.readthedocs.io/en/latest/api/fields.html#whoosh.fields.NGRAMWORDS. 67 | :type_is_phrase : if True, the value is indexed using phrase. Only case-insensitive phrase will be matched. See https://whoosh.readthedocs.io/en/latest/schema.html#built-in-field-types 68 | :type_is_keyword : if True, the value is indexed using keyword. The keyword has to be exactly matched. See https://whoosh.readthedocs.io/en/latest/schema.html#built-in-field-types 69 | :type_is_numeric: if True, the value is indexed using number. The number field is not used for searching, it is only used for sorting. See https://whoosh.readthedocs.io/en/latest/schema.html#built-in-field-types 70 | :ngram_minsize : minimal number of character to match, default is 2. 71 | :ngram_maxsize : maximum number of character to match, default is 10. 72 | :keyword_lowercase : for keyword type field, is the match case-sensitive? default True (not sensitive). 73 | :keyword_commas : is the delimiter of keyword is comma or space? 74 | :weight : the weight of the field for sorting in the search result. default is 1.0. 75 | :is_sortable : is the field will be used for sorting? If True, the field has to be stored. 76 | :is_sort_ascending : is the field will be used for sort ascending? 77 | 78 | **NOTE** 79 | 80 | only one of ``type_is_ngram``, ``type_is_phrase``, ``type_is_keyword``, ``type_is_numeric`` can be True. 81 | 82 | **Define how you want to render the result** 83 | 84 | In Alfred Workflow drop down menu, it will show list of items. Each item has five attributes: 85 | 86 | - **title**: the title of the item, the font size is larger than subtitle. 87 | - **subtitle**: the subtitle of the item, the font size is smaller than title. 88 | - **arg**: it is the argument that will be passed to the next action when you press enter, you can use it to open a url, open a file, run a command, etc ... Also, it is the text you copied when you hit ``CMD + C``. 89 | - **autocomplete: it is the text that will be used for auto complete when you press ``Tab``. 90 | - **icon**: it is the icon of the item, it can be absolute path to the icon image, or the relative path related to the ``Icon directory``. 91 | 92 | .. image:: ./images/alfred-item.png 93 | 94 | 95 | Example 96 | ------------------------------------------------------------------------------ 97 | Let's consider the IMDB Top 250 movies data set, the content of ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-data.json`` is: 98 | 99 | .. code-block:: javascript 100 | 101 | [ 102 | { 103 | "movie_id": 1, 104 | "title": "The Shawshank Redemption", 105 | "description": "Two imprisoned men bond over a number of years, finding solace and eventual redemption through acts of common decency.", 106 | "genres": "Drama", 107 | "rating": 9.2, 108 | "url": "https://www.imdb.com/title/tt0111161" 109 | }, 110 | { 111 | "movie_id": 2, 112 | "title": "The Godfather", 113 | "description": "The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.", 114 | "genres": "Crime, Drama", 115 | "rating": 9.2, 116 | "url": "https://www.imdb.com/title/tt0068646" 117 | }, 118 | { 119 | "movie_id": 3, 120 | "title": "The Dark Knight", 121 | "description": "When the menace known as the Joker wreaks havoc and chaos on the people of Gotham, Batman must accept one of the greatest psychological and physical tests of his ability to fight injustice.", 122 | "genres": "Action, Crime, Drama", 123 | "rating": 9.0, 124 | "url": "https://www.imdb.com/title/tt0468569" 125 | }, 126 | { 127 | "movie_id": 4, 128 | "title": "12 Angry Men", 129 | "description": "The jury in a New York City murder trial is frustrated by a single member whose skeptical caution forces them to more carefully consider the evidence before jumping to a hasty verdict.", 130 | "genres": "Crime, Drama", 131 | "rating": 9.0, 132 | "url": "https://www.imdb.com/title/tt0050083" 133 | }, 134 | { 135 | "movie_id": 5, 136 | "title": "Schindler's List", 137 | "description": "In German-occupied Poland during World War II, industrialist Oskar Schindler gradually becomes concerned for his Jewish workforce after witnessing their persecution by the Nazis.", 138 | "genres": "Biography, Drama, History", 139 | "rating": 8.9, 140 | "url": "https://www.imdb.com/title/tt0108052" 141 | }, 142 | { 143 | "movie_id": 6, 144 | "title": "The Lord of the Rings: The Return of the King", 145 | "description": "Gandalf and Aragorn lead the World of Men against Sauron's army to draw his gaze from Frodo and Sam as they approach Mount Doom with the One Ring.", 146 | "genres": "Action, Adventure, Drama", 147 | "rating": 8.9, 148 | "url": "https://www.imdb.com/title/tt0167260" 149 | }, 150 | { 151 | "movie_id": 7, 152 | "title": "Pulp Fiction", 153 | "description": "The lives of two mob hitmen, a boxer, a gangster and his wife, and a pair of diner bandits intertwine in four tales of violence and redemption.", 154 | "genres": "Crime, Drama", 155 | "rating": 8.8, 156 | "url": "https://www.imdb.com/title/tt0110912" 157 | }, 158 | { 159 | "movie_id": 8, 160 | "title": "Fight Club", 161 | "description": "An insomniac office worker and a devil-may-care soap maker form an underground fight club that evolves into much more.", 162 | "genres": "Drama", 163 | "rating": 8.7, 164 | "url": "https://www.imdb.com/title/tt0137523" 165 | }, 166 | { 167 | "movie_id": 9, 168 | "title": "Saving Private Ryan", 169 | "description": "Following the Normandy Landings, a group of U.S. soldiers go behind enemy lines to retrieve a paratrooper whose brothers have been killed in action.", 170 | "genres": "Drama, War", 171 | "rating": 8.6, 172 | "url": "https://www.imdb.com/title/tt0120815" 173 | } 174 | ] 175 | 176 | And the search setting (content of ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-setting.json``) is: 177 | 178 | .. code-block:: javascript 179 | 180 | { 181 | // define how you want to search this dataset 182 | "fields": [ 183 | { 184 | "name": "movie_id", 185 | "type_is_store": true 186 | }, 187 | { 188 | "name": "title", 189 | "type_is_store": true, 190 | "type_is_ngram": true, 191 | "ngram_maxsize": 10, 192 | "ngram_minsize": 2, 193 | "weight": 2.0 194 | }, 195 | { 196 | "name": "description", 197 | "type_is_store": true, 198 | "type_is_phrase": true 199 | }, 200 | { 201 | "name": "genres", 202 | "type_is_store": true, 203 | "type_is_keyword": true, 204 | "keyword_lowercase": true, 205 | "weight": 1.5 206 | }, 207 | { 208 | "name": "rating", 209 | "type_is_store": true, 210 | "type_is_numeric": true, 211 | "is_sortable": true, 212 | "is_sort_ascending": false 213 | }, 214 | { 215 | "name": "url", 216 | "type_is_store": true 217 | } 218 | ], 219 | "title_field": "{title} ({genres}) rate {rating}", // title on Alfred drop down menu 220 | "subtitle_field": "{description}", // subtitle on Alfred drop down menu 221 | "arg_field": "{url}", // argument for other workflow component 222 | "autocomplete_field": "{title}", // tab auto complete behavior 223 | "icon_field": "movie-icon.png" 224 | } 225 | 226 | In the setting, we defined that: 227 | 228 | - we only want to store ``movie_id``, it is not used in search. because we want to use ``CMD + C`` to copy the movie id. 229 | - we want to use 2~10 gram to search ``title``. For example, ``The Shawshank Redemption`` will be index as ``th``, ``he``, ``sh``, ``ha``, ``aw``, ..., ``the``, ``sha``, ``haw``, ... If you search ``aw``, this document will be matched. This is most user friendly but consume more disk. 230 | - we want to use phrase to search ``description``, in other word, the full word spelling has to be right. For example ``Two imprisoned men bond over a number of years, finding solace and eventual redemption through acts of common decency.`` will be index as ``two``, ``imprisoned``, ``men``, ``bond``, ... If you search ``two men``, this document will be matched. This is the most common search in full-text search in search engine. 231 | - we want to use keyword to search ``genres``. The query has to be exact match this field. For example, if you search ``drama``, then all ``drama`` movie will be matched. 232 | - we want to use ``rating`` for sorting. if multiple documents are matched, the one with higher rating will be shown first. 233 | - we want to use the string template ``{title} ({genres}) rate {rating}`` to construct the title. 234 | - we want to use the string template ``{description}`` to construct the subtitle. 235 | - we want to use the string template ``{url}`` to construct the arg. 236 | - we want to use the string template ``{title}`` to construct the autocomplete. 237 | - we want to use a custom icon image for search result. 238 | 239 | .. image:: ./images/alfred-item.png 240 | 241 | 242 | Alfred Workflow Configuration 243 | ------------------------------------------------------------------------------ 244 | Below is a sample workflow diagram. The left is the "Script Filter" definition, the right side has "Open File", "Reveal in Finder" and "Open Url". When you select an item and hit ``Enter``, the arg, which is the url, will be passed to the "Open Url" action and open the IMDB movie url in your default browser. 245 | 246 | .. image:: ./images/alfred-workflow-diagram.png 247 | 248 | **NOTE** 249 | 250 | The "Open File" and "Reveal in Finder" are `afwf Framework `_ related components. Although the ``afwf_fts_anything`` is based on ``afwf Framework``, but they are not related to ``afwf_fts_anything``. 251 | 252 | Below is a sample workflow configuration. You need to know: 253 | 254 | - ``fts movie`` is the keyword to trigger this workflow. 255 | - ``Argument Optional`` means that the fts takes either no argument or a search query. 256 | - ``Language`` has to be a bash, because we use bash to call Python script. 257 | - ``Script`` is the python command to run this workflow, ``/usr/bin/python3 main.py 'fts movie {query}'`` means that we use ``/usr/bin/python3`` to run this workflow, and the dataset name is ``movie``. If you want to use a custom Python interpreter, you can change it to ``/path/to/your/python``. But the Python interpreter has to be Python3.7+. Also, if you created your own dataset and setting, you could change it to ``/usr/bin/python3 main.py 'fts your_datset_name {query}'``. 258 | 259 | .. image:: ./images/alfred-workflow-configuration.png 260 | 261 | 262 | Next 263 | ------------------------------------------------------------------------------ 264 | Please move to the `How to Use <./02-How-to-Use.rst>`_ to see how to install and use it. 265 | -------------------------------------------------------------------------------- /docs/user-guide/02-How-to-Use.rst: -------------------------------------------------------------------------------- 1 | How to Use 2 | ============================================================================== 3 | 4 | 5 | Installation 6 | ------------------------------------------------------------------------------ 7 | - Make sure you have installed `Alfred 3+ `_. 8 | - Make sure you already purchased `Powerpack `_, you have to buy powerpack to use `Alfred Workflow `_ feature. 9 | - Go to `Release `_, download the latest ``afwf_fts_anything-${version}-macosx_universal.alfredworkflow``. And double click to import to Alfred Workflow. Since this project only uses pure Python library (no C library), so it is universal for both Intel and ARM MacOS. 10 | 11 | 12 | Test with The Sample Movie Dataset 13 | ------------------------------------------------------------------------------ 14 | - Go to `Release `_. Find the latest release. 15 | - Download the ``movie.zip`` and uncompress it at ``${HOME}/.alfred-afwf/afwf_fts_anything/``. You should see: 16 | - ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-setting.json`` 17 | - ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-data.json`` 18 | - ``${HOME}/.alfred-afwf/afwf_fts_anything/movie-icon/`` 19 | - Type ``fts movie god father`` or ``fts movie drama`` in Alfred input box. The first command may fail because it takes time to build the index. The second command should work. 20 | 21 | 22 | Bring your own Dataset 23 | ------------------------------------------------------------------------------ 24 | - Make sure you understand `How it Works <./01-How-it-Works.rst>`_. 25 | - Give your dataset a name. Let's say it is ``mydata``. 26 | - Put your setting file at ``${HOME}/.alfred-afwf/afwf_fts_anything/mydata-setting.json``. 27 | - Put your data file at ``${HOME}/.alfred-afwf/afwf_fts_anything/mydata-data.json``. 28 | - Copy and paste the existing ``fts movie`` Script Filter, update the configuration accordingly. You must update the ``Keyword`` and update the dataset name in the ``Script``. 29 | - Type ``fts mydata your_query_here`` in Alfred input to search. 30 | 31 | 32 | FAQ 33 | ------------------------------------------------------------------------------ 34 | - Q: Why use json, why not CSV? 35 | - A: json provides more flexibility and compatible with multi-line text, which CSV usually not. 36 | 37 | - Q: Why it still returns old data after I updated the dataset? 38 | - A: Just delete the ``${HOME}/.alfred-afwf/afwf_fts_anything/-whoosh_index`` directory. 39 | -------------------------------------------------------------------------------- /docs/user-guide/images/alfred-item.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MacHu-GWU/afwf_fts_anything-project/3e2b7e0d7c21ae75149c107d8c7f704bc93e4353/docs/user-guide/images/alfred-item.png -------------------------------------------------------------------------------- /docs/user-guide/images/alfred-workflow-configuration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MacHu-GWU/afwf_fts_anything-project/3e2b7e0d7c21ae75149c107d8c7f704bc93e4353/docs/user-guide/images/alfred-workflow-configuration.png -------------------------------------------------------------------------------- /docs/user-guide/images/alfred-workflow-diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MacHu-GWU/afwf_fts_anything-project/3e2b7e0d7c21ae75149c107d8c7f704bc93e4353/docs/user-guide/images/alfred-workflow-diagram.png -------------------------------------------------------------------------------- /icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MacHu-GWU/afwf_fts_anything-project/3e2b7e0d7c21ae75149c107d8c7f704bc93e4353/icon.png -------------------------------------------------------------------------------- /info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | bundleid 6 | MacHu-GWU.afwf_fts_anything 7 | connections 8 | 9 | 27D78147-51C3-4E72-B805-0E11B1F3A6D8 10 | 11 | 12 | destinationuid 13 | 0954010F-CD86-448F-A32A-086CF90100D2 14 | modifiers 15 | 0 16 | modifiersubtext 17 | 18 | sourceoutputuid 19 | 5EA742F9-1BBA-4433-8613-BFBFF0D0E462 20 | vitoclose 21 | 22 | 23 | 24 | 55DEB335-2FC6-4FF9-8482-F949B27BFEE9 25 | 26 | 27 | destinationuid 28 | 27D78147-51C3-4E72-B805-0E11B1F3A6D8 29 | modifiers 30 | 0 31 | modifiersubtext 32 | 33 | vitoclose 34 | 35 | 36 | 37 | destinationuid 38 | 92ADA757-06DC-428F-AA48-7C16FD4BE487 39 | modifiers 40 | 0 41 | modifiersubtext 42 | 43 | vitoclose 44 | 45 | 46 | 47 | destinationuid 48 | EA372964-D319-44BA-AA38-CFB7EAEEF180 49 | modifiers 50 | 0 51 | modifiersubtext 52 | 53 | vitoclose 54 | 55 | 56 | 57 | 92ADA757-06DC-428F-AA48-7C16FD4BE487 58 | 59 | 60 | destinationuid 61 | 06224816-79FA-4D91-A314-0C8E6756D80B 62 | modifiers 63 | 0 64 | modifiersubtext 65 | 66 | sourceoutputuid 67 | 5EA742F9-1BBA-4433-8613-BFBFF0D0E462 68 | vitoclose 69 | 70 | 71 | 72 | EA372964-D319-44BA-AA38-CFB7EAEEF180 73 | 74 | 75 | destinationuid 76 | B86DCC60-B7BA-4FCB-A766-0282D4F57AEF 77 | modifiers 78 | 0 79 | modifiersubtext 80 | 81 | sourceoutputuid 82 | 5EA742F9-1BBA-4433-8613-BFBFF0D0E462 83 | vitoclose 84 | 85 | 86 | 87 | 88 | createdby 89 | Sanhe Hu 90 | description 91 | Full Text Search 92 | disabled 93 | 94 | name 95 | afwf_fts_anything 96 | objects 97 | 98 | 99 | config 100 | 101 | alfredfiltersresults 102 | 103 | alfredfiltersresultsmatchmode 104 | 0 105 | argumenttreatemptyqueryasnil 106 | 107 | argumenttrimmode 108 | 0 109 | argumenttype 110 | 1 111 | escaping 112 | 69 113 | keyword 114 | fts movie 115 | queuedelaycustom 116 | 3 117 | queuedelayimmediatelyinitially 118 | 119 | queuedelaymode 120 | 2 121 | queuemode 122 | 1 123 | runningsubtext 124 | 125 | script 126 | /usr/bin/python3 main.py 'fts movie {query}' 127 | scriptargtype 128 | 0 129 | scriptfile 130 | 131 | subtext 132 | Keyword: fts movie 133 | title 134 | Full Text Search for Movie Info 135 | type 136 | 0 137 | withspace 138 | 139 | 140 | type 141 | alfred.workflow.input.scriptfilter 142 | uid 143 | 55DEB335-2FC6-4FF9-8482-F949B27BFEE9 144 | version 145 | 3 146 | 147 | 148 | config 149 | 150 | openwith 151 | 152 | sourcefile 153 | {var:_open_log_file_path} 154 | 155 | type 156 | alfred.workflow.action.openfile 157 | uid 158 | 0954010F-CD86-448F-A32A-086CF90100D2 159 | version 160 | 3 161 | 162 | 163 | config 164 | 165 | conditions 166 | 167 | 168 | inputstring 169 | {var:_open_log_file} 170 | matchcasesensitive 171 | 172 | matchmode 173 | 0 174 | matchstring 175 | y 176 | outputlabel 177 | 178 | uid 179 | 5EA742F9-1BBA-4433-8613-BFBFF0D0E462 180 | 181 | 182 | elselabel 183 | else 184 | 185 | type 186 | alfred.workflow.utility.conditional 187 | uid 188 | 27D78147-51C3-4E72-B805-0E11B1F3A6D8 189 | version 190 | 1 191 | 192 | 193 | config 194 | 195 | path 196 | {var:reveal_file_in_finder_path} 197 | 198 | type 199 | alfred.workflow.action.revealfile 200 | uid 201 | 06224816-79FA-4D91-A314-0C8E6756D80B 202 | version 203 | 1 204 | 205 | 206 | config 207 | 208 | conditions 209 | 210 | 211 | inputstring 212 | {var:reveal_file_in_finder} 213 | matchcasesensitive 214 | 215 | matchmode 216 | 0 217 | matchstring 218 | y 219 | outputlabel 220 | 221 | uid 222 | 5EA742F9-1BBA-4433-8613-BFBFF0D0E462 223 | 224 | 225 | elselabel 226 | else 227 | 228 | type 229 | alfred.workflow.utility.conditional 230 | uid 231 | 92ADA757-06DC-428F-AA48-7C16FD4BE487 232 | version 233 | 1 234 | 235 | 236 | config 237 | 238 | browser 239 | 240 | spaces 241 | 242 | url 243 | {var:open_url_arg} 244 | utf8 245 | 246 | 247 | type 248 | alfred.workflow.action.openurl 249 | uid 250 | B86DCC60-B7BA-4FCB-A766-0282D4F57AEF 251 | version 252 | 1 253 | 254 | 255 | config 256 | 257 | conditions 258 | 259 | 260 | inputstring 261 | {var:open_url} 262 | matchcasesensitive 263 | 264 | matchmode 265 | 0 266 | matchstring 267 | y 268 | outputlabel 269 | 270 | uid 271 | 5EA742F9-1BBA-4433-8613-BFBFF0D0E462 272 | 273 | 274 | elselabel 275 | else 276 | 277 | type 278 | alfred.workflow.utility.conditional 279 | uid 280 | EA372964-D319-44BA-AA38-CFB7EAEEF180 281 | version 282 | 1 283 | 284 | 285 | readme 286 | afwf_fts_anything is an Alfred Workflow allows you to do full-text search on your own dataset, and use the result to open url, open file, run script, or basically do anything. Typically, you need to setup expansive elasticsearch server, learn how to do data ingestion, learn search API, and build your own Alfred workflow. afwf_fts_anything removes all the blockers and let you just focus on your dataset and search configuration. 287 | 288 | See more at: https://github.com/MacHu-GWU/afwf_fts_anything-project 289 | uidata 290 | 291 | 06224816-79FA-4D91-A314-0C8E6756D80B 292 | 293 | xpos 294 | 740 295 | ypos 296 | 115 297 | 298 | 0954010F-CD86-448F-A32A-086CF90100D2 299 | 300 | xpos 301 | 740 302 | ypos 303 | 10 304 | 305 | 27D78147-51C3-4E72-B805-0E11B1F3A6D8 306 | 307 | xpos 308 | 545 309 | ypos 310 | 35 311 | 312 | 55DEB335-2FC6-4FF9-8482-F949B27BFEE9 313 | 314 | xpos 315 | 35 316 | ypos 317 | 10 318 | 319 | 92ADA757-06DC-428F-AA48-7C16FD4BE487 320 | 321 | xpos 322 | 545 323 | ypos 324 | 140 325 | 326 | B86DCC60-B7BA-4FCB-A766-0282D4F57AEF 327 | 328 | xpos 329 | 740 330 | ypos 331 | 220 332 | 333 | EA372964-D319-44BA-AA38-CFB7EAEEF180 334 | 335 | xpos 336 | 545 337 | ypos 338 | 245 339 | 340 | 341 | variablesdontexport 342 | 343 | version 344 | 1.2.1 345 | webaddress 346 | https://github.com/MacHu-GWU/afwf_fts_anything-project 347 | 348 | 349 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | [CN] 5 | Alfred Workflow Script Filter 的入口主文件, 适用于所有的 Python Alfred Workflow 项目. 6 | 如果你不懂这个文件起什么作用, 最多你可以修改 ``debug=True``, 请不要修改其他部分. 7 | 8 | How it works: 9 | 10 | 这个文件只应该在 Alfred Workflow 真正的目录下被运行, 而不应该是在这个 Git repo 的开发目录 11 | 下被运行. 真正的目录路径长这个样子 12 | ``/path/to/Alfred.alfredpreferences/workflows/user.workflow.ABCD1234-A1B2-C3D4-E5F6-A1B2C3D4E5F6``. 13 | 14 | 这个文件的核心逻辑是, 根据文件本身的位置定位到 ``lib`` 目录所在的位置, 并将其加入到 ``sys.path`` 中, 15 | 这样 Python 依赖就可以被找到了. 然后再从你的 workflow 源码包中导入 Workflow 对象, 并运行它. 16 | """ 17 | 18 | import sys 19 | from pathlib import Path 20 | 21 | dir_here = Path(__file__).absolute().parent 22 | dir_lib = Path(dir_here, "lib") 23 | 24 | if dir_lib.exists(): 25 | sys.path = [str(dir_lib), ] + sys.path 26 | 27 | if __name__ == "__main__": 28 | from afwf_fts_anything import wf 29 | 30 | wf.run(debug=True) 31 | -------------------------------------------------------------------------------- /poetry.toml: -------------------------------------------------------------------------------- 1 | [virtualenvs] 2 | in-project = true 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "afwf_fts_anything" 3 | version = "1.1.1" 4 | description = "Allows you to do full-text search on your own dataset, and use the result to open url, open file, run script, or basically do anything." 5 | authors = ["Sanhe Hu "] 6 | 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.7" 10 | afwf = "0.3.1" 11 | attrs = "21.4.0" 12 | attrs_mate = "1.0.2" 13 | pathlib_mate = " 1.2.1" 14 | diskcache = "5.4.0" 15 | sqlitedict = "2.1.0" 16 | whoosh = "2.7.4" 17 | superjson = "1.0.2" 18 | requests = "2.27.1" 19 | cached-property = { version = "1.5.2", markers = "python_version <= '3.7'" } 20 | 21 | 22 | [tool.poetry.dev-dependencies] 23 | # Don't put anything here, explicit is better than implicit 24 | 25 | 26 | [tool.poetry.group.dev] 27 | optional = true 28 | 29 | 30 | [tool.poetry.group.dev.dependencies] 31 | twine = "3.8.0" 32 | wheel = "0.37.1" 33 | rich = "12.5.1" 34 | fire = "0.5.0" 35 | 36 | 37 | [tool.poetry.group.doc] 38 | optional = true 39 | 40 | 41 | [tool.poetry.group.doc.dependencies] 42 | 43 | 44 | [tool.poetry.group.test] 45 | optional = true 46 | 47 | 48 | [tool.poetry.group.test.dependencies] 49 | pytest = "6.2.5" 50 | pytest-cov = "2.12.1" 51 | 52 | 53 | [build-system] 54 | requires = ["poetry-core>=1.0.0"] 55 | build-backend = "poetry.core.masonry.api" 56 | -------------------------------------------------------------------------------- /release-history.rst: -------------------------------------------------------------------------------- 1 | .. _release_history: 2 | 3 | Release and Version History 4 | ============================================================================== 5 | 6 | 7 | Backlog 8 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 9 | **Features and Improvements** 10 | 11 | **Minor Improvements** 12 | 13 | **Bugfixes** 14 | 15 | **Miscellaneous** 16 | 17 | 18 | 1.2.1 (2024-01-02) 19 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 20 | **Features and Improvements** 21 | 22 | - Add ``ngram_words`` index type. It tokenizes the text into words before index. It is more accurate than ``ngram`` if you have delimiter in your text. 23 | 24 | **Minor Improvements** 25 | 26 | - Fix a bug that it also open the url even it is not a valid url item. 27 | 28 | 29 | 1.1.1 (2023-04-03) 30 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 31 | **Features and Improvements** 32 | 33 | - First API stable release. 34 | - drop support for Python2.7. 35 | - only support for Python3.7+. 36 | - adapt `afwf `_ framework 0.3.1. 37 | - allow field boost for search result sorting by weight. 38 | - allow sortable field for custom result sorting. 39 | - allow custom icon. 40 | - allow download ``.json`` or ``.json.zip`` data file from internet. 41 | - add ``?`` to search query to get help message. 42 | 43 | **Minor Improvements** 44 | 45 | - reword the user document. 46 | 47 | 48 | 0.0.3 (2021-07-25) 49 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 50 | **Miscellaneous** 51 | 52 | - Support the End of life Python2.7 53 | 54 | 55 | 0.0.2 (2020-05-23) 56 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 57 | **Features and Improvements** 58 | 59 | - Allow column specified index search settings 60 | 61 | 62 | 0.0.1 (2019-01-02) 63 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 64 | **Features and Improvements** 65 | 66 | - First release! 67 | - support ngram, phrase, keyword search. 68 | - easy to custom workflow item. 69 | -------------------------------------------------------------------------------- /requirements-automation.txt: -------------------------------------------------------------------------------- 1 | pathlib-mate==1.2.1 2 | poetry==1.2.2 3 | fire==0.5.0 4 | rich 5 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | bleach==6.0.0 ; python_version >= "3.7" and python_version < "4.0" \ 2 | --hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \ 3 | --hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4 4 | certifi==2022.12.7 ; python_version >= "3.7" and python_version < "4.0" \ 5 | --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ 6 | --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 7 | cffi==1.15.1 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "linux" \ 8 | --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ 9 | --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ 10 | --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ 11 | --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ 12 | --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ 13 | --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ 14 | --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ 15 | --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ 16 | --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ 17 | --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ 18 | --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ 19 | --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ 20 | --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ 21 | --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ 22 | --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ 23 | --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ 24 | --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ 25 | --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ 26 | --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ 27 | --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ 28 | --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ 29 | --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ 30 | --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ 31 | --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ 32 | --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ 33 | --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ 34 | --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ 35 | --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ 36 | --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ 37 | --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ 38 | --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ 39 | --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ 40 | --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ 41 | --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ 42 | --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ 43 | --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ 44 | --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ 45 | --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ 46 | --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ 47 | --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ 48 | --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ 49 | --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ 50 | --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ 51 | --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ 52 | --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ 53 | --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ 54 | --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ 55 | --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ 56 | --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ 57 | --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ 58 | --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ 59 | --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ 60 | --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ 61 | --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ 62 | --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ 63 | --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ 64 | --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ 65 | --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ 66 | --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ 67 | --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ 68 | --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ 69 | --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ 70 | --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ 71 | --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 72 | charset-normalizer==2.0.12 ; python_version >= "3.7" and python_version < "4.0" \ 73 | --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ 74 | --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df 75 | colorama==0.4.6 ; python_version >= "3.7" and python_version < "4.0" \ 76 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ 77 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 78 | commonmark==0.9.1 ; python_version >= "3.7" and python_version < "4.0" \ 79 | --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ 80 | --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 81 | cryptography==40.0.1 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "linux" \ 82 | --hash=sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405 \ 83 | --hash=sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356 \ 84 | --hash=sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472 \ 85 | --hash=sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41 \ 86 | --hash=sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a \ 87 | --hash=sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88 \ 88 | --hash=sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554 \ 89 | --hash=sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db \ 90 | --hash=sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778 \ 91 | --hash=sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c \ 92 | --hash=sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917 \ 93 | --hash=sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797 \ 94 | --hash=sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160 \ 95 | --hash=sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c \ 96 | --hash=sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c \ 97 | --hash=sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2 \ 98 | --hash=sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4 \ 99 | --hash=sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122 \ 100 | --hash=sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94 101 | docutils==0.19 ; python_version >= "3.7" and python_version < "4.0" \ 102 | --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ 103 | --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc 104 | fire==0.5.0 ; python_version >= "3.7" and python_version < "4.0" \ 105 | --hash=sha256:a6b0d49e98c8963910021f92bba66f65ab440da2982b78eb1bbf95a0a34aacc6 106 | idna==3.4 ; python_version >= "3.7" and python_version < "4.0" \ 107 | --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ 108 | --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 109 | importlib-metadata==6.1.0 ; python_version >= "3.7" and python_version < "4.0" \ 110 | --hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \ 111 | --hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09 112 | importlib-resources==5.12.0 ; python_version >= "3.7" and python_version < "3.9" \ 113 | --hash=sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6 \ 114 | --hash=sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a 115 | jaraco-classes==3.2.3 ; python_version >= "3.7" and python_version < "4.0" \ 116 | --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ 117 | --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a 118 | jeepney==0.8.0 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "linux" \ 119 | --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ 120 | --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 121 | keyring==23.13.1 ; python_version >= "3.7" and python_version < "4.0" \ 122 | --hash=sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd \ 123 | --hash=sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678 124 | more-itertools==9.1.0 ; python_version >= "3.7" and python_version < "4.0" \ 125 | --hash=sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d \ 126 | --hash=sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3 127 | pkginfo==1.9.6 ; python_version >= "3.7" and python_version < "4.0" \ 128 | --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ 129 | --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 130 | pycparser==2.21 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "linux" \ 131 | --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ 132 | --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 133 | pygments==2.14.0 ; python_version >= "3.7" and python_version < "4.0" \ 134 | --hash=sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297 \ 135 | --hash=sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717 136 | pywin32-ctypes==0.2.0 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "win32" \ 137 | --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \ 138 | --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 139 | readme-renderer==37.3 ; python_version >= "3.7" and python_version < "4.0" \ 140 | --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ 141 | --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 142 | requests-toolbelt==0.10.1 ; python_version >= "3.7" and python_version < "4.0" \ 143 | --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ 144 | --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d 145 | requests==2.27.1 ; python_version >= "3.7" and python_version < "4.0" \ 146 | --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ 147 | --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d 148 | rfc3986==2.0.0 ; python_version >= "3.7" and python_version < "4.0" \ 149 | --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ 150 | --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c 151 | rich==12.5.1 ; python_version >= "3.7" and python_version < "4.0" \ 152 | --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ 153 | --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca 154 | secretstorage==3.3.3 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "linux" \ 155 | --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ 156 | --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 157 | six==1.16.0 ; python_version >= "3.7" and python_version < "4.0" \ 158 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ 159 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 160 | termcolor==2.2.0 ; python_version >= "3.7" and python_version < "4.0" \ 161 | --hash=sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7 \ 162 | --hash=sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a 163 | tqdm==4.65.0 ; python_version >= "3.7" and python_version < "4.0" \ 164 | --hash=sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5 \ 165 | --hash=sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671 166 | twine==3.8.0 ; python_version >= "3.7" and python_version < "4.0" \ 167 | --hash=sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19 \ 168 | --hash=sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8 169 | typing-extensions==4.5.0 ; python_version >= "3.7" and python_version < "3.9" \ 170 | --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ 171 | --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 172 | urllib3==1.26.15 ; python_version >= "3.7" and python_version < "4.0" \ 173 | --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ 174 | --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 175 | webencodings==0.5.1 ; python_version >= "3.7" and python_version < "4.0" \ 176 | --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ 177 | --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 178 | wheel==0.37.1 ; python_version >= "3.7" and python_version < "4.0" \ 179 | --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ 180 | --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 181 | zipp==3.15.0 ; python_version >= "3.7" and python_version < "4.0" \ 182 | --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \ 183 | --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556 184 | -------------------------------------------------------------------------------- /requirements-doc.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /requirements-main.txt: -------------------------------------------------------------------------------- 1 | afwf==0.3.1 ; python_version >= "3.7" and python_version < "4.0" \ 2 | --hash=sha256:2a02d0b880e0f5a8e3b20945ccc17646c2c16729598f1313b9d864f1decaac6d \ 3 | --hash=sha256:ef21890c6ba128ca90310223b992d5b017c89ff0d178c14cf8a84ebb291117be 4 | atomicwrites==1.4.1 ; python_version >= "3.7" and python_version < "4.0" \ 5 | --hash=sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11 6 | attrs-mate==1.0.2 ; python_version >= "3.7" and python_version < "4.0" \ 7 | --hash=sha256:8800efb8d0ae9501d1a4e611d749cc2d1b16cd1fa4cfb17a0f29fb656a70369f \ 8 | --hash=sha256:8e9c9931bea7cd31ffdf2461becf4ce4370e7af073ddb423b4788f387457f580 9 | attrs==21.4.0 ; python_version >= "3.7" and python_version < "4.0" \ 10 | --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ 11 | --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd 12 | cached-property==1.5.2 ; python_version == "3.7" \ 13 | --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \ 14 | --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 15 | certifi==2022.12.7 ; python_version >= "3.7" and python_version < "4.0" \ 16 | --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ 17 | --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 18 | charset-normalizer==2.0.12 ; python_version >= "3.7" and python_version < "4.0" \ 19 | --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ 20 | --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df 21 | diskcache==5.4.0 ; python_version >= "3.7" and python_version < "4.0" \ 22 | --hash=sha256:8879eb8c9b4a2509a5e633d2008634fb2b0b35c2b36192d89655dbde02419644 \ 23 | --hash=sha256:af3ec6d7f167bbef7b6c33d9ee22f86d3e8f2dd7131eb7c4703d8d91ccdc0cc4 24 | idna==3.4 ; python_version >= "3.7" and python_version < "4.0" \ 25 | --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ 26 | --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 27 | pathlib-mate==1.2.1 ; python_version >= "3.7" and python_version < "4.0" \ 28 | --hash=sha256:0cd0a8b742941efdc5a3b3c46e75da6430c3f869fe6a5fb490025c0bc39a5c2d \ 29 | --hash=sha256:6ef576ccefac39a8a1bb4b7d93fcd50f73113a34ddd01cc46ca149b947f7dc0e 30 | requests==2.27.1 ; python_version >= "3.7" and python_version < "4.0" \ 31 | --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ 32 | --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d 33 | six==1.16.0 ; python_version >= "3.7" and python_version < "4.0" \ 34 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ 35 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 36 | sqlitedict==2.1.0 ; python_version >= "3.7" and python_version < "4.0" \ 37 | --hash=sha256:03d9cfb96d602996f1d4c2db2856f1224b96a9c431bdd16e78032a72940f9e8c 38 | superjson==1.0.2 ; python_version >= "3.7" and python_version < "4.0" \ 39 | --hash=sha256:24cfe20d419ec4e6be4487831eec9d762a9fb3b7b4a5a1c32a31a14f99996e26 \ 40 | --hash=sha256:5b93fb43a89187a8ce777c7ffbdaadcb6ecda16bf024e3518248642e5875d940 41 | urllib3==1.26.15 ; python_version >= "3.7" and python_version < "4.0" \ 42 | --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ 43 | --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 44 | whoosh==2.7.4 ; python_version >= "3.7" and python_version < "4.0" \ 45 | --hash=sha256:7ca5633dbfa9e0e0fa400d3151a8a0c4bec53bd2ecedc0a67705b17565c31a83 \ 46 | --hash=sha256:aa39c3c3426e3fd107dcb4bde64ca1e276a65a889d9085a6e4b54ba82420a852 \ 47 | --hash=sha256:e0857375f63e9041e03fedd5b7541f97cf78917ac1b6b06c1fcc9b45375dda69 48 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | atomicwrites==1.4.1 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "win32" \ 2 | --hash=sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11 3 | attrs==21.4.0 ; python_version >= "3.7" and python_version < "4.0" \ 4 | --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ 5 | --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd 6 | colorama==0.4.6 ; python_version >= "3.7" and python_version < "4.0" and sys_platform == "win32" \ 7 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ 8 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 9 | coverage==7.2.2 ; python_version >= "3.7" and python_version < "4.0" \ 10 | --hash=sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d \ 11 | --hash=sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4 \ 12 | --hash=sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e \ 13 | --hash=sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab \ 14 | --hash=sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90 \ 15 | --hash=sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6 \ 16 | --hash=sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731 \ 17 | --hash=sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540 \ 18 | --hash=sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2 \ 19 | --hash=sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292 \ 20 | --hash=sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5 \ 21 | --hash=sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b \ 22 | --hash=sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2 \ 23 | --hash=sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0 \ 24 | --hash=sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57 \ 25 | --hash=sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3 \ 26 | --hash=sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140 \ 27 | --hash=sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84 \ 28 | --hash=sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988 \ 29 | --hash=sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67 \ 30 | --hash=sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d \ 31 | --hash=sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2 \ 32 | --hash=sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5 \ 33 | --hash=sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9 \ 34 | --hash=sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8 \ 35 | --hash=sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd \ 36 | --hash=sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6 \ 37 | --hash=sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be \ 38 | --hash=sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88 \ 39 | --hash=sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25 \ 40 | --hash=sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137 \ 41 | --hash=sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968 \ 42 | --hash=sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9 \ 43 | --hash=sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef \ 44 | --hash=sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54 \ 45 | --hash=sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512 \ 46 | --hash=sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005 \ 47 | --hash=sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f \ 48 | --hash=sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149 \ 49 | --hash=sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d \ 50 | --hash=sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8 \ 51 | --hash=sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7 \ 52 | --hash=sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5 \ 53 | --hash=sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016 \ 54 | --hash=sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69 \ 55 | --hash=sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212 \ 56 | --hash=sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc \ 57 | --hash=sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8 \ 58 | --hash=sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d \ 59 | --hash=sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd \ 60 | --hash=sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169 61 | importlib-metadata==6.1.0 ; python_version >= "3.7" and python_version < "3.8" \ 62 | --hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \ 63 | --hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09 64 | iniconfig==2.0.0 ; python_version >= "3.7" and python_version < "4.0" \ 65 | --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ 66 | --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 67 | packaging==23.0 ; python_version >= "3.7" and python_version < "4.0" \ 68 | --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \ 69 | --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97 70 | pluggy==1.0.0 ; python_version >= "3.7" and python_version < "4.0" \ 71 | --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ 72 | --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 73 | py==1.11.0 ; python_version >= "3.7" and python_version < "4.0" \ 74 | --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ 75 | --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 76 | pytest-cov==2.12.1 ; python_version >= "3.7" and python_version < "4.0" \ 77 | --hash=sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a \ 78 | --hash=sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7 79 | pytest==6.2.5 ; python_version >= "3.7" and python_version < "4.0" \ 80 | --hash=sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89 \ 81 | --hash=sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134 82 | toml==0.10.2 ; python_version >= "3.7" and python_version < "4.0" \ 83 | --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ 84 | --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f 85 | typing-extensions==4.5.0 ; python_version >= "3.7" and python_version < "3.8" \ 86 | --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ 87 | --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 88 | zipp==3.15.0 ; python_version >= "3.7" and python_version < "3.8" \ 89 | --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \ 90 | --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556 91 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | The setup script is the centre of all activity in building, distributing, 5 | and installing modules using the Distutils. It is required for ``pip install``. 6 | See more: https://docs.python.org/2/distutils/setupscript.html 7 | """ 8 | 9 | from __future__ import print_function 10 | import os 11 | from setuptools import setup, find_packages 12 | 13 | # --- import your package --- 14 | import afwf_fts_anything as package 15 | 16 | if __name__ == "__main__": 17 | # --- Automatically generate setup parameters --- 18 | # Your package name 19 | PKG_NAME = package.__name__ 20 | 21 | # Version number, VERY IMPORTANT! 22 | VERSION = package.__version__ 23 | 24 | PACKAGES, INCLUDE_PACKAGE_DATA, PACKAGE_DATA, PY_MODULES = ( 25 | None, 26 | None, 27 | None, 28 | None, 29 | ) 30 | 31 | # It's a directory style package 32 | if os.path.exists(__file__[:-8] + PKG_NAME): 33 | # Include all sub packages in package directory 34 | PACKAGES = [PKG_NAME] + [ 35 | "%s.%s" % (PKG_NAME, i) for i in find_packages(PKG_NAME) 36 | ] 37 | 38 | # Include everything in package directory 39 | INCLUDE_PACKAGE_DATA = True 40 | PACKAGE_DATA = { 41 | "": ["*.*"], 42 | } 43 | 44 | # It's a single script style package 45 | elif os.path.exists(__file__[:-8] + PKG_NAME + ".py"): 46 | PY_MODULES = [ 47 | PKG_NAME, 48 | ] 49 | 50 | def read_requirements_file(path): 51 | """ 52 | Read requirements.txt, ignore comments 53 | """ 54 | requires = list() 55 | f = open(path, "rb") 56 | for line in f.read().decode("utf-8").split("\n"): 57 | line = line.strip() 58 | if "#" in line: 59 | line = line[: line.find("#")].strip() 60 | if line: 61 | requires.append(line) 62 | return requires 63 | 64 | try: 65 | REQUIRES = read_requirements_file("requirements-main.txt") 66 | except: 67 | print("'requirements-main.txt' not found!") 68 | REQUIRES = list() 69 | 70 | setup( 71 | name=PKG_NAME, 72 | version=VERSION, 73 | packages=PACKAGES, 74 | include_package_data=INCLUDE_PACKAGE_DATA, 75 | package_data=PACKAGE_DATA, 76 | py_modules=PY_MODULES, 77 | ) 78 | -------------------------------------------------------------------------------- /tests/.gitignore: -------------------------------------------------------------------------------- 1 | movie-whoosh_index/ 2 | -------------------------------------------------------------------------------- /tests/all.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | if __name__ == "__main__": 4 | import pytest 5 | 6 | pytest.main(["-s", "--tb=native"]) -------------------------------------------------------------------------------- /tests/handlers/.gitignore: -------------------------------------------------------------------------------- 1 | tmp/ 2 | not-exists-data.json.temp.zip 3 | -------------------------------------------------------------------------------- /tests/handlers/all.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | if __name__ == "__main__": 4 | import pytest 5 | 6 | pytest.main(["-s", "--tb=native"]) -------------------------------------------------------------------------------- /tests/handlers/test_handler_fts.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import pytest 4 | 5 | from afwf_fts_anything.exc import GetDataError, BuildIndexError 6 | from afwf_fts_anything.paths import path_setting, path_data, dir_index, dir_icon 7 | from afwf_fts_anything.dataset import Dataset 8 | from afwf_fts_anything.handlers.fts import handler 9 | 10 | 11 | class TestHandler: 12 | def test_build_index(self): 13 | dataset = Dataset( 14 | name="movie", 15 | path_setting=path_setting, 16 | path_data=path_data, 17 | dir_index=dir_index, 18 | dir_icon=dir_icon, 19 | ) 20 | handler.build_index(dataset) 21 | 22 | dataset = Dataset( 23 | name="not-exists", 24 | path_setting=path_setting, 25 | path_data=path_data.change("not-exists-data.json"), 26 | dir_index=dir_index.change(new_basename="not-exists"), 27 | ) 28 | with pytest.raises(GetDataError): 29 | handler.build_index(dataset) 30 | 31 | def test_parse_query(self): 32 | assert handler.parse_query("movie ") == dict(dataset_name="movie", query_str="") 33 | assert handler.parse_query("movie ") == dict( 34 | dataset_name="movie", query_str="" 35 | ) 36 | assert handler.parse_query("movie ?") == dict( 37 | dataset_name="movie", query_str="?" 38 | ) 39 | assert handler.parse_query("movie hello world") == dict( 40 | dataset_name="movie", query_str="hello world" 41 | ) 42 | assert handler.parse_query("movie hello , world") == dict( 43 | dataset_name="movie", query_str="hello world" 44 | ) 45 | 46 | def test_main(self): 47 | sf = handler.main( 48 | dataset_name="movie", 49 | query_str="", 50 | path_setting=path_setting, 51 | path_data=path_data, 52 | ) 53 | assert sf.items[0].title == "Full text search 'movie' dataset" 54 | 55 | sf = handler.main( 56 | dataset_name="movie", 57 | query_str="?", 58 | path_setting=path_setting, 59 | path_data=path_data, 60 | ) 61 | assert sf.items[0].title == "Open 'movie' dataset folder location" 62 | 63 | sf = handler.main( 64 | dataset_name="movie", 65 | query_str="God Father", 66 | path_setting=path_setting, 67 | path_data=path_data, 68 | ) 69 | assert sf.items[0].arg == "2" 70 | 71 | sf = handler.main( 72 | dataset_name="movie", 73 | query_str="this movie doesn't exists, don't ever try it", 74 | path_setting=path_setting, 75 | path_data=path_data, 76 | ) 77 | assert sf.items[0].title.startswith("No result found for query:") 78 | 79 | 80 | if __name__ == "__main__": 81 | from afwf_fts_anything.tests import run_cov_test 82 | 83 | run_cov_test(__file__, "afwf_fts_anything.handlers.fts", preview=False) 84 | -------------------------------------------------------------------------------- /tests/movie-data.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "movie_id": 1, 4 | "title": "The Shawshank Redemption", 5 | "description": "Two imprisoned men bond over a number of years, finding solace and eventual redemption through acts of common decency.", 6 | "genres": "Drama", 7 | "rating": 9.2, 8 | "url": "https://www.imdb.com/title/tt0111161" 9 | }, 10 | { 11 | "movie_id": 2, 12 | "title": "The Godfather", 13 | "description": "The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.", 14 | "genres": "Crime, Drama", 15 | "rating": 9.2, 16 | "url": "https://www.imdb.com/title/tt0068646" 17 | }, 18 | { 19 | "movie_id": 3, 20 | "title": "The Dark Knight", 21 | "description": "When the menace known as the Joker wreaks havoc and chaos on the people of Gotham, Batman must accept one of the greatest psychological and physical tests of his ability to fight injustice.", 22 | "genres": "Action, Crime, Drama", 23 | "rating": 9.0, 24 | "url": "https://www.imdb.com/title/tt0468569" 25 | }, 26 | { 27 | "movie_id": 4, 28 | "title": "12 Angry Men", 29 | "description": "The jury in a New York City murder trial is frustrated by a single member whose skeptical caution forces them to more carefully consider the evidence before jumping to a hasty verdict.", 30 | "genres": "Crime, Drama", 31 | "rating": 9.0, 32 | "url": "https://www.imdb.com/title/tt0050083" 33 | }, 34 | { 35 | "movie_id": 5, 36 | "title": "Schindler's List", 37 | "description": "In German-occupied Poland during World War II, industrialist Oskar Schindler gradually becomes concerned for his Jewish workforce after witnessing their persecution by the Nazis.", 38 | "genres": "Biography, Drama, History", 39 | "rating": 8.9, 40 | "url": "https://www.imdb.com/title/tt0108052" 41 | }, 42 | { 43 | "movie_id": 6, 44 | "title": "The Lord of the Rings: The Return of the King", 45 | "description": "Gandalf and Aragorn lead the World of Men against Sauron's army to draw his gaze from Frodo and Sam as they approach Mount Doom with the One Ring.", 46 | "genres": "Action, Adventure, Drama", 47 | "rating": 8.9, 48 | "url": "https://www.imdb.com/title/tt0167260" 49 | }, 50 | { 51 | "movie_id": 7, 52 | "title": "Pulp Fiction", 53 | "description": "The lives of two mob hitmen, a boxer, a gangster and his wife, and a pair of diner bandits intertwine in four tales of violence and redemption.", 54 | "genres": "Crime, Drama", 55 | "rating": 8.8, 56 | "url": "https://www.imdb.com/title/tt0110912" 57 | }, 58 | { 59 | "movie_id": 8, 60 | "title": "Fight Club", 61 | "description": "An insomniac office worker and a devil-may-care soap maker form an underground fight club that evolves into much more.", 62 | "genres": "Drama", 63 | "rating": 8.7, 64 | "url": "https://www.imdb.com/title/tt0137523" 65 | }, 66 | { 67 | "movie_id": 9, 68 | "title": "Saving Private Ryan", 69 | "description": "Following the Normandy Landings, a group of U.S. soldiers go behind enemy lines to retrieve a paratrooper whose brothers have been killed in action.", 70 | "genres": "Drama, War", 71 | "rating": 8.6, 72 | "url": "https://www.imdb.com/title/tt0120815" 73 | } 74 | ] -------------------------------------------------------------------------------- /tests/movie-icon/movie-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MacHu-GWU/afwf_fts_anything-project/3e2b7e0d7c21ae75149c107d8c7f704bc93e4353/tests/movie-icon/movie-icon.png -------------------------------------------------------------------------------- /tests/movie-setting.json: -------------------------------------------------------------------------------- 1 | { 2 | // define how you want to search this dataset 3 | "fields": [ 4 | { 5 | "name": "movie_id", 6 | "type_is_store": true 7 | }, 8 | { 9 | "name": "title", 10 | "type_is_store": true, 11 | "type_is_ngram": true, 12 | "ngram_maxsize": 10, 13 | "ngram_minsize": 2, 14 | "weight": 2.0 15 | }, 16 | { 17 | "name": "description", 18 | "type_is_store": true, 19 | "type_is_phrase": true 20 | }, 21 | { 22 | "name": "genres", 23 | "type_is_store": true, 24 | "type_is_keyword": true, 25 | "keyword_lowercase": true, 26 | "weight": 1.5 27 | }, 28 | { 29 | "name": "rating", 30 | "type_is_store": true, 31 | "type_is_numeric": true, 32 | "is_sortable": true, 33 | "is_sort_ascending": false 34 | }, 35 | { 36 | "name": "url", 37 | "type_is_store": true 38 | } 39 | ], 40 | "data_url": "https://github.com/MacHu-GWU/afwf_fts_anything-project/releases/download/1.1.1/movie-data.json.zip", 41 | "title_field": "{title} ({genres}) rate {rating}", // title on Alfred drop down menu 42 | "subtitle_field": "{description}", // subtitle on Alfred drop down menu 43 | "arg_field": "{movie_id}", // argument for other workflow component 44 | "autocomplete_field": "{title}", // tab auto complete behavior 45 | "icon_field": "movie-icon.png" 46 | } -------------------------------------------------------------------------------- /tests/test_dataset.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from afwf_fts_anything.paths import path_setting, path_data, dir_index, dir_icon 4 | from afwf_fts_anything.dataset import Dataset 5 | 6 | 7 | class TestDataset: 8 | def test_property(self): 9 | dataset = Dataset(name="movie") 10 | assert dataset._path_data.basename == "movie-data.json" 11 | assert dataset._path_setting.basename == "movie-setting.json" 12 | assert dataset._dir_index.basename == "movie-whoosh_index" 13 | assert dataset._dir_icon.basename == "movie-icon" 14 | 15 | def test_indexing(self): 16 | dataset = Dataset( 17 | name="movie", 18 | path_setting=path_setting, 19 | path_data=path_data, 20 | dir_index=dir_index, 21 | dir_icon=dir_icon, 22 | ) 23 | dataset.build_index(data=dataset.get_data(), rebuild=True) 24 | 25 | # cache should work 26 | for _ in range(3): 27 | doc_list = dataset.search("god father") 28 | assert doc_list[0]["movie_id"] == 2 29 | 30 | doc_list = dataset.search("drama", limit=3) 31 | assert [doc["movie_id"] for doc in doc_list] == [1, 2, 3] 32 | 33 | 34 | if __name__ == "__main__": 35 | from afwf_fts_anything.tests import run_cov_test 36 | 37 | run_cov_test(__file__, "afwf_fts_anything.dataset", preview=False) 38 | -------------------------------------------------------------------------------- /tests/test_helpers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | from afwf_fts_anything.helpers import is_no_overlap 5 | 6 | 7 | def test_is_no_overlap(): 8 | assert is_no_overlap([[1, 2], [3, 4]]) is True 9 | assert is_no_overlap([[1, 2], [2, 3]]) is False 10 | 11 | 12 | if __name__ == "__main__": 13 | from afwf_fts_anything.tests import run_cov_test 14 | 15 | run_cov_test(__file__, "afwf_fts_anything.helpers", preview=False) 16 | -------------------------------------------------------------------------------- /tests/test_import.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import pytest 5 | import afwf_fts_anything 6 | 7 | 8 | def test_import(): 9 | _ = afwf_fts_anything.wf 10 | 11 | 12 | if __name__ == "__main__": 13 | basename = os.path.basename(__file__) 14 | pytest.main([basename, "-s", "--tb=native"]) 15 | -------------------------------------------------------------------------------- /tests/test_setting.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import pytest 4 | from afwf_fts_anything.exc import MalformedSettingError 5 | from afwf_fts_anything.setting import Field, Setting 6 | 7 | 8 | class TestField: 9 | def test(self): 10 | with pytest.raises(MalformedSettingError): 11 | Field( 12 | name="weight", 13 | type_is_store=False, 14 | is_sortable=True, 15 | ) 16 | 17 | 18 | class TestSetting: 19 | def test(self): 20 | setting = Setting( 21 | fields=[ 22 | Field( 23 | name="movie_id", 24 | type_is_store=True, 25 | ), 26 | Field( 27 | name="title", 28 | type_is_store=True, 29 | type_is_ngram=True, 30 | ), 31 | Field( 32 | name="description", 33 | type_is_store=True, 34 | type_is_phrase=True, 35 | ), 36 | Field( 37 | name="genres", 38 | type_is_store=True, 39 | type_is_keyword=True, 40 | ), 41 | Field( 42 | name="rating", 43 | type_is_store=True, 44 | type_is_numeric=True, 45 | is_sortable=True, 46 | is_sort_ascending=False, 47 | ), 48 | Field( 49 | name="url", 50 | type_is_store=True, 51 | ), 52 | ], 53 | title_field="Movie Title: {title} [{genres}]", 54 | subtitle_field="{description}", 55 | arg_field="{url}", 56 | autocomplete_field="{title}", 57 | ) 58 | schema = setting.create_whoosh_schema() 59 | assert len(setting.store_fields) == 6 60 | assert len(setting.searchable_fields) == 4 61 | assert len(setting.sortable_fields) == 1 62 | 63 | data = { 64 | "title": "The Godfather", 65 | "description": "The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.", 66 | "genres": "Crime, Drama", 67 | "url": "https://www.imdb.com/title/tt0068646/", 68 | } 69 | assert setting.format_title(data) == "Movie Title: The Godfather [Crime, Drama]" 70 | assert setting.format_subtitle(data) == data["description"] 71 | assert setting.format_arg(data) == data["url"] 72 | assert setting.format_autocomplete(data) == data["title"] 73 | 74 | # from dict 75 | setting = Setting.from_dict( 76 | { 77 | "fields": [ 78 | { 79 | "name": "movie_id", 80 | "type_is_store": True, 81 | }, 82 | { 83 | "name": "title", 84 | "type_is_store": True, 85 | "type_is_ngram": True, 86 | }, 87 | { 88 | "name": "description", 89 | "type_is_store": True, 90 | "type_is_phrase": True, 91 | }, 92 | { 93 | "name": "genres", 94 | "type_is_store": True, 95 | "type_is_keyword": True, 96 | }, 97 | { 98 | "name": "rating", 99 | "type_is_store": True, 100 | "type_is_numeric": True, 101 | "is_sortable": True, 102 | "is_sort_ascending": False, 103 | }, 104 | { 105 | "name": "url", 106 | "type_is_store": True, 107 | }, 108 | ], 109 | "title_field": "Movie Title: {title} [{genres}]", 110 | "subtitle_field": "{description}", 111 | "arg_field": "{url}", 112 | } 113 | ) 114 | assert len(setting.store_fields) == 6 115 | assert len(setting.searchable_fields) == 4 116 | assert len(setting.sortable_fields) == 1 117 | 118 | # you have duplicate field names in your fields: ['field1', 'field1'] 119 | with pytest.raises(MalformedSettingError): 120 | Setting(fields=[Field(name="field1"), Field(name="field1")]) 121 | 122 | # you have to specify one and only one index type for column 'title', valid types are: ngram, phrase, keyword. 123 | with pytest.raises(MalformedSettingError): 124 | Setting( 125 | fields=[ 126 | Field( 127 | name="title", 128 | type_is_keyword=True, 129 | type_is_phrase=True, 130 | ) 131 | ], 132 | ) 133 | 134 | # when title_field is not defined, you have to have a field called 'title' in your data fields, here's your data fields: [] 135 | with pytest.raises(MalformedSettingError): 136 | Setting(fields=[]) 137 | 138 | # the title field is not a stored field! 139 | with pytest.raises(MalformedSettingError): 140 | Setting( 141 | fields=[ 142 | Field( 143 | name="title", 144 | type_is_store=False, 145 | ) 146 | ] 147 | ) 148 | 149 | # your title_field = 'Movie Title: {the_movie_title}, {another_movie_title}' contains a field name 'the_movie_title', but it is not defined in your fields: ['movie_title'] 150 | with pytest.raises(MalformedSettingError): 151 | Setting( 152 | fields=[ 153 | Field( 154 | name="movie_title", 155 | ) 156 | ], 157 | title_field="Movie Title: {the_movie_title}, {another_movie_title}", 158 | ) 159 | 160 | # our title_field = 'Movie Title: {movie_title}' contains a field name 'movie_title', but this field is not stored: Field(name='movie_title', type_is_store=False, type_is_ngram=False, type_is_phrase=False, type_is_keyword=False, ngram_minsize=2, ngram_maxsize=10, keyword_lowercase=True, keyword_commas=True) 161 | with pytest.raises(MalformedSettingError): 162 | Setting( 163 | fields=[ 164 | Field( 165 | name="movie_title", 166 | type_is_store=False, 167 | ) 168 | ], 169 | title_field="Movie Title: {movie_title}", 170 | ) 171 | 172 | 173 | if __name__ == "__main__": 174 | from afwf_fts_anything.tests import run_cov_test 175 | 176 | run_cov_test(__file__, "afwf_fts_anything.setting", preview=False) 177 | --------------------------------------------------------------------------------