├── tests
│   ├── __init__.py
│   ├── test_query.py
│   ├── test_result.py
│   ├── fixtures.py
│   ├── base.py
│   ├── conftest.py
│   ├── test_utils.py
│   ├── test_script.py
│   ├── test_function.py
│   ├── test_pagination.py
│   ├── test_codec.py
│   ├── test_actions.py
│   └── test_types.py
├── tests_integ
│   ├── __init__.py
│   ├── asyncio
│   │   ├── __init__.py
│   │   ├── test_bulk.py
│   │   ├── conftest.py
│   │   ├── test_pagination.py
│   │   ├── test_search.py
│   │   └── test_index.py
│   ├── general
│   │   ├── __init__.py
│   │   ├── conftest.py
│   │   └── test_indexing.py
│   └── conftest.py
├── .python-version
├── elasticmagic
│   ├── ext
│   │   ├── __init__.py
│   │   ├── asyncio
│   │   │   ├── __init__.py
│   │   │   ├── pagination
│   │   │   │   ├── __init__.py
│   │   │   │   └── flask.py
│   │   │   ├── search.py
│   │   │   ├── index.py
│   │   │   └── cluster.py
│   │   ├── queryfilter
│   │   │   ├── __init__.py
│   │   │   └── codec.py
│   │   └── pagination
│   │       ├── __init__.py
│   │       └── flask.py
│   ├── version.py
│   ├── __init__.py
│   ├── datastructures.py
│   ├── util.py
│   ├── function.py
│   ├── actions.py
│   ├── attribute.py
│   ├── result.py
│   ├── index.py
│   ├── cluster.py
│   └── types.py
├── requirements_lint.txt
├── .coveragerc
├── pytest.ini
├── docs
│   ├── aggregations.rst
│   ├── search_api.rst
│   ├── index.rst
│   ├── quick_start.rst
│   ├── make.bat
│   ├── Makefile
│   └── conf.py
├── .gitignore
├── .github
│   ├── gh-tox-envs.py
│   └── workflows
│       └── python.yaml
├── .bumpversion.cfg
├── scripts
│   └── wait_es.sh
├── README.md
├── benchmark
│   ├── README.rst
│   └── run.py
├── pyproject.toml
├── vagga.yaml
└── LICENSE

/tests/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests_integ/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
3.12
--------------------------------------------------------------------------------
/elasticmagic/ext/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests_integ/asyncio/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests_integ/general/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/elasticmagic/version.py:
--------------------------------------------------------------------------------
__version__ = '0.3.0'
--------------------------------------------------------------------------------
/requirements_lint.txt:
--------------------------------------------------------------------------------
flake8==3.7.9
flake8-print==3.1.4
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
[run]
include = elasticmagic/*
branch = true
--------------------------------------------------------------------------------
/tests/test_query.py:
--------------------------------------------------------------------------------
import unittest


class QueryTestCase(unittest.TestCase):
    def test(self):
        pass
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
[pytest]
markers =
    get: Elasticsearch get API
    multi_get: Elasticsearch multi_get API
    explain: Elasticsearch explain API
--------------------------------------------------------------------------------
/docs/aggregations.rst:
--------------------------------------------------------------------------------
.. _aggregations:

============
Aggregations
============

.. automodule:: elasticmagic.agg
    :member-order: bysource
    :members:
--------------------------------------------------------------------------------
/elasticmagic/ext/asyncio/__init__.py:
--------------------------------------------------------------------------------
from .cluster import AsyncCluster
from .index import AsyncIndex
from .search import AsyncSearchQuery

__all__ = ['AsyncCluster', 'AsyncIndex', 'AsyncSearchQuery']
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*.pyc
*.py~

.python_history

# Emacs
*~
\#*#
.ropeproject/

# PyCharm
.idea/

# Distribution / packaging
build/
dist/
*.egg-info/
.tox/
.vagga/
.cache/
htmlcov
.coverage
docs/_build
--------------------------------------------------------------------------------
/.github/gh-tox-envs.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
import sys
import tomllib


proj = tomllib.load(open("pyproject.toml", "rb"))
print(
    " ".join(
        f"-e {tox_env}"
        for tox_env in proj["tool"]["tox"]["gh"]["python"][sys.argv[1]]
    )
)
--------------------------------------------------------------------------------
/docs/search_api.rst:
--------------------------------------------------------------------------------
.. _search_api:

================
Search Query API
================

.. automodule:: elasticmagic.search

.. autoclass:: elasticmagic.search.SearchQuery
    :inherited-members:
    :members:
    :member-order: bysource

.. autoclass:: elasticmagic.ext.asyncio.search.AsyncSearchQuery
    :member-order: bysource
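
A minimal usage sketch, adapted from this repository's integration tests
(``Car`` stands in for any ``Document`` subclass and ``index`` for an
already-created index; both names are illustrative):

.. code-block:: python

    sq = index.search_query(Car.name.match('Sally')).limit(1)
    result = sq.get_result()
    for doc in result:
        print(doc._id, doc.name)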
--------------------------------------------------------------------------------
/tests/test_result.py:
--------------------------------------------------------------------------------
from elasticmagic import agg, types
from elasticmagic.result import SearchResult


def test_search_result_with_error_and_aggregations():
    raw_result = {'error': True}
    res = SearchResult(
        raw_result,
        aggregations={'types': agg.Terms(field='type', type=types.Integer)}
    )
    assert res.aggregations['types'].buckets == []
--------------------------------------------------------------------------------
/.bumpversion.cfg:
--------------------------------------------------------------------------------
[bumpversion]
current_version = 0.3.0
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<stage>a|b|rc)?(?P<stage_num>\d+)?
serialize =
    {major}.{minor}.{patch}{stage}{stage_num}
    {major}.{minor}.{patch}
commit = False
tag = False

[bumpversion:part:stage]
values =
    a
    b
    rc

[bumpversion:file:setup.py]

[bumpversion:file:elasticmagic/version.py]
--------------------------------------------------------------------------------
/scripts/wait_es.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

host=$1

echo " * Waiting for Elasticsearch cluster: ${host} ..."
while true; do
    output=`curl -XGET "${host}/_cat/health?h=status" -s | tr -d '[[:space:]]'`
    if [ -z "${output}" ]; then
        echo -n "."
    else
        echo " > Elasticsearch status: ${output}"
    fi
    if [ "${output}" = "green" -o "${output}" = "yellow" ]; then
        break
    fi
    sleep 1;
done
echo " * Elasticsearch is ready!"
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
.. elasticmagic documentation master file, created by
   sphinx-quickstart on Thu Nov 3 13:52:21 2016.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to Elasticmagic's documentation!
========================================

Contents:

.. toctree::
   :maxdepth: 2

   quick_start
   search_api
   aggregations


Indices and tables
==================

* :ref:`genindex`
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[![Test and build](https://github.com/anti-social/elasticmagic/actions/workflows/python.yaml/badge.svg)](https://github.com/anti-social/elasticmagic/actions/workflows/python.yaml)
[![codecov](https://codecov.io/gh/anti-social/elasticmagic/branch/master/graph/badge.svg)](https://codecov.io/gh/anti-social/elasticmagic)
[![docs](https://readthedocs.org/projects/elasticmagic/badge/?version=latest)](https://elasticmagic.readthedocs.io/en/latest/)

elasticmagic
============

Python DSL for Elasticsearch.

Caution: do not use this yet, it is still very much alpha.
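
A rough sketch of what the DSL looks like, adapted from this repository's
test suite (the index name and the `Car` document class are made up for
illustration):

```python
from elasticsearch import Elasticsearch
from elasticmagic import Cluster, Document, Field
from elasticmagic.types import Text


class Car(Document):
    __doc_type__ = 'car'

    name = Field(Text())


client = Elasticsearch()
client.indices.create(index='cars')

# Wrap the client in a cluster and pick an index
index = Cluster(client)['cars']
index.put_mapping(Car)
index.add([Car(_id=1, name='Lightning McQueen')], refresh=True)

# Build and run a search query
res = index.search_query(Car.name.match('lightning')).get_result()
print(res.total)
```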
--------------------------------------------------------------------------------
/tests/fixtures.py:
--------------------------------------------------------------------------------
import pytest

from elasticsearch import Elasticsearch

from elasticmagic import Cluster, Index
from elasticmagic.compiler import all_compilers
from elasticmagic.compiler import Compiler_7_0


@pytest.fixture
def client():
    yield Elasticsearch()


@pytest.fixture
def cluster(client):
    yield Cluster(
        client, autodetect_es_version=False, compiler=Compiler_7_0
    )


@pytest.fixture
def index(cluster):
    yield Index(cluster, 'test')


@pytest.fixture(params=all_compilers)
def compiler(request):
    return request.param
--------------------------------------------------------------------------------
/elasticmagic/ext/queryfilter/__init__.py:
--------------------------------------------------------------------------------
from .queryfilter import QueryFilter, FacetFilter, RangeFilter
from .queryfilter import FacetQueryFilter, FacetQueryValue
from .queryfilter import SimpleFilter, SimpleQueryFilter, SimpleQueryValue
from .queryfilter import OrderingFilter, OrderingValue
from .queryfilter import PageFilter
from .queryfilter import NestedFacetFilter, NestedRangeFilter
from .queryfilter import BinaryFilter


__all__ = [
    'BinaryFilter',
    'FacetFilter',
    'FacetQueryFilter',
    'FacetQueryValue',
    'NestedFacetFilter',
    'NestedRangeFilter',
    'OrderingFilter',
    'OrderingValue',
    'PageFilter',
    'QueryFilter',
    'RangeFilter',
    'SimpleFilter',
    'SimpleQueryFilter',
    'SimpleQueryValue',
]
--------------------------------------------------------------------------------
/tests/base.py:
--------------------------------------------------------------------------------
import unittest
from unittest.mock import MagicMock

from elasticmagic import Cluster, Index
from elasticmagic.compiler import Compiler_6_0


class BaseTestCase(unittest.TestCase):
    maxDiff = None

    def setUp(self):
        self.client = MagicMock()
        self.cluster = Cluster(self.client, compiler=Compiler_6_0)
        self.index = Index(self.cluster, 'test')

    def assert_expression(self, expr, expected, compiler=None):
        compiler = compiler or Compiler_6_0
        self.assertEqual(expr.to_dict(compiler=compiler), expected)


class OrderTolerantString(object):

    def __init__(self, line, sep):
        self.line = line
        self.sep = sep

    def __eq__(self, other):
        return set(self.line.split(self.sep)) == set(other.split(self.sep))
--------------------------------------------------------------------------------
/tests_integ/general/conftest.py:
--------------------------------------------------------------------------------
import os

import pytest

from elasticmagic import Cluster

from elasticsearch import Elasticsearch

from ..conftest import Car


@pytest.fixture
def es_client(es_url):
    es_url = os.environ.get('ES_URL', es_url)
    es_client = Elasticsearch([es_url])
    yield es_client
    if hasattr(es_client.transport, 'close'):
        es_client.transport.close()


@pytest.fixture
def es_cluster(es_client):
    return Cluster(es_client)


@pytest.fixture
def es_index(es_cluster, es_client, index_name):
    es_client.indices.create(index=index_name)
    es_index = es_cluster[index_name]
    es_index.put_mapping(Car)
    yield es_index
    es_client.indices.delete(index=index_name)


@pytest.fixture
def cars(es_index, car_docs):
    es_index.add(car_docs, refresh=True)
    yield car_docs
--------------------------------------------------------------------------------
/elasticmagic/ext/asyncio/pagination/__init__.py:
--------------------------------------------------------------------------------
from elasticmagic.ext.pagination import BaseSearchQueryWrapper


class AsyncSearchQueryWrapper(BaseSearchQueryWrapper):

    async def _getitem_async(self, k):
        self._prepare_getitem(k)
        self.items = list(await self.sliced_query)
        self.count = (await self.sliced_query.get_result()).total
        return self.items

    def __getitem__(self, k):
        return self._getitem_async(k)

    def __await__(self):
        if self.items is None:
            raise ValueError('Slice first')
        return self.sliced_query.__await__()

    def __len__(self):
        if self.count is None:
            raise ValueError('Slice first')
        return self.count

    async def get_result(self):
        if self.sliced_query is None:
            raise ValueError('Slice first')
        return await self.sliced_query.get_result()
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
from .fixtures import client, cluster, compiler, index  # noqa: F401


def assert_expression(expr, expected, compiler):  # noqa: F811
    assert expr.to_dict(compiler=compiler) == expected


def assert_same_elements(seq1, seq2):
    it1 = iter(seq1)
    it2 = iter(seq2)
    i = 0
    while True:
        try:
            e1 = next(it1)
        except StopIteration:
            try:
                e2 = next(it2)
            except StopIteration:
                break
            else:
                raise AssertionError(
                    'seq2 has more elements than seq1: {!r}'.format(e2)
                )
        try:
            e2 = next(it2)
        except StopIteration:
            raise AssertionError(
                'seq1 has more elements than seq2: {!r}'.format(e1)
            )
        assert e1 is e2, '{} element is not the same'.format(i)
        i += 1
--------------------------------------------------------------------------------
/elasticmagic/ext/asyncio/pagination/flask.py:
--------------------------------------------------------------------------------
from elasticmagic.cluster import MAX_RESULT_WINDOW

from . import AsyncSearchQueryWrapper
from ...pagination.flask import BasePagination


class AsyncPagination(BasePagination):
    """Helper class to provide compatibility with the Flask-SQLAlchemy
    paginator.
    """

    @classmethod
    async def create(
        cls, query, page=1, per_page=10, max_items=MAX_RESULT_WINDOW
    ):
        self = cls()
        self.original_query = query
        self.query = AsyncSearchQueryWrapper(query, max_items=max_items)
        self.page = page if page > 0 else 1
        self.per_page = per_page
        self.max_items = max_items
        self.offset = (self.page - 1) * self.per_page
        self.items = await self.query[self.offset:self.offset + self.per_page]
        self.total = (await self.query.get_result()).total
        return self

    async def prev(self):
        return await self.create(
            self.original_query, **self._prev_page_params()
        )

    async def next(self):
        return await self.create(
            self.original_query, **self._next_page_params()
        )
--------------------------------------------------------------------------------
/tests_integ/general/test_indexing.py:
--------------------------------------------------------------------------------
from elasticmagic import SearchQuery

from ..conftest import Car


def test_adding_documents(es_index):
    es_index.add(
        [
            Car(_id=1, name='Lightning McQueen'),
            Car(_id=2, name='Sally Carerra'),
        ]
    )

    doc = es_index.get(1, doc_cls=Car)
    assert doc.name == 'Lightning McQueen'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = es_index.get(2, doc_cls=Car)
    assert doc.name == 'Sally Carerra'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score is None


def test_scroll(es_index, cars):
    search_res = es_index.search(
        SearchQuery(), scroll='1m',
    )

    assert search_res.total == 2
    assert len(search_res.hits) == 2
    assert search_res.scroll_id is not None

    scroll_res = es_index.scroll(search_res.scroll_id, scroll='1m')

    assert scroll_res.total == 2
    assert len(scroll_res.hits) == 0

    clear_scroll_res = es_index.clear_scroll(scroll_res.scroll_id)

    assert clear_scroll_res.succeeded is True
--------------------------------------------------------------------------------
/benchmark/README.rst:
--------------------------------------------------------------------------------
Benchmarking
============

This tool is intended to find bottlenecks in this library.

The script has two modes: generating sample data and processing sample data.

Generating sample data
----------------------

To generate data, use the following command:

.. code-block:: bash

    python benchmark/run.py sample -o sample.json

See ``--help`` for available options.

A useful option is ``-s/--size``, which sets the sample data magnitude; the default is 2 (100 docs).
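
For example, to generate a larger sample (assuming the magnitude is a power
of ten, as the default of "2 (100 docs)" suggests):

.. code-block:: bash

    # presumably 10**4 = 10000 documents
    python benchmark/run.py sample -s 4 -o sample.json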
Processing sample data
----------------------

Run:

.. code-block:: bash

    python benchmark/run.py run simple -i sample.json


Some results
------------

``SearchResult`` instantiation time, ms:

+---------+----------+-----------+-----------+
|         | -s 2     | -s 3      | -s 4      |
+---------+----------+-----------+-----------+
| -t hits | 2.862    | 22.314    | 224.592   |
+---------+----------+-----------+-----------+
| -t aggs | 0.934    | 4.032     | 33.442    |
+---------+----------+-----------+-----------+
| -t all  | 3.287    | 26.199    | 253.697   |
+---------+----------+-----------+-----------+

Command run:

.. code-block:: bash

    $ python benchmark/run.py sample -s S -t T | python benchmark/run.py run simple
--------------------------------------------------------------------------------
/tests_integ/conftest.py:
--------------------------------------------------------------------------------
import uuid

import pytest

from elasticmagic import Document, Field
from elasticmagic.types import Text


def pytest_addoption(parser):
    parser.addoption("--es-url", action="store", default="localhost:9200")


def pytest_generate_tests(metafunc):
    # This is called for every test. Only get/set command line arguments
    # if the argument is specified in the list of test "fixturenames".
    option_value = metafunc.config.option.es_url
    if 'es_url' in metafunc.fixturenames:
        metafunc.parametrize("es_url", [option_value])


class Car(Document):
    __doc_type__ = 'car'

    name = Field(Text())


@pytest.fixture
def index_name():
    return 'test-{}'.format(str(uuid.uuid4()).split('-')[0])


@pytest.fixture
def car_docs():
    yield [
        Car(_id=1, name='Lightning McQueen'),
        Car(_id=2, name='Sally Carerra'),
    ]


@pytest.fixture
def all_car_docs():
    yield [
        Car(_id=1, name='Lightning McQueen'),
        Car(_id=2, name='Sally Carerra'),
        Car(_id=3, name='Doc Hudson'),
        Car(_id=4, name='Ramone'),
        Car(_id=5, name='Luigi'),
        Car(_id=6, name='Guido'),
        Car(_id=7, name='Flo'),
        Car(_id=8, name='Sarge'),
        Car(_id=9, name='Sheriff'),
        Car(_id=10, name='Fillmore'),
        Car(_id=11, name='Mack'),
    ]
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
from .base import BaseTestCase

from elasticmagic.util import merge_params
from elasticmagic.expression import Params


class UtilsTest(BaseTestCase):

    def test_merge_params(self):
        original = Params()
        self.assertEqual(tuple(original.items()), ())

        p = merge_params(original, (), {})
        self.assertEqual(original, p)
        self.assertIsNot(original, p)

        p = merge_params(original, ({'key': 'value'},), {})
        self.assertEqual(tuple(p.items()), (('key', 'value'),))
        self.assertNotEqual(original, p)
        self.assertIsNot(original, p)

        p = merge_params(original, (), dict(key='value'))
        self.assertEqual(tuple(p.items()), (('key', 'value'),))
        self.assertNotEqual(original, p)
        self.assertIsNot(original, p)

        p = merge_params(p, ({'key': 'new value'}, ), {'foo': 'bar'})
        self.assertEqual(sorted(p.items()),
                         [('foo', 'bar'), ('key', 'new value')])

        self.assertRaises(AssertionError,
                          lambda: merge_params(original, None, {}))
        self.assertRaises(AssertionError,
                          lambda: merge_params(original, object(), {}))
        self.assertRaises(AssertionError,
                          lambda: merge_params(original, (), None))
        self.assertRaises(AssertionError,
                          lambda: merge_params(original, (), []))
--------------------------------------------------------------------------------
/elasticmagic/__init__.py:
--------------------------------------------------------------------------------
from .cluster import Cluster
from .compiler import MultiSearchError
from .document import Document, DynamicDocument
from .expression import (
    Params, Term, Terms, Exists, Missing, Range,
    Match, MatchPhrase, MatchPhrasePrefix, MultiMatch, MatchAll,
    Bool, Query, DisMax, Ids, Prefix, Limit,
    Sort, Boosting, Common, ConstantScore, FunctionScore,
    Field, SpanFirst, SpanMulti, SpanNear, SpanNot, SpanOr, SpanTerm,
    Nested, HasParent, HasChild, QueryRescorer, Script, SortScript
)
from .index import Index
from .search import SearchQuery
from .types import ValidationError
from .version import __version__
from .function import (
    Weight, Factor, ScriptScore, RandomScore, Gauss, Exp, Linear,
    FieldValueFactor,
)


__all__ = [
    'Cluster', 'MultiSearchError',

    'Document', 'DynamicDocument',

    'Params', 'Term', 'Terms', 'Exists', 'Missing',
    'Match', 'MatchPhrase', 'MatchPhrasePrefix', 'MultiMatch', 'MatchAll',
    'Range', 'Bool', 'Query', 'DisMax', 'Ids',
    'Prefix', 'Limit', 'Sort', 'Boosting', 'Common',
    'ConstantScore', 'FunctionScore', 'Field',
    'SpanFirst', 'SpanMulti', 'SpanNear', 'SpanNot', 'SpanOr', 'SpanTerm',
    'Nested', 'HasParent', 'HasChild', 'QueryRescorer', 'SortScript',

    'Index',

    'SearchQuery',

    'ValidationError',

    '__version__',

    'Weight', 'Factor', 'ScriptScore', 'RandomScore', 'Gauss', 'Exp', 'Linear',
    'Script', 'FieldValueFactor',
]
--------------------------------------------------------------------------------
/elasticmagic/datastructures.py:
--------------------------------------------------------------------------------
import fnmatch


class OrderedAttributes(object):
    __visit_name__ = 'ordered_attributes'

    def __init__(self, data=None, defaults=None):
        self._dict = {}
        self._keys = []
        self._defaults = defaults or {}

        if data:
            for k, v in data:
                self[k] = v

    def _get_default(self, key):
        for template, default in self._defaults.items():
            if fnmatch.fnmatch(key, template):
                return default

    def __setitem__(self, key, value):
        self._dict[key] = value
        self._keys.append(key)

    def __getitem__(self, key):
        if key not in self._dict:
            default = self._get_default(key)
            if default:
                return default(key)
        return self._dict[key]

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            raise AttributeError("Has no field '%s'" % key)

    def __contains__(self, key):
        return key in self._dict

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def keys(self):
        return iter(self._keys)

    def values(self):
        return (self._dict[k] for k in self._keys)

    def items(self):
        return ((k, self._dict[k]) for k in self._keys)

    def __iter__(self):
        return self.values()

    def __len__(self):
        return len(self._dict)
--------------------------------------------------------------------------------
/tests/test_script.py:
--------------------------------------------------------------------------------
import unittest

from .base import BaseTestCase


@unittest.skip
class ScriptTest(BaseTestCase):

    def test(self):
        self.assert_expression(
            script.doc[ShopDocument.type].value,
            "doc['type'].value"
        )
        self.assert_expression(
            script.doc[ShopDocument.location].distance(30.5, 51.2),
            "doc['location'].distance(30.5, 51.2)"
        )
        self.assert_expression(
            script.doc[ShopDocument.rank] * 100 + 20,
            "doc['rank'] * 100 + 20"
        )
        self.assert_expression(
            script._fields[ShopDocument.holydays].values,
            "_fields['holidays'].values"
        )
        self.assert_expression(
            script._source.employee.person.name,
            "_source.employee.person.name"
        )
        self.assert_expression(
            script.sin(0),
            "sin(0)"
        )
        self.assert_expression(
            script.sin(script.toRadians(30)),
            "sin(toRadians(30))"
        )
        self.assert_expression(
            script.log(script._score * 2) + 8,
            "log(_score * 2) + 8"
        )
        self.assert_expression(
            script._index.num_docs(),
            "_index.numDocs()"
        )
        self.assert_expression(
            script._index[ShopDocument.region].docCount(),
            "_index['region'].docCount()"
        )
        self.assert_expression(
            script._index[ShopDocument.region][19].tf(),
            "_index['region'][19].tf()"
        )
--------------------------------------------------------------------------------
/elasticmagic/util.py:
--------------------------------------------------------------------------------
from collections.abc import Iterable, Mapping
from functools import wraps
from itertools import chain


def _with_clone(fn):
    @wraps(fn)
    def wrapper(self, *args, **kwargs):
        clone = self.clone()
        res = fn(clone, *args, **kwargs)
        if res is not None:
            return res
        return clone
    return wrapper


class cached_property(object):
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, type=None):
        if instance is None:
            return self

        res = instance.__dict__[self.func.__name__] = self.func(instance)
        return res


def to_camel_case(s):
    return u''.join(map(lambda w: w.capitalize(), s.split('_')))


def clean_params(params, **kwargs):
    return {
        p: v for p, v in chain(params.items(), kwargs.items())
        if v is not None
    }


def collect_doc_classes(expr):
    if hasattr(expr, '_collect_doc_classes'):
        return expr._collect_doc_classes()

    if isinstance(expr, dict):
        return set().union(
            *[collect_doc_classes(e)
              for e in chain(expr.keys(), expr.values())]
        )

    if isinstance(expr, (list, tuple)):
        return set().union(*[collect_doc_classes(e) for e in expr])

    return set()


def maybe_float(value):
    if value is None:
        return None
    return float(value)


def merge_params(params, args, kwargs):
    assert isinstance(args, Iterable), args
    assert isinstance(kwargs, Mapping), kwargs
    new = dict()
    for a in args:
        new.update(a)
    new.update(kwargs)
    return type(params)(params, **new)
--------------------------------------------------------------------------------
/tests_integ/asyncio/test_bulk.py:
--------------------------------------------------------------------------------
import pytest

from elasticmagic.actions import Create
from elasticmagic.actions import Delete
from elasticmagic.actions import Index
from elasticmagic.actions import Update

from .conftest import Car


@pytest.mark.asyncio
async def test_doc_params(es_index):
    res = await es_index.bulk(
        [
            Index(Car(_id=1, _routing=2, name='Doc Hudson'))
        ],
        refresh=True
    )
    assert not res.errors
    assert res.items[0].status == 201

    doc = await es_index.get(1, doc_cls=Car, routing=2)
    assert doc._id == '1'
    assert doc._routing == '2'
    assert doc._version == 1
    assert doc.name == 'Doc Hudson'

    await es_index.bulk(
        [
            Delete(Car(_id=1, _routing=2))
        ],
        refresh=True,
    )
    assert (await es_index.count()).count == 0


@pytest.mark.asyncio
async def test_bulk_update_params(es_index):
    res = await es_index.bulk(
        [
            Update(
                Car(_id=1, name='Doc Hudson'),
                doc_as_upsert=True,
                retry_on_conflict=2,
            )
        ],
        refresh=True
    )
    assert not res.errors
    assert res.items[0].status == 201

    doc = await es_index.get(1, doc_cls=Car)
    assert doc._id == '1'
    assert doc._routing is None
    assert doc._version == 1
    assert doc.name == 'Doc Hudson'


@pytest.mark.asyncio
async def test_bulk_create_params(es_index):
    res = await es_index.bulk(
        [
            Create(Car(_id=1, _routing=2, name='Doc Hudson'))
        ],
        refresh=True
    )
    assert not res.errors
    assert res.items[0].status == 201

    doc = await es_index.get(Car(_id=1, _routing=2))
    assert doc._id == '1'
    assert doc._routing == '2'
    assert doc._version == 1
    assert doc.name == 'Doc Hudson'
--------------------------------------------------------------------------------
/elasticmagic/function.py:
--------------------------------------------------------------------------------
from .expression import Params, ParamsExpression


class Function(ParamsExpression):
    __visit_name__ = 'function'

    def __init__(self, filter=None, weight=None, **kwargs):
        self.filter = filter
        self.weight = weight
        super(Function, self).__init__(**kwargs)


class Weight(Function):
    __func_name__ = 'weight'
    __visit_name__ = 'weight_function'

    def __init__(self, weight, filter=None):
        super(Weight, self).__init__(filter=filter, weight=weight)


class FieldValueFactor(Function):
    __func_name__ = 'field_value_factor'

    def __init__(
            self, field, factor=None, modifier=None, missing=None,
            filter=None, **kwargs
    ):
        super(FieldValueFactor, self).__init__(
            field=field, factor=factor, modifier=modifier, missing=missing,
            filter=filter, **kwargs
        )


Factor = FieldValueFactor


class ScriptScore(Function):
    __func_name__ = 'script_score'

    def __init__(self, script, filter=None, **kwargs):
        super(ScriptScore, self).__init__(
            script=script, filter=filter, **kwargs
        )


class RandomScore(Function):
    __func_name__ = 'random_score'

    def __init__(self, seed=None, filter=None, **kwargs):
        super(RandomScore, self).__init__(seed=seed, filter=filter, **kwargs)


class DecayFunction(Function):
    __visit_name__ = 'decay_function'

    def __init__(
            self, field, origin, scale, offset=None, decay=None,
            multi_value_mode=None, **kwargs
    ):
        self.field = field
        self.decay_params = Params(
            origin=origin, scale=scale, offset=offset, decay=decay,
        )
        super(DecayFunction, self).__init__(
            multi_value_mode=multi_value_mode, **kwargs
        )


class Gauss(DecayFunction):
    __func_name__ = 'gauss'


class Exp(DecayFunction):
    __func_name__ = 'exp'


class Linear(DecayFunction):
    __func_name__ = 'linear'
--------------------------------------------------------------------------------
/tests_integ/asyncio/conftest.py:
--------------------------------------------------------------------------------
import asyncio
import gc
import os
import warnings

from elasticsearch import AsyncElasticsearch

import pytest
import pytest_asyncio

from elasticmagic.ext.asyncio.cluster import AsyncCluster

from ..conftest import Car


@pytest.fixture
def event_loop(request):
    """Create an instance of the default event loop for each test case.

    Also catches all warnings and raises an exception if there was a
    'coroutine was never awaited' warning.
    """
    loop = asyncio.get_event_loop_policy().new_event_loop()

    with warnings.catch_warnings(record=True) as caught_warnings:
        yield loop
        # Collecting garbage should trigger warnings for non-awaited
        # coroutines
        gc.collect()

    for w in caught_warnings:
        if (
            isinstance(w.message, RuntimeWarning) and
            str(w.message).endswith('was never awaited')
        ):
            raise w.message
        else:
            warnings.showwarning(w.message, w.category, w.filename, w.lineno)

    loop.close()


@pytest_asyncio.fixture
async def es_client(event_loop, es_url):
    es_url = os.environ.get('ES_URL', es_url)
    es_client = AsyncElasticsearch([es_url], event_loop=event_loop)
    yield es_client
    await es_client.transport.close()


@pytest_asyncio.fixture
def es_cluster(es_client):
    yield AsyncCluster(es_client)


@pytest_asyncio.fixture
async def es_index(es_cluster, es_client, index_name):
    await es_cluster.create_index(
        index_name,
        settings={
            'index': {
                'number_of_replicas': 0,
            }
        },
    )
    es_index = es_cluster[index_name]
    await es_index.put_mapping(Car)
    yield es_index
    await es_client.indices.delete(index=index_name)


@pytest_asyncio.fixture
async def cars(es_index, car_docs):
    await es_index.add(car_docs, refresh=True)
    yield car_docs


@pytest_asyncio.fixture
async def all_cars(es_index, all_car_docs):
    await es_index.add(all_car_docs, refresh=True)
    yield all_car_docs
--------------------------------------------------------------------------------
/elasticmagic/ext/pagination/__init__.py:
--------------------------------------------------------------------------------
import warnings

from ...cluster import MAX_RESULT_WINDOW


class BaseSearchQueryWrapper(object):
    """Elasticsearch also returns the total hit count with a search response,
    so we can fetch both the documents and the total hit count with a single
    request.
56 | """ 57 | warnings.warn( 58 | 'Field `result` is deprecated, use `get_result` method instead', 59 | DeprecationWarning 60 | ) 61 | return self.get_result() 62 | 63 | @property 64 | def results(self): 65 | """Deprecated!!! 66 | """ 67 | warnings.warn( 68 | 'Field `results` is deprecated, use `get_result` method instead', 69 | DeprecationWarning 70 | ) 71 | return self.get_result() 72 | -------------------------------------------------------------------------------- /elasticmagic/ext/asyncio/search.py: -------------------------------------------------------------------------------- 1 | from ...search import BaseSearchQuery 2 | 3 | 4 | class AsyncSearchQuery(BaseSearchQuery): 5 | """Asynchronous version of the :class:`.SearchQuery` 6 | """ 7 | 8 | async def to_dict(self, compiler=None): 9 | compiler = compiler or await self.get_compiler() 10 | return compiler.compiled_query(self).body 11 | 12 | async def get_compiler(self): 13 | return await self._index_or_cluster.get_compiler() 14 | 15 | async def get_query_compiler(self): 16 | return (await self.get_compiler()).compiled_query 17 | 18 | async def get_result(self): 19 | if self._cached_result is not None: 20 | return self._cached_result 21 | 22 | self._cached_result = await self._index_or_cluster.search(self) 23 | return self._cached_result 24 | 25 | async def count(self): 26 | return ( 27 | await self._index_or_cluster.count(self) 28 | ).count 29 | 30 | async def exists(self): 31 | return ( 32 | await self._index_or_cluster.exists(self) 33 | ).exists 34 | 35 | async def explain(self, doc, **kwargs): 36 | return await self._index_or_cluster.explain(self, doc, **kwargs) 37 | 38 | async def delete( 39 | self, conflicts=None, refresh=None, timeout=None, 40 | scroll=None, scroll_size=None, 41 | wait_for_completion=None, requests_per_second=None, 42 | **kwargs 43 | ): 44 | return await self._index_or_cluster.delete_by_query( 45 | self, 46 | conflicts=conflicts, 47 | refresh=refresh, 48 | timeout=timeout, 49 | scroll=scroll, 50 | scroll_size=scroll_size, 51 | wait_for_completion=wait_for_completion, 52 | requests_per_second=requests_per_second, 53 | **kwargs 54 | ) 55 | 56 | async def _iter_result_async(self): 57 | return self._iter_result(await self.get_result()) 58 | 59 | def __await__(self): 60 | return self._iter_result_async().__await__() 61 | 62 | async def _getitem_async(self, k): 63 | clone, is_slice = self._prepare_slice(k) 64 | if is_slice: 65 | return list(await clone) 66 | else: 67 | return list(await clone)[0] 68 | 69 | def __getitem__(self, k): 70 | return self._getitem_async(k) 71 | -------------------------------------------------------------------------------- /tests/test_function.py: -------------------------------------------------------------------------------- 1 | from elasticmagic import ( 2 | Weight, Factor, ScriptScore, RandomScore, Gauss, Exp, Linear, Script, 3 | DynamicDocument, Bool, 4 | ) 5 | from elasticmagic.compiler import Compiler_7_0 6 | 7 | 8 | PostDocument = DynamicDocument 9 | 10 | 11 | def test_weight(): 12 | assert Weight(3).to_elastic(Compiler_7_0) == {"weight": 3} 13 | assert Weight(2, filter=Bool.must( 14 | PostDocument.status.in_([0, 1]), 15 | PostDocument.created_date >= 'now/d-7d') 16 | ).to_elastic(Compiler_7_0) == { 17 | "weight": 2, 18 | "filter": { 19 | "bool": { 20 | "must": [ 21 | {"terms": {"status": [0, 1]}}, 22 | {"range": {"created_date": {"gte": "now/d-7d"}}} 23 | ] 24 | } 25 | } 26 | } 27 | 28 | 29 | def test_factor(): 30 | assert Factor(PostDocument.popularity).to_elastic(Compiler_7_0) == 
{ 31 | "field_value_factor": { 32 | "field": "popularity" 33 | } 34 | } 35 | 36 | 37 | def test_script_score(): 38 | assert ScriptScore( 39 | Script(lang='painless', 40 | inline='_score * doc[params.field].value', 41 | params={'field': PostDocument.popularity}) 42 | ).to_elastic(Compiler_7_0) == { 43 | "script_score": { 44 | "script": { 45 | "lang": "painless", 46 | "source": "_score * doc[params.field].value", 47 | "params": {"field": "popularity"} 48 | } 49 | } 50 | } 51 | 52 | 53 | def test_random_score(): 54 | assert RandomScore(17).to_elastic(Compiler_7_0) == {"random_score": {"seed": 17}} 55 | 56 | 57 | def test_gauss(): 58 | assert Gauss(PostDocument.created_date, origin='now', scale='1h').to_elastic(Compiler_7_0) == { 59 | "gauss": { 60 | "created_date": {"origin": "now", "scale": "1h"} 61 | } 62 | } 63 | 64 | 65 | def test_exp(): 66 | assert Exp(PostDocument.popularity, origin=0, scale=20).to_elastic(Compiler_7_0) == { 67 | "exp": { 68 | "popularity": {"origin": 0, "scale": 20} 69 | } 70 | } 71 | 72 | 73 | def test_linear(): 74 | assert Linear( 75 | PostDocument.places, origin='11,12', scale='2km', multi_value_mode='avg' 76 | ).to_elastic(Compiler_7_0) == { 77 | "linear": { 78 | "places": {"origin": "11,12", "scale": "2km"}, 79 | "multi_value_mode": "avg" 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "elasticmagic" 3 | description = "Add your description here" 4 | authors = [ 5 | { name = "Alexander Koval", email = "kovalidis@gmail.com" } 6 | ] 7 | dependencies = [ 8 | "python-dateutil>=2.8.2", 9 | "elasticsearch<7.14", 10 | ] 11 | readme = "README.md" 12 | requires-python = ">= 3.8" 13 | version = "0.3.0" 14 | 15 | [project.optional-dependencies] 16 | async = [ 17 | "elasticsearch[async]<7.14", 18 | ] 19 | 20 | [dependency-groups] 21 | dev = [ 22 | "coverage>=7.6.1", 23 | "flake8>=5.0.4", 24 | "flake8-pyproject>=1.2.3", 25 | "imagesize>=1.4.1", 26 | "pytest>=8.3.5", 27 | "pytest-asyncio>=0.24.0", 28 | "pytest-cov>=5.0.0", 29 | "sphinx>=7.1.2", 30 | "sphinx-rtd-theme>=3.0.2", 31 | ] 32 | 33 | [build-system] 34 | requires = ["hatchling"] 35 | build-backend = "hatchling.build" 36 | 37 | [tool.uv] 38 | managed = true 39 | 40 | [tool.hatch.metadata] 41 | allow-direct-references = true 42 | 43 | [tool.hatch.build.targets.wheel] 44 | packages = [ 45 | "elasticmagic", 46 | ] 47 | 48 | [tool.setuptools_scm] 49 | version_file = "elasticmagic/version.py" 50 | 51 | [tool.pytest.ini_options] 52 | addopts = "--cov-report=term --cov-report=html:htmlcov" 53 | filterwarnings = """ 54 | ignore::DeprecationWarning:elasticsearch_async.connection 55 | ignore:The loop argument is deprecated since Python 3.8:DeprecationWarning: 56 | """ 57 | 58 | [tool.coverage.run] 59 | branch = true 60 | source = "elasticmagic,tests" 61 | 62 | [tool.flake8] 63 | ignore = [ 64 | # comparison to None should be 'if cond is not None:' 65 | "E711", 66 | # line break before binary operator 67 | "W503", 68 | # line break after binary operator 69 | "W504", 70 | ] 71 | 72 | ### Tox configuration ### 73 | 74 | [tool.tox] 75 | env_list = ["py3.11", "py3.12", "py3.13", "pypy3.11", "flake8"] 76 | 77 | [tool.tox.env_run_base] 78 | runner = "uv-venv-lock-runner" 79 | commands = [ 80 | ["pytest", "tests", { replace = "posargs", extend = true }] 81 | ] 82 | 83 | [tool.tox.env.flake8] 84 | commands = [ 85 | ["flake8", "elasticmagic", "tests_integ", 
"tests/conftest.py", "tests/fixtures.py", "tests/test_document.py"] 86 | ] 87 | 88 | [tool.tox.env.doctest] 89 | set_env = { LC_ALL = "C" } 90 | commands = [ 91 | ["sphinx-build", "-b", "doctest", "docs", "docs/_build/doctest"] 92 | ] 93 | 94 | [tool.tox.env.integ] 95 | commands = [ 96 | ["pytest", "tests_integ/general", { replace = "posargs", extend = true }] 97 | ] 98 | 99 | [tool.tox.env.integ-async] 100 | extras = ["async"] 101 | commands = [ 102 | ["pytest", "tests_integ/asyncio", { replace = "posargs", extend = true }] 103 | ] 104 | 105 | [tool.tox.gh.python] 106 | "3.11" = ["py3.11"] 107 | "3.12" = ["py3.12"] 108 | "3.13" = ["py3.13"] 109 | "pypy3.11" = ["pypy3.11"] 110 | -------------------------------------------------------------------------------- /elasticmagic/actions.py: -------------------------------------------------------------------------------- 1 | from .util import clean_params 2 | 3 | 4 | class Action(object): 5 | __visit_name__ = 'action' 6 | __action_name__ = None 7 | 8 | def __init__(self, doc, index=None, doc_type=None, 9 | routing=None, parent=None, timestamp=None, ttl=None, 10 | version=None, version_type=None, **kwargs): 11 | from .index import Index as ESIndex 12 | index = index.get_name() if isinstance(index, ESIndex) else index 13 | 14 | self.doc = doc 15 | self.meta_params = clean_params({ 16 | '_index': index, 17 | '_type': doc_type, 18 | 'routing': routing, 19 | 'parent': parent, 20 | 'timestamp': timestamp, 21 | 'ttl': ttl, 22 | 'version': version, 23 | 'version_type': version_type, 24 | }, **kwargs) 25 | self.source_params = {} 26 | 27 | def to_meta(self, compiler): 28 | meta_compiler = compiler.compiled_bulk.compiled_meta 29 | return meta_compiler(self).body 30 | 31 | def to_source(self, compiler): 32 | source_compiler = compiler.compiled_bulk.compiled_source 33 | return source_compiler(self).body 34 | 35 | 36 | class Index(Action): 37 | __action_name__ = 'index' 38 | 39 | 40 | class Delete(Action): 41 | __action_name__ = 'delete' 42 | 43 | def get_source(self): 44 | pass 45 | 46 | 47 | class Create(Action): 48 | __action_name__ = 'create' 49 | 50 | 51 | class Update(Action): 52 | __action_name__ = 'update' 53 | 54 | def __init__(self, doc=None, script=None, script_id=None, 55 | index=None, doc_type=None, 56 | consistency=None, refresh=None, 57 | routing=None, parent=None, 58 | timestamp=None, ttl=None, 59 | version=None, version_type=None, 60 | detect_noop=None, retry_on_conflict=None, 61 | upsert=None, doc_as_upsert=None, 62 | scripted_upsert=None, params=None, 63 | **kwargs): 64 | super(Update, self).__init__( 65 | doc or {}, 66 | index=index, 67 | doc_type=doc_type, 68 | consistency=consistency, 69 | refresh=refresh, 70 | routing=routing, 71 | parent=parent, 72 | timestamp=timestamp, 73 | ttl=ttl, 74 | version=version, 75 | version_type=version_type, 76 | retry_on_conflict=retry_on_conflict, 77 | **kwargs 78 | ) 79 | self.source_params = clean_params({ 80 | 'detect_noop': detect_noop, 81 | 'upsert': upsert, 82 | 'doc_as_upsert': doc_as_upsert, 83 | 'scripted_upsert': scripted_upsert, 84 | 'params': params, 85 | 'script': script, 86 | 'script_id': script_id, 87 | }) 88 | -------------------------------------------------------------------------------- /tests/test_pagination.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | from elasticmagic.ext.pagination import SearchQueryWrapper 4 | from elasticmagic.ext.pagination.flask import Pagination 5 | 6 | from .base import 
BaseTestCase 7 | 8 | 9 | class FlaskPaginationTest(BaseTestCase): 10 | def test_pagination(self): 11 | self.client.search = Mock( 12 | return_value={ 13 | "hits": { 14 | "max_score": 1, 15 | "total": 28, 16 | "hits": [ 17 | { 18 | "_id": "333", 19 | "_type": "car", 20 | "_score": 1 21 | }, 22 | { 23 | "_id": "444", 24 | "_type": "car", 25 | "_score": 1 26 | } 27 | ] 28 | } 29 | } 30 | ) 31 | 32 | p = Pagination(self.index.search_query(doc_cls=self.index['car']), page=2, per_page=2) 33 | 34 | self.assertEqual(p.total, 28) 35 | self.assertEqual(p.pages, 14) 36 | self.assertEqual(len(p.items), 2) 37 | self.assertEqual(p.items[0]._id, '333') 38 | self.assertEqual(p.items[1]._id, '444') 39 | self.assertEqual(p.has_next, True) 40 | self.assertEqual(p.next_num, 3) 41 | self.assertEqual(p.has_prev, True) 42 | self.assertEqual(p.prev_num, 1) 43 | for page, check_page in zip(p.iter_pages(), [1, 2, 3, 4, 5, 6, None, 13, 14]): 44 | self.assertEqual(page, check_page) 45 | 46 | self.assertEqual(self.client.search.call_count, 1) 47 | 48 | def test_wrapper(self): 49 | self.client.search = Mock( 50 | return_value={ 51 | "hits": { 52 | "max_score": 1, 53 | "total": 28, 54 | "hits": [ 55 | { 56 | "_id": "333", 57 | "_type": "car", 58 | "_score": 1 59 | }, 60 | { 61 | "_id": "444", 62 | "_type": "car", 63 | "_score": 1 64 | } 65 | ] 66 | } 67 | } 68 | ) 69 | 70 | sq = self.index.search_query(doc_cls=self.index['car']) 71 | wrapper = SearchQueryWrapper(sq) 72 | self.assertRaises(ValueError, lambda: wrapper[None]) 73 | self.assertRaises(ValueError, lambda: [d for d in wrapper]) 74 | self.assertRaises(ValueError, lambda: len(wrapper)) 75 | self.assertRaises(ValueError, lambda: wrapper.get_result()) 76 | self.assertEqual(len(wrapper[:2]), 2) 77 | self.assertEqual(len([d for d in wrapper]), 2) 78 | self.assertEqual(len(wrapper.get_result().hits), 2) 79 | -------------------------------------------------------------------------------- /tests_integ/asyncio/test_pagination.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from .conftest import Car 4 | 5 | from elasticmagic.ext.asyncio.pagination import AsyncSearchQueryWrapper 6 | from elasticmagic.ext.asyncio.pagination.flask import AsyncPagination 7 | 8 | 9 | @pytest.mark.asyncio 10 | async def test_search_query_wrapper(es_index, all_cars): 11 | sq = ( 12 | es_index.search_query(doc_cls=Car) 13 | ) 14 | 15 | wrapped_sq = AsyncSearchQueryWrapper(sq) 16 | 17 | with pytest.raises(ValueError): 18 | await wrapped_sq[0] 19 | 20 | with pytest.raises(ValueError): 21 | await wrapped_sq 22 | 23 | with pytest.raises(ValueError): 24 | len(wrapped_sq) 25 | 26 | with pytest.raises(ValueError): 27 | await wrapped_sq.get_result() 28 | 29 | hits = await wrapped_sq[:3] 30 | assert len(hits) == 3 31 | 32 | hits = await wrapped_sq[5:] 33 | assert len(hits) == 6 34 | 35 | assert len(wrapped_sq) == 11 36 | 37 | hits = await wrapped_sq 38 | assert len(list(hits)) == 6 39 | 40 | 41 | @pytest.mark.asyncio 42 | async def test_flask_pagination_default(es_index, all_cars): 43 | sq = ( 44 | es_index.search_query(doc_cls=Car) 45 | ) 46 | p = await AsyncPagination.create(sq) 47 | 48 | assert len(p.items) == 10 49 | assert p.page == 1 50 | assert p.pages == 2 51 | assert p.total == 11 52 | assert p.has_prev is False 53 | assert p.prev_num is None 54 | assert p.has_next is True 55 | assert p.next_num == 2 56 | assert list(p.iter_pages()) == [1, 2] 57 | 58 | p2 = await p.next() 59 | assert len(p2.items) == 1 60 | assert p2.page == 2 61 | 
assert p2.pages == 2 62 | assert p2.total == 11 63 | assert p2.has_prev is True 64 | assert p2.prev_num == 1 65 | assert p2.has_next is False 66 | assert p2.next_num is None 67 | assert list(p2.iter_pages()) == [1, 2] 68 | 69 | p1 = await p2.prev() 70 | assert p1.page == 1 71 | 72 | 73 | @pytest.mark.asyncio 74 | async def test_flask_pagination_many_pages(es_index, all_cars): 75 | sq = ( 76 | es_index.search_query(doc_cls=Car) 77 | ) 78 | p = await AsyncPagination.create(sq, per_page=2) 79 | 80 | assert len(p.items) == 2 81 | assert p.page == 1 82 | assert p.pages == 6 83 | assert p.total == 11 84 | assert p.has_prev is False 85 | assert p.prev_num is None 86 | assert p.has_next is True 87 | assert p.next_num == 2 88 | assert list(p.iter_pages(right_current=2)) == \ 89 | [1, 2, None, 5, 6] 90 | 91 | 92 | @pytest.mark.asyncio 93 | async def test_flask_pagination_max_items(es_index, all_cars): 94 | sq = ( 95 | es_index.search_query(doc_cls=Car) 96 | ) 97 | p = await AsyncPagination.create(sq, page=3, per_page=5, max_items=10) 98 | 99 | assert len(p.items) == 0 100 | assert p.page == 3 101 | assert p.has_next is False 102 | assert p.has_prev is True 103 | assert list(p.iter_pages()) == [1, 2] 104 | -------------------------------------------------------------------------------- /tests_integ/asyncio/test_search.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from .conftest import Car 4 | 5 | 6 | @pytest.mark.asyncio 7 | async def test_to_dict(es_index, cars): 8 | sq = ( 9 | es_index.search_query() 10 | .limit(1) 11 | ) 12 | 13 | assert await sq.to_dict() == { 14 | 'size': 1 15 | } 16 | 17 | 18 | @pytest.mark.asyncio 19 | async def test_get_result(es_index, cars): 20 | sq = ( 21 | es_index.search_query(Car.name.match('Sally')) 22 | .limit(1) 23 | ) 24 | 25 | res = await sq.get_result() 26 | 27 | assert res.total == 1 28 | assert len(res.hits) == 1 29 | assert res.error is None 30 | doc = res.hits[0] 31 | assert isinstance(doc, Car) 32 | assert doc._id == '2' 33 | 34 | cached_res = await sq.get_result() 35 | assert cached_res is res 36 | 37 | 38 | @pytest.mark.asyncio 39 | async def test_count(es_index, cars): 40 | assert await es_index.search_query().count() == 2 41 | assert await es_index.search_query(Car.name.match('Sally')).count() == 1 42 | 43 | 44 | @pytest.mark.asyncio 45 | async def test_exists(es_index, cars): 46 | assert await es_index.search_query().exists() 47 | assert await es_index.search_query(Car.name.match('Sally')).exists() 48 | assert not await es_index.search_query(Car.name.match('Buzz')).exists() 49 | 50 | 51 | @pytest.mark.asyncio 52 | async def test_iter(es_index, cars): 53 | sq = ( 54 | es_index.search_query(Car.name.match('Sally')) 55 | .limit(1) 56 | ) 57 | 58 | for doc in await sq: 59 | assert isinstance(doc, Car) 60 | assert doc._id == '2' 61 | 62 | for doc in await sq: 63 | assert isinstance(doc, Car) 64 | assert doc._id == '2' 65 | 66 | 67 | @pytest.mark.asyncio 68 | async def test_getitem(es_index, cars): 69 | sq = es_index.search_query() 70 | 71 | with pytest.raises(TypeError): 72 | await sq['test'] 73 | 74 | with pytest.raises(ValueError): 75 | await sq[-1] 76 | 77 | with pytest.raises(ValueError): 78 | await sq[:-1] 79 | 80 | with pytest.raises(ValueError): 81 | await sq[::2] 82 | 83 | docs = await sq[1:2] 84 | assert len(docs) == 1 85 | 86 | docs = await sq[:2] 87 | assert len(docs) == 2 88 | 89 | doc = await sq[0] 90 | assert doc is not None 91 | 92 | 93 | @pytest.mark.asyncio 94 | async def 
test_scroll(es_index, all_cars): 95 | assert await es_index.search_query().count() == 11 96 | 97 | sq = es_index.search_query(scroll='1m', doc_cls=Car).limit(5) 98 | 99 | res = await sq.get_result() 100 | assert len(res.hits) == 5 101 | 102 | res = await es_index.scroll( 103 | scroll_id=res.scroll_id, scroll='1m', doc_cls=Car 104 | ) 105 | assert len(res.hits) == 5 106 | 107 | res = await es_index.scroll( 108 | scroll_id=res.scroll_id, scroll='1m', doc_cls=Car 109 | ) 110 | assert len(res.hits) == 1 111 | 112 | await es_index.clear_scroll(scroll_id=res.scroll_id) 113 | 114 | 115 | @pytest.mark.asyncio 116 | async def test_disabled_stored_fields(es_index, cars): 117 | res = await es_index.search_query().stored_fields("_none_").get_result() 118 | 119 | assert res.error is None 120 | for doc in res: 121 | assert doc._id is None 122 | -------------------------------------------------------------------------------- /elasticmagic/ext/pagination/flask.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import unicode_literals 3 | 4 | from abc import ABCMeta 5 | from math import ceil 6 | 7 | from . import SearchQueryWrapper 8 | from ...cluster import MAX_RESULT_WINDOW 9 | 10 | 11 | class BasePagination(metaclass=ABCMeta): 12 | def _prev_page_params(self): 13 | return { 14 | 'page': self.prev_num, 15 | 'per_page': self.per_page, 16 | 'max_items': self.max_items, 17 | } 18 | 19 | def _next_page_params(self): 20 | return { 21 | 'page': self.next_num, 22 | 'per_page': self.per_page, 23 | 'max_items': self.max_items, 24 | } 25 | 26 | @property 27 | def pages(self): 28 | return int( 29 | ceil(min(self.total, self.max_items) / float(self.per_page)) 30 | ) 31 | 32 | @property 33 | def has_prev(self): 34 | return self.page > 1 35 | 36 | @property 37 | def prev_num(self): 38 | if not self.has_prev: 39 | return None 40 | return self.page - 1 41 | 42 | @property 43 | def has_next(self): 44 | return self.page < self.pages 45 | 46 | @property 47 | def next_num(self): 48 | if not self.has_next: 49 | return None 50 | return self.page + 1 51 | 52 | def iter_pages(self, left_edge=2, left_current=2, 53 | right_current=5, right_edge=2): 54 | """Iterates over the page numbers in the pagination. The four 55 | parameters control the thresholds how many numbers should be produced 56 | from the sides. Skipped page numbers are represented as `None`. 57 | This is how you could render such a pagination in the templates: 58 | 59 | .. sourcecode:: html+jinja 60 | 61 | {% macro render_pagination(pagination, endpoint) %} 62 | 75 | {% endmacro %} 76 | """ 77 | last = 0 78 | for num in range(1, self.pages + 1): 79 | is_left = num <= left_edge 80 | is_right = num > self.pages - right_edge 81 | is_center = ( 82 | self.page - left_current - 1 < num < self.page + right_current 83 | ) 84 | if is_left or is_right or is_center: 85 | if last + 1 != num: 86 | yield None 87 | yield num 88 | last = num 89 | 90 | 91 | class Pagination(BasePagination): 92 | """Helper class to provide compatibility with Flask-SQLAlchemy paginator. 
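
    A usage sketch (``sq`` is assumed to be a ``SearchQuery``)::

        p = Pagination(sq, page=2, per_page=10)
        page_items = p.items
        total_hits = p.total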
93 | """ 94 | def __init__( 95 | self, query, page=1, per_page=10, max_items=MAX_RESULT_WINDOW 96 | ): 97 | self.original_query = query 98 | self.query = SearchQueryWrapper(query, max_items=max_items) 99 | self.page = page if page > 0 else 1 100 | self.per_page = per_page 101 | self.max_items = max_items 102 | self.offset = (self.page - 1) * self.per_page 103 | 104 | self.items = self.query[self.offset:self.offset + self.per_page] 105 | self.total = len(self.query) 106 | 107 | def prev(self): 108 | return type(self)( 109 | self.original_query, **self._prev_page_params() 110 | ) 111 | 112 | def next(self): 113 | return type(self)( 114 | self.original_query, **self._next_page_params() 115 | ) 116 | -------------------------------------------------------------------------------- /.github/workflows/python.yaml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Test and build 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | tags: [ "v*" ] 10 | pull_request: 11 | branches: [ master ] 12 | 13 | jobs: 14 | unit-tests: 15 | runs-on: ubuntu-latest 16 | if: | 17 | !contains(github.event.head_commit.message, '[skip tests]') 18 | strategy: 19 | matrix: 20 | python-version: ['3.11', '3.12', '3.13', 'pypy3.11'] 21 | steps: 22 | - uses: actions/checkout@v3 23 | - name: Set up Python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - name: Install the latest version of uv 28 | uses: astral-sh/setup-uv@v6 29 | # tox-gh fails with uv-venv-lock-runner: https://github.com/tox-dev/tox-gh/issues/175 30 | - name: Set tox environments 31 | run: | 32 | echo "TOX_ENV_OPTS=$(.github/gh-tox-envs.py ${{ matrix.python-version }})" >> $GITHUB_ENV 33 | - name: Install dependencies 34 | run: | 35 | uv tool install tox --with tox-uv 36 | - name: Lint with flake8 37 | run: | 38 | uv tool run -v tox -e flake8 39 | - name: Run unit tests 40 | run: | 41 | uv tool run -v tox ${TOX_ENV_OPTS} -- -v --cov --cov-append 42 | - name: Run doc tests 43 | run: | 44 | uv tool run -v tox -e doctest 45 | 46 | integ-tests: 47 | runs-on: ubuntu-latest 48 | if: | 49 | !contains(github.event.head_commit.message, '[skip tests]') 50 | strategy: 51 | matrix: 52 | es-image: 53 | - 'elasticsearch:6.8.23' 54 | - 'elasticsearch:7.17.28' 55 | - 'elasticsearch:8.19.5' 56 | - 'opensearchproject/opensearch:2.19.2' 57 | python-version: ['3.11', '3.12', '3.13', 'pypy3.11'] 58 | services: 59 | elasticsearch: 60 | image: ${{ matrix.es-image }} 61 | ports: 62 | - 9200:9200 63 | options: >- 64 | -e="discovery.type=single-node" 65 | ${{ startsWith(matrix.es-image, 'elasticsearch:8.') && '-e="xpack.security.enabled=false" -e="xpack.security.enrollment.enabled=false"' || '' }} 66 | ${{ startsWith(matrix.es-image, 'opensearch') && '-e="DISABLE_SECURITY_PLUGIN=true"' || '' }} 67 | --health-cmd="curl http://localhost:9200/_cluster/health" 68 | --health-interval=5s 69 | --health-timeout=2s 70 | --health-retries=10 71 | steps: 72 | - uses: actions/checkout@v3 73 | - name: Set up Python ${{ matrix.python-version }} 74 | uses: actions/setup-python@v5 75 | with: 76 | python-version: ${{ matrix.python-version }} 77 | - name: Install the latest version of uv 78 | uses: astral-sh/setup-uv@v6 79 | - name: Install 
dependencies 80 | run: | 81 | uv tool install tox --with tox-uv 82 | - name: Run integration tests 83 | run: | 84 | uv tool run tox -e integ -- -v --cov --cov-append 85 | - name: Run async integration tests 86 | if: startsWith(matrix.python-version, '3.') 87 | run: | 88 | uv tool run tox -e integ-async -- -v --cov --cov-append 89 | 90 | deploy: 91 | runs-on: ubuntu-latest 92 | environment: 93 | name: pypi 94 | permissions: 95 | id-token: write 96 | contents: read 97 | if: | 98 | always() && 99 | github.event_name == 'push' && 100 | startsWith(github.ref, 'refs/tags/v') 101 | needs: 102 | - unit-tests 103 | - integ-tests 104 | strategy: 105 | matrix: 106 | python-version: ['3.11'] 107 | steps: 108 | - uses: actions/checkout@v3 109 | - name: Set up Python ${{ matrix.python-version }} 110 | uses: actions/setup-python@v4 111 | with: 112 | python-version: ${{ matrix.python-version }} 113 | - name: Install the latest version of uv 114 | uses: astral-sh/setup-uv@v6 115 | - name: Build package 116 | run: | 117 | uv build 118 | - name: Upload package to pypi.org 119 | run: | 120 | uv publish 121 | -------------------------------------------------------------------------------- /elasticmagic/attribute.py: -------------------------------------------------------------------------------- 1 | from .expression import Expression, Field, FieldOperators 2 | from .datastructures import OrderedAttributes 3 | 4 | 5 | # Really a factory that builds attribute-field factories 6 | def _attributed_field_factory( 7 | attr_cls, doc_cls, dynamic_field, make_field_name=None 8 | ): 9 | def _attr_field(name): 10 | field = dynamic_field.clone() 11 | field._name = make_field_name(name) if make_field_name else name 12 | return attr_cls(doc_cls, name, field) 13 | return _attr_field 14 | 15 | 16 | class AttributedField(Expression, FieldOperators): 17 | __visit_name__ = 'attributed_field' 18 | 19 | def __init__(self, parent, attr_name, field): 20 | self._parent = parent 21 | self._attr_name = attr_name 22 | self._field = field 23 | 24 | self._dynamic_fields = OrderedAttributes() 25 | 26 | if self._field._type.doc_cls: 27 | doc_cls = self._field._type.doc_cls 28 | dynamic_defaults = {} 29 | for dyn_field_name, dyn_attr_field in \ 30 | doc_cls.dynamic_fields.items(): 31 | dyn_field = dyn_attr_field._field.clone() 32 | dyn_field._name = self._make_field_name(dyn_field._name) 33 | self._dynamic_fields[dyn_field_name] = AttributedField( 34 | self._parent, dyn_field_name, dyn_field 35 | ) 36 | default = _attributed_field_factory( 37 | AttributedField, self._parent, dyn_field, 38 | self._make_field_name 39 | ) 40 | dynamic_defaults[dyn_field_name] = default 41 | 42 | self._sub_fields = OrderedAttributes(defaults=dynamic_defaults) 43 | for attr_name, attr_field in doc_cls.user_fields.items(): 44 | field = attr_field._field.clone() 45 | field._name = self._make_field_name(field._name) 46 | self._sub_fields[attr_name] = AttributedField( 47 | self._parent, attr_name, field 48 | ) 49 | elif self._field._fields: 50 | self._sub_fields = OrderedAttributes() 51 | for field_attr, field in self._field._fields.items(): 52 | field = field.clone() 53 | field._name = self._make_field_name(field_attr) 54 | self._sub_fields[field_attr] = AttributedField( 55 | self, field_attr, field 56 | ) 57 | else: 58 | self._sub_fields = OrderedAttributes() 59 | 60 | def _make_field_name(self, name): 61 | return '{}.{}'.format(self._field._name, name) 62 | 63 | @property 64 | def fields(self): 65 | return self._sub_fields 66 | 67 | @property 68 | def dynamic_fields(self): 69 | return 
self._dynamic_fields 70 | 71 | def __getattr__(self, name): 72 | return getattr(self.fields, name) 73 | 74 | def wildcard(self, name): 75 | return DynamicAttributedField( 76 | self._parent, name, Field(self._make_field_name(name)) 77 | ) 78 | 79 | def get_parent(self): 80 | return self._parent 81 | 82 | def get_attr_name(self): 83 | return self._attr_name 84 | 85 | def get_field(self): 86 | return self._field 87 | 88 | def get_field_name(self): 89 | return self._field._name 90 | 91 | def get_type(self): 92 | return self._field.get_type() 93 | 94 | def __get__(self, obj, type=None): 95 | if obj is None: 96 | return self 97 | 98 | obj.__dict__[self._attr_name] = None 99 | return None 100 | 101 | def _collect_doc_classes(self): 102 | if isinstance(self._parent, AttributedField): 103 | return self._parent._collect_doc_classes() 104 | return {self._parent} 105 | 106 | def _to_python(self, value): 107 | return self._field._to_python(value) 108 | 109 | def _from_python(self, value): 110 | return self._field._from_python(value) 111 | 112 | 113 | class DynamicAttributedField(AttributedField): 114 | def __getattr__(self, name): 115 | return getattr( 116 | self.fields, 117 | name, 118 | DynamicAttributedField( 119 | self._parent, name, Field(self._make_field_name(name)) 120 | ) 121 | ) 122 | -------------------------------------------------------------------------------- /vagga.yaml: -------------------------------------------------------------------------------- 1 | _defaults: 2 | es-uid: &es-uid 999 3 | 4 | containers: 5 | py2-test: 6 | setup: 7 | - !Ubuntu bionic 8 | - !Install [curl] 9 | - !PipConfig 10 | dependencies: true 11 | - !Py2Requirements requirements.txt 12 | - !Py2Requirements requirements_test.txt 13 | 14 | py3-test: 15 | setup: 16 | - !Ubuntu bionic 17 | - !Install [curl] 18 | - !PipConfig 19 | dependencies: true 20 | - !Py3Requirements requirements.txt 21 | - !Py3Requirements requirements_async.txt 22 | - !Py3Requirements requirements_async_test.txt 23 | - !Py3Requirements requirements_lint.txt 24 | - !Py3Requirements requirements_test.txt 25 | 26 | py2-sphinx: 27 | setup: 28 | - !Alpine v3.5 29 | - !Install 30 | - make 31 | - !PipConfig 32 | dependencies: true 33 | - !Py2Requirements requirements.txt 34 | - !Py2Requirements requirements_test.txt 35 | - !Py2Requirements requirements_doc.txt 36 | 37 | py3-sphinx: 38 | setup: 39 | - !Alpine v3.5 40 | - !Install 41 | - make 42 | - !PipConfig 43 | dependencies: true 44 | - !Py3Requirements requirements.txt 45 | - !Py3Requirements requirements_test.txt 46 | - !Py3Requirements requirements_doc.txt 47 | 48 | bump: 49 | setup: 50 | - !Alpine v3.8 51 | - !Install 52 | - git 53 | - !PipConfig 54 | dependencies: true 55 | - !Py3Install 56 | - bumpversion 57 | 58 | bench: 59 | environ: 60 | HOME: /work 61 | PYTHONPATH: /work 62 | setup: 63 | - !Alpine v3.4 64 | - !PipConfig 65 | dependencies: true 66 | - !Py3Requirements requirements.txt 67 | - !Py3Install [coverage] 68 | 69 | elastic-5x: 70 | setup: 71 | - !Ubuntu bionic 72 | - !UbuntuUniverse 73 | # elastic PGP & Repo 74 | - !Install [gnupg] 75 | - !AptTrust 76 | server: pgp.mit.edu 77 | keys: [D88E42B4] 78 | - !UbuntuRepo 79 | url: https://artifacts.elastic.co/packages/5.x/apt 80 | suite: stable 81 | components: [main] 82 | - !Env 83 | ES_UID: *es-uid 84 | - !Sh | 85 | groupadd -g $ES_UID elasticsearch 86 | useradd -u $ES_UID -g elasticsearch -d /usr/share/elasticsearch elasticsearch 87 | - !Install 88 | - ca-certificates 89 | - ca-certificates-java 90 | - openjdk-8-jre-headless 91 | - 
elasticsearch=5.6.14 92 | volumes: 93 | /var/lib/elasticsearch: !Tmpfs 94 | 95 | commands: 96 | test-py2: !Command 97 | description: Run tests with Python2 98 | container: py2-test 99 | run: [py.test] 100 | 101 | test-py3: !Command 102 | description: Run tests with Python3 103 | container: py3-test 104 | run: [py.test] 105 | 106 | flake8: !Command 107 | description: Run flake8 linter 108 | container: py3-test 109 | run: [flake8, elasticmagic, tests_integ] 110 | 111 | bump: !Command 112 | description: Bump version 113 | container: bump 114 | run: [bumpversion] 115 | 116 | test-integ-5x: !Supervise 117 | options: | 118 | Usage: 119 | vagga test-integ-5x [options] [--tests=<tests>] [--] [<pytest-options>...] 120 | 121 | Options: 122 | --tests=<tests> Tests to run 123 | <pytest-options> Pytest options 124 | children: 125 | elastic-5x: !Command 126 | container: elastic-5x 127 | user-id: *es-uid 128 | group-id: *es-uid 129 | work-dir: /usr/share/elasticsearch 130 | run: | 131 | ./bin/elasticsearch \ 132 | -Enetwork.host=_local_ \ 133 | -Epath.conf=/etc/elasticsearch \ 134 | -Epath.scripts=/etc/elasticsearch/scripts \ 135 | -Epath.data=/var/lib/elasticsearch/data \ 136 | -Epath.logs=/var/lib/elasticsearch/logs 137 | test: !Command 138 | container: py3-test 139 | run: | 140 | ./scripts/wait_es.sh ${ES_URL:-localhost:9200} 141 | TESTS=${VAGGAOPT_TESTS:-tests_integ} 142 | eval py.test $TESTS $VAGGACLI_PYTEST_OPTIONS 143 | 144 | _py2-cov: !Command 145 | container: py2-test 146 | run: [py.test, --cov] 147 | 148 | cover: !Command 149 | prerequisites: [_py2-cov] 150 | container: py3-test 151 | run: [py.test, --cov, --cov-append] 152 | 153 | doctest: !Command 154 | description: Run doctests 155 | container: py3-sphinx 156 | work-dir: docs 157 | run: [make, doctest] 158 | 159 | doc: !Command 160 | description: Build documentation 161 | container: py3-sphinx 162 | work-dir: docs 163 | prerequisites: [doctest] 164 | run: [make, html] 165 | 166 | doctest-py2: !Command 167 | description: Run doctests 168 | container: py2-sphinx 169 | work-dir: docs 170 | run: [make, doctest] 171 | 172 | doc-py2: !Command 173 | description: Build documentation 174 | container: py2-sphinx 175 | work-dir: docs 176 | prerequisites: [doctest-py2] 177 | run: [make, html] 178 | 179 | bench: !Command 180 | description: Run benchmarks 181 | container: bench 182 | run: [python3, benchmark/run.py] 183 | -------------------------------------------------------------------------------- /docs/quick_start.rst: -------------------------------------------------------------------------------- 1 | .. _quick_start: 2 | 3 | =========== 4 | Quick Start 5 | =========== 6 | 7 | .. 
testsetup:: python 8 | 9 | from unittest.mock import patch 10 | 11 | from elasticsearch import Elasticsearch 12 | from elasticsearch.client import IndicesClient 13 | 14 | info_patch = patch.object( 15 | Elasticsearch, 'info', 16 | return_value={ 17 | 'version': { 18 | 'number': '6.7.2', 19 | 'distribution': 'elasticsearch', 20 | 21 | } 22 | } 23 | ) 24 | info_patch.__enter__() 25 | 26 | put_mapping_patch = patch.object(IndicesClient, 'put_mapping') 27 | 28 | index_patch = patch.object(Elasticsearch, 'bulk', 29 | return_value={ 30 | 'took': 5, 31 | 'errors': False, 32 | 'items': [], 33 | } 34 | ) 35 | 36 | search_hits_patch = patch.object(Elasticsearch, 'search', 37 | return_value={ 38 | 'hits': { 39 | 'total': 1, 40 | 'max_score': 1, 41 | 'hits': [ 42 | { 43 | '_id': '1234', 44 | '_type': 'product', 45 | '_index': 'test', 46 | '_score': 1.0, 47 | '_source': { 48 | 'name': "Lego Ninjago Cole's dragon", 49 | 'status': 0, 50 | } 51 | } 52 | ], 53 | } 54 | } 55 | ) 56 | 57 | search_aggs_patch = patch.object(Elasticsearch, 'search', 58 | return_value={ 59 | 'hits': { 60 | 'total': 1, 61 | 'max_score': 1, 62 | 'hits': [], 63 | }, 64 | 'aggregations': { 65 | 'prices': { 66 | 'buckets': [ 67 | {'key': 0, 'doc_count': 4}, 68 | {'key': 20, 'doc_count': 35}, 69 | {'key': 40, 'doc_count': 7}, 70 | ] 71 | } 72 | } 73 | } 74 | ) 75 | 76 | First of all, create Elasticsearch cluster and index objects: 77 | 78 | .. testcode:: python 79 | 80 | from elasticsearch import Elasticsearch 81 | from elasticmagic import Cluster, Index 82 | 83 | es_cluster = Cluster(Elasticsearch()) 84 | es_index = Index(es_cluster, 'test') 85 | 86 | Let's describe an Elasticsearch document: 87 | 88 | .. testcode:: python 89 | 90 | from elasticmagic import Document, Field 91 | from elasticmagic.types import String, Integer, Float 92 | 93 | class ProductDocument(Document): 94 | __doc_type__ = 'product' 95 | 96 | name = Field(String, fields={ 97 | 'sort': Field( 98 | String, index='no', doc_values=True, analyzer='keyword' 99 | ), 100 | }) 101 | status = Field(Integer) 102 | price = Field(Float) 103 | 104 | To create or update the document mapping, just run: 105 | 106 | .. testcode:: python 107 | :hide: 108 | 109 | put_mapping_patch.__enter__() 110 | 111 | .. testcode:: python 112 | 113 | es_index.put_mapping(ProductDocument) 114 | 115 | .. testcode:: python 116 | :hide: 117 | 118 | put_mapping_patch.__exit__(None, None, None) 119 | 120 | Now let's index some documents: 121 | 122 | .. testcode:: python 123 | :hide: 124 | 125 | index_patch.__enter__() 126 | 127 | .. testcode:: python 128 | 129 | from decimal import Decimal 130 | 131 | doc1 = ProductDocument( 132 | name="Lego Ninjago Cole's dragon", 133 | status=0, 134 | price=Decimal('10.99'), 135 | ) 136 | doc2 = ProductDocument() 137 | doc2.name = 'Lego minifigure' 138 | doc2.status = 1 139 | doc2.price = Decimal('2.50') 140 | result = es_index.add([doc1, doc2]) 141 | assert result.errors == False 142 | 143 | .. testcode:: python 144 | :hide: 145 | 146 | index_patch.__exit__(None, None, None) 147 | 148 | Now we can build a query: 149 | 150 | .. testcode:: python 151 | 152 | search_query = ( 153 | es_index.search_query(ProductDocument.name.match('lego')) 154 | .filter(ProductDocument.status == 0) 155 | .order_by(ProductDocument.name.sort) 156 | .limit(20) 157 | ) 158 |
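
At this point no request has been sent to Elasticsearch yet; the query object
only describes the search. To inspect what will be sent, render the query as
a dictionary. The output below is a sketch, not a doctest: the exact shape
depends on the compiler chosen for your Elasticsearch version.

.. code-block:: python

    search_query.to_dict()
    # roughly:
    # {
    #     'query': {
    #         'bool': {
    #             'must': {'match': {'name': 'lego'}},
    #             'filter': {'term': {'status': 0}},
    #         }
    #     },
    #     'sort': ['name.sort'],
    #     'size': 20,
    # }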
159 | And finally, make the request and process the result: 160 | 161 | .. testcode:: python 162 | :hide: 163 | 164 | search_hits_patch.__enter__() 165 | 166 | .. testcode:: python 167 | 168 | for doc in search_query: 169 | print('{}: {}'.format(doc._id, doc.name)) 170 | 171 | .. testoutput:: python 172 | :hide: 173 | 174 | 1234: Lego Ninjago Cole's dragon 175 | 176 | .. testcode:: python 177 | :hide: 178 | 179 | search_hits_patch.__exit__(None, None, None) 180 | 181 | Let's build a price histogram: 182 | 183 | .. testcode:: python 184 | :hide: 185 | 186 | search_aggs_patch.__enter__() 187 | 188 | .. testcode:: python 189 | 190 | from elasticmagic import agg 191 | 192 | search_query = ( 193 | es_index.search_query() 194 | .filter(ProductDocument.status == 0) 195 | .aggs({ 196 | 'prices': agg.Histogram(ProductDocument.price, interval=20) 197 | }) 198 | .limit(0) 199 | ) 200 | 201 | for bucket in search_query.result.get_aggregation('prices').buckets: 202 | print('{} ({})'.format(bucket.key, bucket.doc_count)) 203 | 204 | .. testoutput:: python 205 | :hide: 206 | 207 | 0 (4) 208 | 20 (35) 209 | 40 (7) 210 | 211 | .. testcode:: python 212 | :hide: 213 | 214 | search_aggs_patch.__exit__(None, None, None) 215 | -------------------------------------------------------------------------------- /tests_integ/asyncio/test_index.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from elasticmagic.search import SearchQuery 4 | 5 | from .conftest import Car 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_get(es_index, cars): 10 | doc = await es_index.get(1, doc_cls=Car) 11 | assert doc.name == 'Lightning McQueen' 12 | assert doc._id == '1' 13 | assert doc._index == es_index.get_name() 14 | assert doc._score is None 15 | 16 | doc = await es_index.get(2, doc_cls=Car) 17 | assert doc.name == 'Sally Carerra' 18 | assert doc._id == '2' 19 | assert doc._index == es_index.get_name() 20 | assert doc._score is None 21 | 22 | 23 | @pytest.mark.asyncio 24 | async def test_multi_get_by_ids(es_index, cars): 25 | docs = await es_index.multi_get([1, 2, 3], doc_cls=Car) 26 | 27 | assert len(docs) == 3 28 | 29 | doc = docs[0] 30 | assert doc.name == 'Lightning McQueen' 31 | assert doc._id == '1' 32 | assert doc._index == es_index.get_name() 33 | assert doc._score is None 34 | 35 | doc = docs[1] 36 | assert doc.name == 'Sally Carerra' 37 | assert doc._id == '2' 38 | assert doc._index == es_index.get_name() 39 | assert doc._score is None 40 | 41 | doc = docs[2] 42 | assert doc is None 43 | 44 | 45 | @pytest.mark.asyncio 46 | async def test_multi_get_by_ids_with_doc_cls_as_list(es_index, cars): 47 | docs = await es_index.multi_get([1, 2], doc_cls=[Car]) 48 | 49 | doc = docs[0] 50 | assert doc.name == 'Lightning McQueen' 51 | assert doc._id == '1' 52 | assert doc._index == es_index.get_name() 53 | assert doc._score is None 54 | 55 | doc = docs[1] 56 | assert doc.name == 'Sally Carerra' 57 | assert doc._id == '2' 58 | assert doc._index == es_index.get_name() 59 | assert doc._score is None 60 | 61 | 62 | @pytest.mark.asyncio 63 | async def test_multi_get_by_docs(es_index, cars): 64 | docs = await es_index.multi_get([Car(_id=1), Car(_id=2)]) 65 | 66 | doc = docs[0] 67 | assert doc.name == 'Lightning McQueen' 68 | assert doc._id == '1' 69 | assert doc._index == es_index.get_name() 70 | assert doc._score is None 71 | 72 | doc = docs[1] 73 | assert doc.name == 'Sally Carerra' 74 | assert doc._id == '2' 75 | assert doc._index == es_index.get_name() 76 | assert doc._score is None 77 | 78 | 79 | @pytest.mark.asyncio 80 | async def test_multi_get_by_dicts(es_index, cars): 81 | docs = await es_index.multi_get([ 82 | {'_id': 1, 
'_type': 'car'}, 83 | {'_id': 2, 'doc_cls': Car}, 84 | ]) 85 | 86 | doc = docs[0] 87 | assert doc.name == 'Lightning McQueen' 88 | assert doc._id == '1' 89 | assert doc._index == es_index.get_name() 90 | assert doc._score is None 91 | 92 | doc = docs[1] 93 | assert doc.name == 'Sally Carerra' 94 | assert doc._id == '2' 95 | assert doc._index == es_index.get_name() 96 | assert doc._score is None 97 | 98 | 99 | @pytest.mark.asyncio 100 | async def test_search(es_index, cars): 101 | res = await es_index.search( 102 | SearchQuery(Car.name.match("Lightning")) 103 | ) 104 | 105 | assert res.total == 1 106 | assert len(res.hits) == 1 107 | doc = res.hits[0] 108 | assert doc.name == 'Lightning McQueen' 109 | assert doc._id == '1' 110 | assert doc._index == es_index.get_name() 111 | assert doc._score > 0 112 | assert doc._score == res.max_score 113 | 114 | 115 | @pytest.mark.asyncio 116 | async def test_count(es_index, cars): 117 | assert (await es_index.count()).count == 2 118 | assert (await es_index.count({ 119 | "match": { 120 | "name": "Lightning" 121 | } 122 | })).count == 1 123 | assert (await es_index.count( 124 | SearchQuery(Car.name.match("Lightning")) 125 | )).count == 1 126 | 127 | 128 | @pytest.mark.asyncio 129 | async def test_exists(es_index, cars): 130 | assert (await es_index.exists()).exists is True 131 | assert (await es_index.exists({ 132 | "match": { 133 | "name": "Lightning" 134 | } 135 | })).exists is True 136 | assert (await es_index.exists( 137 | SearchQuery(Car.name.match("Mack")) 138 | )).exists is False 139 | 140 | 141 | @pytest.mark.asyncio 142 | async def test_scroll(es_index, cars): 143 | search_res = await es_index.search(SearchQuery(), scroll='1m') 144 | 145 | assert search_res.total == 2 146 | assert len(search_res.hits) == 2 147 | assert search_res.scroll_id is not None 148 | 149 | scroll_res = await es_index.scroll(search_res.scroll_id, scroll='1m') 150 | 151 | assert scroll_res.total == 2 152 | assert len(scroll_res.hits) == 0 153 | 154 | clear_scroll_res = await es_index.clear_scroll(scroll_res.scroll_id) 155 | 156 | assert clear_scroll_res.succeeded is True 157 | 158 | 159 | @pytest.mark.asyncio 160 | async def test_multi_search(es_index, cars): 161 | results = await es_index.multi_search([ 162 | SearchQuery(Car.name.match("Lightning")), 163 | SearchQuery(Car.name.match("Sally")), 164 | ]) 165 | 166 | assert len(results) == 2 167 | 168 | res = results[0] 169 | assert res.total == 1 170 | assert len(res.hits) == 1 171 | doc = res.hits[0] 172 | assert doc.name == 'Lightning McQueen' 173 | assert doc._id == '1' 174 | assert doc._index == es_index.get_name() 175 | assert doc._score > 0 176 | assert doc._score == res.max_score 177 | 178 | res = results[1] 179 | assert res.total == 1 180 | assert len(res.hits) == 1 181 | doc = res.hits[0] 182 | assert doc.name == 'Sally Carerra' 183 | assert doc._id == '2' 184 | assert doc._index == es_index.get_name() 185 | assert doc._score > 0 186 | assert doc._score == res.max_score 187 | 188 | 189 | @pytest.mark.asyncio 190 | async def test_delete(es_index, cars): 191 | res = await es_index.delete(1, doc_type='car') 192 | 193 | assert res.result == 'deleted' 194 | 195 | 196 | @pytest.mark.asyncio 197 | async def test_delete_by_query(es_index, cars): 198 | res = await es_index.delete_by_query( 199 | SearchQuery(Car.name.match("Lightning")), 200 | refresh=True, 201 | ) 202 | 203 | assert res.deleted == 1 204 | assert (await es_index.count()).count == 1 205 | 206 | 207 | @pytest.mark.asyncio 208 | async def test_flush(es_index, 
cars): 209 | await es_index.add([Car(name='Mater')]) 210 | res = await es_index.flush() 211 | 212 | assert res 213 | -------------------------------------------------------------------------------- /elasticmagic/ext/asyncio/index.py: -------------------------------------------------------------------------------- 1 | from ...index import BaseIndex 2 | 3 | 4 | class AsyncIndex(BaseIndex): 5 | async def get_es_version(self): 6 | return await self._cluster.get_es_version() 7 | 8 | async def get_compiler(self): 9 | return await self._cluster.get_compiler() 10 | 11 | async def get( 12 | self, doc_or_id, doc_cls=None, doc_type=None, source=None, 13 | realtime=None, routing=None, preference=None, refresh=None, 14 | version=None, version_type=None, **kwargs 15 | ): 16 | return await self._cluster.get( 17 | doc_or_id, index=self._name, doc_cls=doc_cls, doc_type=doc_type, 18 | source=source, realtime=realtime, routing=routing, 19 | preference=preference, refresh=refresh, version=version, 20 | version_type=version_type, **kwargs 21 | ) 22 | 23 | async def multi_get( 24 | self, docs, doc_type=None, source=None, realtime=None, 25 | routing=None, preference=None, refresh=None, **kwargs 26 | ): 27 | return await self._cluster.multi_get( 28 | docs, index=self._name, doc_type=doc_type, source=source, 29 | realtime=realtime, routing=routing, preference=preference, 30 | refresh=refresh, **kwargs 31 | ) 32 | 33 | mget = multi_get 34 | 35 | async def search( 36 | self, q, doc_type=None, routing=None, preference=None, 37 | timeout=None, search_type=None, query_cache=None, 38 | terminate_after=None, scroll=None, stats=None, **kwargs 39 | ): 40 | return await self._cluster.search( 41 | q, index=self._name, doc_type=doc_type, 42 | routing=routing, preference=preference, timeout=timeout, 43 | search_type=search_type, query_cache=query_cache, 44 | terminate_after=terminate_after, scroll=scroll, stats=stats, 45 | **kwargs 46 | ) 47 | 48 | async def explain( 49 | self, q, doc_or_id, doc_cls=None, routing=None, **kwargs 50 | ): 51 | return await self._cluster.explain( 52 | q, doc_or_id, index=self._name, doc_cls=doc_cls, routing=routing, 53 | **kwargs 54 | ) 55 | 56 | async def multi_search( 57 | self, queries, doc_type=None, routing=None, preference=None, 58 | search_type=None, **kwargs 59 | ): 60 | return await self._cluster.multi_search( 61 | queries, index=self._name, doc_type=doc_type, 62 | routing=routing, preference=preference, search_type=search_type, 63 | **kwargs 64 | ) 65 | 66 | msearch = multi_search 67 | 68 | async def count( 69 | self, q=None, doc_type=None, routing=None, preference=None, 70 | **kwargs 71 | ): 72 | return await self._cluster.count( 73 | q, index=self._name, doc_type=doc_type, routing=routing, 74 | preference=preference, **kwargs 75 | ) 76 | 77 | async def exists( 78 | self, q=None, doc_type=None, refresh=None, routing=None, **kwargs 79 | ): 80 | return await self._cluster.exists( 81 | q, index=self._name, doc_type=doc_type, refresh=refresh, 82 | routing=routing, **kwargs 83 | ) 84 | 85 | async def scroll( 86 | self, scroll_id, scroll, doc_cls=None, instance_mapper=None, 87 | **kwargs 88 | ): 89 | return await self._cluster.scroll( 90 | scroll_id, scroll, 91 | doc_cls=doc_cls, instance_mapper=instance_mapper, 92 | **kwargs 93 | ) 94 | 95 | async def clear_scroll(self, scroll_id, **kwargs): 96 | return await self._cluster.clear_scroll(scroll_id, **kwargs) 97 | 98 | async def put_mapping( 99 | self, doc_cls_or_mapping, doc_type=None, allow_no_indices=None, 100 | expand_wildcards=None, 
ignore_conflicts=None, 101 | ignore_unavailable=None, master_timeout=None, timeout=None, 102 | **kwargs 103 | ): 104 | return await self._cluster.put_mapping( 105 | doc_cls_or_mapping, index=self._name, doc_type=doc_type, 106 | allow_no_indices=allow_no_indices, 107 | expand_wildcards=expand_wildcards, 108 | ignore_conflicts=ignore_conflicts, 109 | ignore_unavailable=ignore_unavailable, 110 | master_timeout=master_timeout, timeout=timeout, 111 | **kwargs 112 | ) 113 | 114 | async def add( 115 | self, docs, doc_type=None, refresh=None, timeout=None, 116 | consistency=None, replication=None, **kwargs 117 | ): 118 | return await self._cluster.add( 119 | docs, index=self._name, doc_type=doc_type, refresh=refresh, 120 | timeout=timeout, consistency=consistency, replication=replication, 121 | **kwargs 122 | ) 123 | 124 | async def delete( 125 | self, doc_or_id, doc_cls=None, doc_type=None, 126 | timeout=None, consistency=None, replication=None, 127 | parent=None, routing=None, refresh=None, version=None, 128 | version_type=None, 129 | **kwargs 130 | ): 131 | return await self._cluster.delete( 132 | doc_or_id, index=self._name, doc_cls=doc_cls, doc_type=doc_type, 133 | timeout=timeout, consistency=consistency, replication=replication, 134 | parent=parent, routing=routing, refresh=refresh, 135 | version=version, version_type=version_type, 136 | **kwargs 137 | ) 138 | 139 | async def delete_by_query( 140 | self, q, doc_type=None, timeout=None, consistency=None, 141 | replication=None, routing=None, **kwargs 142 | ): 143 | return await self._cluster.delete_by_query( 144 | q, index=self._name, doc_type=doc_type, 145 | timeout=timeout, consistency=consistency, 146 | replication=replication, routing=routing, 147 | **kwargs 148 | ) 149 | 150 | async def bulk(self, actions, doc_type=None, refresh=None, **kwargs): 151 | return await self._cluster.bulk( 152 | actions, index=self._name, doc_type=doc_type, refresh=refresh, 153 | **kwargs 154 | ) 155 | 156 | async def refresh(self, **kwargs): 157 | return await self._cluster.refresh(index=self._name, **kwargs) 158 | 159 | async def flush(self, **kwargs): 160 | return await self._cluster.flush(index=self._name, **kwargs) 161 | -------------------------------------------------------------------------------- /elasticmagic/ext/queryfilter/codec.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import math 3 | from collections import defaultdict 4 | 5 | import dateutil.parser 6 | 7 | from elasticmagic.types import instantiate 8 | from elasticmagic.types import Type 9 | 10 | 11 | TIME_ATTRS = {'hour', 'minute', 'second', 'microsecond', 'tzinfo'} 12 | 13 | 14 | class TypeCodec(object): 15 | def decode(self, value, es_type=None): 16 | raise NotImplementedError 17 | 18 | def encode(self, value, es_type=None): 19 | raise NotImplementedError 20 | 21 | 22 | class StringCodec(TypeCodec): 23 | def decode(self, value, es_type=None): 24 | return str(value) 25 | 26 | def encode(self, value, es_type=None): 27 | return str(value) 28 | 29 | 30 | class FloatCodec(TypeCodec): 31 | def decode(self, value, es_type=None): 32 | v = float(value) 33 | if math.isnan(v) or math.isinf(v): 34 | raise ValueError('NaN or Inf is not supported') 35 | return v 36 | 37 | def encode(self, value, es_type=None): 38 | return value 39 | 40 | 41 | class IntCodec(TypeCodec): 42 | def encode(self, value, es_type=None): 43 | if isinstance(value, int): 44 | return str(value) 45 | return str(int(value)) 46 | 47 | def decode(self, value, 
es_type=None): 48 | v = int(value) 49 | if ( 50 | es_type is not None and 51 | (v < es_type.MIN_VALUE or v > es_type.MAX_VALUE) 52 | ): 53 | raise ValueError( 54 | 'Value must be between {} and {}'.format( 55 | es_type.MIN_VALUE, es_type.MAX_VALUE 56 | ) 57 | ) 58 | return v 59 | 60 | 61 | class BoolCodec(TypeCodec): 62 | def encode(self, value, es_type=None): 63 | if value is True: 64 | return 'true' 65 | if value is False: 66 | return 'false' 67 | return bool(value) 68 | 69 | def decode(self, value, es_type=None): 70 | if isinstance(value, bool): 71 | return value 72 | if value == 'true': 73 | return True 74 | if value == 'false': 75 | return False 76 | raise ValueError('Cannot decode boolean value: {}'.format(value)) 77 | 78 | 79 | class DateCodec(TypeCodec): 80 | def encode(self, value, es_type=None): 81 | if isinstance(value, datetime.datetime): 82 | return value.strftime('%Y-%m-%dT%H:%M:%S.%f') 83 | if isinstance(value, datetime.date): 84 | return value.strftime('%Y-%m-%d') 85 | raise ValueError('Value must be date or datetime: {}'.format(value)) 86 | 87 | def decode(self, value, es_type=None): 88 | if isinstance(value, (datetime.datetime, datetime.date)): 89 | return value 90 | return dateutil.parser.parse(value) 91 | 92 | 93 | def wrap_list(v): 94 | if not isinstance(v, (list, tuple)): 95 | return [v] 96 | return v 97 | 98 | 99 | class BaseCodec(object): 100 | def decode_value(self, value, es_type=None): 101 | raise NotImplementedError() 102 | 103 | def decode(self, params, types=None): 104 | raise NotImplementedError() 105 | 106 | def encode_value(self, value, es_type=None): 107 | raise NotImplementedError() 108 | 109 | def encode(self, values, types=None): 110 | raise NotImplementedError() 111 | 112 | 113 | class SimpleCodec(BaseCodec): 114 | OP_SEP = '__' 115 | 116 | NULL_VAL = 'null' 117 | 118 | DEFAULT_OP = 'exact' 119 | 120 | CODECS = { 121 | None: StringCodec, 122 | float: FloatCodec, 123 | int: IntCodec, 124 | bool: BoolCodec, 125 | datetime.datetime: DateCodec, 126 | } 127 | 128 | @staticmethod 129 | def _normalize_params(params): 130 | if hasattr(params, 'getall'): 131 | # Webob 132 | return params.dict_of_lists() 133 | if hasattr(params, 'getlist'): 134 | # Django 135 | return dict(params.lists()) 136 | if isinstance(params, (list, tuple)): 137 | # list, tuple 138 | new_params = defaultdict(list) 139 | for p, v in params: 140 | new_params[p].extend(v) 141 | return new_params 142 | if isinstance(params, dict): 143 | # dict 144 | return params 145 | 146 | raise TypeError("'params' must be Webob MultiDict, " 147 | "Django QueryDict, list, tuple or dict") 148 | 149 | @staticmethod 150 | def _get_es_type_class(es_type): 151 | if es_type is not None and isinstance(es_type, Type): 152 | if es_type.sub_type: 153 | return SimpleCodec._get_es_type_class(es_type.sub_type) 154 | return es_type.__class__ 155 | return es_type 156 | 157 | @staticmethod 158 | def _get_es_and_python_types(es_type): 159 | if es_type is None: 160 | return None, None 161 | es_type = instantiate(es_type) 162 | if es_type.sub_type: 163 | es_type = es_type.sub_type 164 | return es_type, es_type.python_type 165 | 166 | def decode_value(self, value, es_type=None): 167 | if value is None or value == self.NULL_VAL: 168 | return None 169 | 170 | es_type, python_type = self._get_es_and_python_types(es_type) 171 | value_codec = self.CODECS.get(python_type, StringCodec)() 172 | return value_codec.decode(value, es_type=es_type) 173 | 174 | def decode(self, params, types=None): 175 | params = 
self._normalize_params(params) 176 | types = types or {} 177 | decoded_params = {} 178 | for name, v in params.items(): 179 | name, _, op = name.partition(self.OP_SEP) 180 | if not op: 181 | op = self.DEFAULT_OP 182 | es_type = types.get(name) 183 | for w in wrap_list(v): 184 | try: 185 | decoded_value = self.decode_value(w, es_type=es_type) 186 | decoded_params \ 187 | .setdefault(name, {}) \ 188 | .setdefault(op, []) \ 189 | .append(decoded_value) 190 | except ValueError: 191 | # just ignore values we cannot decode 192 | pass 193 | 194 | return decoded_params 195 | 196 | def encode_value(self, value, es_type=None): 197 | if value is None: 198 | return self.NULL_VAL 199 | 200 | es_type, python_type = self._get_es_and_python_types(es_type) 201 | value_codec = self.CODECS.get(python_type, StringCodec)() 202 | return value_codec.encode(value, es_type=es_type) 203 | 204 | def encode(self, values, types=None): 205 | params = {} 206 | for name, ops in values.items(): 207 | for op, vals in ops.items(): 208 | if op == self.DEFAULT_OP: 209 | key = name 210 | else: 211 | key = '{}__{}'.format(name, op) 212 | if types: 213 | es_type = types.get(name) 214 | else: 215 | es_type = None 216 | params[key] = [ 217 | self.encode_value(v, es_type=es_type) 218 | for v in vals 219 | ] 220 | return params 221 | -------------------------------------------------------------------------------- /elasticmagic/result.py: -------------------------------------------------------------------------------- 1 | from .document import DynamicDocument 2 | from .document import get_doc_type_for_hit 3 | 4 | 5 | class Result(object): 6 | def __init__(self, raw_result): 7 | self.raw = raw_result 8 | 9 | 10 | class SearchResult(Result): 11 | def __init__( 12 | self, raw_result, aggregations=None, doc_cls_map=None, 13 | instance_mapper=None, 14 | ): 15 | super(SearchResult, self).__init__(raw_result) 16 | 17 | self._query_aggs = aggregations or {} 18 | 19 | self._doc_cls_map = doc_cls_map or {} 20 | doc_classes = list(self._doc_cls_map.values()) 21 | 22 | self._mapper_registry = {} 23 | if isinstance(instance_mapper, dict): 24 | self._instance_mappers = instance_mapper 25 | else: 26 | self._instance_mappers = { 27 | doc_cls: instance_mapper for doc_cls in doc_classes 28 | } 29 | 30 | self.error = raw_result.get('error') 31 | self.took = raw_result.get('took') 32 | self.timed_out = raw_result.get('timed_out') 33 | 34 | hits = raw_result.get('hits') or {} 35 | total = hits.get('total') 36 | if isinstance(total, dict): 37 | self.total = total['value'] 38 | else: 39 | self.total = total 40 | self.max_score = hits.get('max_score') 41 | self.hits = [] 42 | for hit in hits.get('hits', []): 43 | doc_type = get_doc_type_for_hit(hit) 44 | doc_cls = self._doc_cls_map.get(doc_type, DynamicDocument) 45 | self.hits.append(doc_cls(_hit=hit, _result=self)) 46 | 47 | self.aggregations = {} 48 | for agg_name, agg_expr in self._query_aggs.items(): 49 | raw_agg_data = raw_result.get('aggregations', {}).get(agg_name, {}) 50 | agg_result = agg_expr.build_agg_result( 51 | raw_agg_data, self._doc_cls_map, 52 | mapper_registry=self._mapper_registry 53 | ) 54 | self.aggregations[agg_name] = agg_result 55 | 56 | self.scroll_id = raw_result.get('_scroll_id') 57 | 58 | def __iter__(self): 59 | return iter(self.hits) 60 | 61 | def __len__(self): 62 | return len(self.hits) 63 | 64 | def get_aggregation(self, name): 65 | return self.aggregations.get(name) 66 | 67 | def _populate_instances(self, doc_cls): 68 | docs = [doc for doc in self.hits if isinstance(doc, 
doc_cls)] 69 | instances = self._instance_mappers.get(doc_cls)( 70 | [doc._id for doc in docs] 71 | ) 72 | for doc in docs: 73 | doc.__dict__['instance'] = instances.get(doc._id) 74 | 75 | 76 | class CountResult(Result): 77 | def __init__(self, raw_result): 78 | super(CountResult, self).__init__(raw_result) 79 | self.count = raw_result['count'] 80 | 81 | 82 | class ExistsResult(Result): 83 | def __init__(self, raw_result): 84 | super(ExistsResult, self).__init__(raw_result) 85 | self.exists = raw_result['exists'] 86 | 87 | 88 | class ExplainResult(Result): 89 | def __init__(self, raw_result, doc_cls_map=None, _store_hit=False): 90 | super(ExplainResult, self).__init__(raw_result) 91 | 92 | doc_cls_map = doc_cls_map or {} 93 | 94 | self.matched = raw_result['matched'] 95 | self.explanation = raw_result['explanation'] 96 | 97 | raw_hit = raw_result.get('get', {}).copy() 98 | self._id = raw_result['_id'] 99 | self._type = raw_result.get('_type') 100 | self._index = raw_result['_index'] 101 | self.hit = None 102 | if raw_hit: 103 | raw_hit['_id'] = raw_result['_id'] 104 | if doc_type := raw_result.get('_type'): 105 | raw_hit['_type'] = doc_type 106 | raw_hit['_index'] = raw_result['_index'] 107 | doc_type = get_doc_type_for_hit(raw_hit) 108 | doc_cls = doc_cls_map.get(doc_type, DynamicDocument) 109 | hit = doc_cls(raw_hit) 110 | self._id = hit._id 111 | self._type = hit._type 112 | if _store_hit: 113 | self.hit = hit 114 | 115 | 116 | class ActionResult(Result): 117 | def __init__(self, raw_result): 118 | super(ActionResult, self).__init__(raw_result) 119 | self.name = next(iter(raw_result.keys())) 120 | data = next(iter(raw_result.values())) 121 | self.status = data['status'] 122 | self.found = data.get('found') 123 | raw_error = data.get('error') 124 | if raw_error: 125 | if isinstance(raw_error, str): 126 | self.error = raw_error 127 | else: 128 | self.error = ErrorReason(raw_error) 129 | else: 130 | self.error = None 131 | self._index = data['_index'] 132 | self._type = data.get('_type') 133 | self._id = data['_id'] 134 | self._version = data.get('_version') 135 | 136 | 137 | class ErrorReason(object): 138 | def __init__(self, raw_error): 139 | self.type = raw_error.get('type') 140 | self.reason = raw_error.get('reason') 141 | self.caused_by = raw_error.get('caused_by') 142 | 143 | 144 | class BulkResult(Result): 145 | def __init__(self, raw_result): 146 | super(BulkResult, self).__init__(raw_result) 147 | self.took = raw_result['took'] 148 | self.errors = raw_result['errors'] 149 | self.items = list(map(ActionResult, raw_result['items'])) 150 | 151 | def __iter__(self): 152 | return iter(self.items) 153 | 154 | 155 | class DeleteResult(Result): 156 | def __init__(self, raw_result): 157 | super(DeleteResult, self).__init__(raw_result) 158 | self.found = raw_result.get('found') 159 | self.result = raw_result.get('result') 160 | self._index = raw_result['_index'] 161 | self._type = raw_result.get('_type') 162 | self._id = raw_result['_id'] 163 | self._version = raw_result['_version'] 164 | 165 | 166 | class DeleteByQueryResult(Result): 167 | def __init__(self, raw_result): 168 | super(DeleteByQueryResult, self).__init__(raw_result) 169 | self.took = raw_result.get('took') 170 | self.timed_out = raw_result.get('timed_out') 171 | self.deleted = raw_result.get('deleted') 172 | self.batches = raw_result.get('batches') 173 | self.version_conflicts = raw_result.get('version_conflicts') 174 | self.noops = raw_result.get('noops') 175 | self.retries = self.Retries(raw_result.get('retries') or {}) 176 
| self.throttled_millis = raw_result.get('throttled_millis') 177 | self.requests_per_second = raw_result.get('requests_per_second') 178 | self.throttled_until_millis = raw_result.get('throttled_until_millis') 179 | self.total = raw_result.get('total') 180 | self.failures = raw_result.get('failures') 181 | 182 | class Retries(object): 183 | def __init__(self, raw_result): 184 | self.bulk = raw_result.get('bulk') 185 | self.search = raw_result.get('search') 186 | 187 | 188 | class RefreshResult(Result): 189 | pass 190 | 191 | 192 | class FlushResult(Result): 193 | pass 194 | 195 | 196 | class ClearScrollResult(Result): 197 | def __init__(self, raw_result): 198 | super(ClearScrollResult, self).__init__(raw_result) 199 | self.succeeded = raw_result.get('succeeded') 200 | self.num_freed = raw_result.get('num_freed') 201 | 202 | 203 | class PutMappingResult(Result): 204 | pass 205 | -------------------------------------------------------------------------------- /elasticmagic/ext/asyncio/cluster.py: -------------------------------------------------------------------------------- 1 | from elasticmagic.compiler import get_compiler_by_es_version 2 | 3 | from ...cluster import BaseCluster 4 | from .index import AsyncIndex 5 | from .search import AsyncSearchQuery 6 | 7 | 8 | class AsyncCluster(BaseCluster): 9 | _index_cls = AsyncIndex 10 | _search_query_cls = AsyncSearchQuery 11 | 12 | async def _do_request(self, compiler, *args, **kwargs): 13 | compiled_query = compiler(*args, **kwargs) 14 | api_method = compiled_query.api_method(self._client) 15 | raw_res = await self._do_api_call( 16 | api_method, compiled_query.params, compiled_query.body 17 | ) 18 | return compiled_query.process_result(raw_res) 19 | 20 | async def _do_api_call(self, api_method, api_kwargs, body): 21 | if body is not None: 22 | api_kwargs['body'] = body 23 | return await api_method(**api_kwargs) 24 | 25 | async def get_es_version(self): 26 | if not self._es_version: 27 | self._es_version = self._es_version_result( 28 | await self._client.info() 29 | ) 30 | return self._es_version 31 | 32 | async def get_compiler(self): 33 | if self._compiler: 34 | return self._compiler 35 | else: 36 | return get_compiler_by_es_version(await self.get_es_version()) 37 | 38 | async def get( 39 | self, doc_or_id, index=None, doc_cls=None, doc_type=None, 40 | source=None, realtime=None, routing=None, parent=None, 41 | preference=None, refresh=None, version=None, version_type=None, 42 | **kwargs 43 | ): 44 | return await self._do_request( 45 | (await self.get_compiler()).compiled_get, 46 | doc_or_id, self._get_params(locals()), doc_cls=doc_cls 47 | ) 48 | 49 | async def multi_get( 50 | self, docs_or_ids, index=None, doc_cls=None, doc_type=None, 51 | source=None, parent=None, routing=None, preference=None, 52 | realtime=None, refresh=None, **kwargs 53 | ): 54 | return await self._do_request( 55 | (await self.get_compiler()).compiled_multi_get, 56 | docs_or_ids, self._multi_get_params(locals()), doc_cls=doc_cls 57 | ) 58 | 59 | mget = multi_get 60 | 61 | async def search( 62 | self, q, index=None, doc_type=None, routing=None, preference=None, 63 | timeout=None, search_type=None, query_cache=None, 64 | terminate_after=None, scroll=None, stats=None, **kwargs 65 | ): 66 | return await self._do_request( 67 | (await self.get_compiler()).compiled_search_query, 68 | q, self._search_params(locals()) 69 | ) 70 | 71 | async def explain( 72 | self, q, doc_or_id, index, doc_cls=None, routing=None, **kwargs 73 | ): 74 | return await self._do_request( 75 | (await 
self.get_compiler()).compiled_explain, 76 | q, doc_or_id, self._explain_params(locals()), doc_cls=doc_cls 77 | ) 78 | 79 | async def count( 80 | self, q=None, index=None, doc_type=None, routing=None, 81 | preference=None, **kwargs 82 | ): 83 | return await self._do_request( 84 | (await self.get_compiler()).compiled_count_query, 85 | q, self._search_params(locals()) 86 | ) 87 | 88 | async def exists( 89 | self, q=None, index=None, doc_type=None, refresh=None, 90 | routing=None, **kwargs 91 | ): 92 | return await self._do_request( 93 | (await self.get_compiler()).compiled_exists_query, 94 | q, self._search_params(locals()) 95 | ) 96 | 97 | async def scroll( 98 | self, scroll_id, scroll, doc_cls=None, instance_mapper=None, 99 | **kwargs 100 | ): 101 | return await self._do_request( 102 | (await self.get_compiler()).compiled_scroll, 103 | self._scroll_params(locals()), 104 | doc_cls=doc_cls, 105 | instance_mapper=instance_mapper, 106 | ) 107 | 108 | async def clear_scroll(self, scroll_id, **kwargs): 109 | params = self._preprocess_params(locals()) 110 | return self._clear_scroll_result( 111 | await self._client.clear_scroll(**params) 112 | ) 113 | 114 | async def multi_search( 115 | self, queries, index=None, doc_type=None, 116 | routing=None, preference=None, search_type=None, 117 | raise_on_error=None, **kwargs 118 | ): 119 | params, raise_on_error = self._multi_search_params(locals()) 120 | return await self._do_request( 121 | (await self.get_compiler()).compiled_multi_search, 122 | queries, params, raise_on_error=raise_on_error 123 | ) 124 | 125 | msearch = multi_search 126 | 127 | async def create_index( 128 | self, index, settings=None, mappings=None, **kwargs 129 | ): 130 | return await self._do_request( 131 | (await self.get_compiler()).compiled_create_index, 132 | settings, mappings, self._create_index_params(locals()) 133 | ) 134 | 135 | async def put_mapping( 136 | self, doc_cls_or_mapping, index, doc_type=None, 137 | allow_no_indices=None, expand_wildcards=None, 138 | ignore_conflicts=None, ignore_unavailable=None, 139 | master_timeout=None, timeout=None, **kwargs 140 | ): 141 | return await self._do_request( 142 | (await self.get_compiler()).compiled_put_mapping, 143 | doc_cls_or_mapping, self._put_mapping_params(locals()) 144 | ) 145 | 146 | async def add( 147 | self, docs, index=None, doc_type=None, refresh=None, 148 | timeout=None, consistency=None, replication=None, **kwargs 149 | ): 150 | actions, params = self._add_params(locals()) 151 | return await self.bulk(actions, **params) 152 | 153 | async def delete( 154 | self, doc_or_id, index, doc_cls=None, doc_type=None, 155 | timeout=None, consistency=None, replication=None, 156 | parent=None, routing=None, refresh=None, version=None, 157 | version_type=None, 158 | **kwargs 159 | ): 160 | return await self._do_request( 161 | (await self.get_compiler()).compiled_delete, 162 | doc_or_id, self._delete_params(locals()), doc_cls=doc_cls 163 | ) 164 | 165 | async def delete_by_query( 166 | self, q, index=None, doc_type=None, 167 | timeout=None, consistency=None, replication=None, routing=None, 168 | **kwargs 169 | ): 170 | return await self._do_request( 171 | (await self.get_compiler()).compiled_delete_by_query, 172 | q, self._search_params(locals()) 173 | ) 174 | 175 | async def bulk( 176 | self, actions, index=None, doc_type=None, refresh=None, 177 | timeout=None, consistency=None, replication=None, **kwargs 178 | ): 179 | return await self._do_request( 180 | (await self.get_compiler()).compiled_bulk, 181 | actions, 
self._bulk_params(locals()) 182 | ) 183 | 184 | async def refresh(self, index=None, **kwargs): 185 | params = self._preprocess_params(locals()) 186 | return self._refresh_result( 187 | await self._client.indices.refresh(**params) 188 | ) 189 | 190 | async def flush(self, index=None, **kwargs): 191 | params = self._preprocess_params(locals()) 192 | return self._flush_result( 193 | await self._client.indices.flush(**params) 194 | ) 195 | 196 | async def flush_synced(self, index=None, **kwargs): 197 | params = self._preprocess_params(locals()) 198 | return self._flush_result( 199 | await self._client.indices.flush_synced(**params) 200 | ) 201 | -------------------------------------------------------------------------------- /tests/test_codec.py: -------------------------------------------------------------------------------- 1 | from datetime import date, datetime 2 | from unittest.mock import Mock 3 | 4 | import pytest 5 | 6 | from elasticmagic.types import Integer, Long, Float, Boolean, Date, List 7 | from elasticmagic.ext.queryfilter.codec import BaseCodec, SimpleCodec 8 | 9 | 10 | def test_base_codec(): 11 | codec = BaseCodec() 12 | with pytest.raises(NotImplementedError): 13 | codec.decode({}) 14 | with pytest.raises(NotImplementedError): 15 | codec.encode({}) 16 | with pytest.raises(NotImplementedError): 17 | codec.decode_value('1', None) 18 | with pytest.raises(NotImplementedError): 19 | codec.encode_value(1, None) 20 | 21 | 22 | def test_simple_codec_decode(): 23 | codec = SimpleCodec() 24 | assert \ 25 | codec.decode({'category_id': [1, '2', 'null']}) == \ 26 | { 27 | 'category_id': { 28 | 'exact': ['1', '2', None], 29 | } 30 | } 31 | assert \ 32 | codec.decode({'country': ['ru', 'ua', 'null']}) == \ 33 | { 34 | 'country': { 35 | 'exact': ['ru', 'ua', None], 36 | } 37 | } 38 | # Webob's MultiDict 39 | data = {'country': ['ru', 'ua', 'null']} 40 | assert \ 41 | codec.decode( 42 | Mock( 43 | spec=['dict_of_lists'], 44 | dict_of_lists=Mock( 45 | return_value=data 46 | ), 47 | getall=Mock( 48 | return_value=data 49 | ) 50 | ) 51 | ) == \ 52 | { 53 | 'country': { 54 | 'exact': ['ru', 'ua', None], 55 | } 56 | } 57 | # Django's QueryDict 58 | assert \ 59 | codec.decode( 60 | Mock( 61 | spec=['lists'], 62 | getlist=Mock(), 63 | lists=Mock( 64 | return_value=data 65 | ) 66 | ) 67 | ) == \ 68 | { 69 | 'country': { 70 | 'exact': ['ru', 'ua', None], 71 | } 72 | } 73 | assert \ 74 | codec.decode({'country': ['ru', 'ua', 'null']}) == \ 75 | { 76 | 'country': { 77 | 'exact': ['ru', 'ua', None], 78 | } 79 | } 80 | assert \ 81 | codec.decode({'category': ['5', '6', 'a', 'null']}, 82 | {'category': Integer}) == \ 83 | { 84 | 'category': { 85 | 'exact': [5, 6, None] 86 | } 87 | } 88 | assert \ 89 | codec.decode({'is_available': ['true', 'false', '', 'null']}, 90 | {'is_available': Boolean}) == \ 91 | { 92 | 'is_available': { 93 | 'exact': [True, False, None] 94 | } 95 | } 96 | assert \ 97 | codec.decode({'manu': ['nokia', 'samsung']}, 98 | {'manu': None}) == \ 99 | { 100 | 'manu': { 101 | 'exact': ['nokia', 'samsung'], 102 | } 103 | } 104 | assert \ 105 | codec.decode({'is_active': ['true']}, {'is_active': Boolean}) == \ 106 | { 107 | 'is_active': { 108 | 'exact': [True], 109 | } 110 | } 111 | assert \ 112 | codec.decode( 113 | [('price__gte', ['100.1', '101.0']), ('price__lte', ['200'])], 114 | {'price': Float} 115 | ) == \ 116 | { 117 | 'price': { 118 | 'gte': [100.1, 101.0], 119 | 'lte': [200.0], 120 | } 121 | } 122 | assert \ 123 | codec.decode({'price__lte': '123a:bc'}, {'price': Float}) == \ 124 
| {} 125 | assert \ 126 | codec.decode({'price__gte': 'Inf', 'price__lte': 'NaN'}, 127 | {'price': Float}) == \ 128 | {} 129 | assert \ 130 | codec.decode({'size': '{}'.format(2 ** 31)}, {'size': Integer}) == \ 131 | {} 132 | assert \ 133 | codec.decode({'size': '{}'.format(2 ** 31)}, {'size': Long}) == \ 134 | { 135 | 'size': {'exact': [2147483648]} 136 | } 137 | assert \ 138 | codec.decode({'size': '{}'.format(2 ** 63)}, {'size': Long}) == \ 139 | {} 140 | with pytest.raises(TypeError): 141 | codec.decode('') 142 | 143 | 144 | def test_simple_codec_decode_custom_type(): 145 | class IntegerKeyword(Integer): 146 | """Integer that is stored as a keyword 147 | """ 148 | 149 | __visit_name__ = 'keyword' 150 | 151 | codec = SimpleCodec() 152 | assert \ 153 | codec.decode( 154 | {'company_id': ['123', 'asdf']}, 155 | {'company_id': IntegerKeyword} 156 | ) == \ 157 | { 158 | 'company_id': { 159 | 'exact': [123], 160 | } 161 | } 162 | 163 | 164 | def test_simple_codec_encode(): 165 | codec = SimpleCodec() 166 | 167 | assert \ 168 | codec.encode( 169 | { 170 | 'country': { 171 | 'exact': ['ru', 'ua', None], 172 | } 173 | } 174 | ) == \ 175 | {'country': ['ru', 'ua', 'null']} 176 | assert \ 177 | codec.encode( 178 | { 179 | 'price': { 180 | 'gte': [100.1, 101.0], 181 | 'lte': [200.0], 182 | } 183 | } 184 | ) == \ 185 | { 186 | 'price__gte': ['100.1', '101.0'], 187 | 'price__lte': ['200.0'], 188 | } 189 | assert \ 190 | codec.encode( 191 | { 192 | 'price': { 193 | 'gte': [100.1, 101.0], 194 | 'lte': [200.0], 195 | } 196 | }, 197 | {'price': Integer} 198 | ) == \ 199 | { 200 | 'price__gte': ['100', '101'], 201 | 'price__lte': ['200'], 202 | } 203 | assert \ 204 | codec.encode( 205 | { 206 | 'category': { 207 | 'exact': [11, 13], 208 | } 209 | }, 210 | {'category': List(Integer)} 211 | ) == \ 212 | { 213 | 'category': ['11', '13'], 214 | } 215 | assert \ 216 | codec.encode( 217 | { 218 | 'date_modified': { 219 | 'gt': [date(2019, 9, 1)] 220 | } 221 | }, 222 | {'date_modified': Date} 223 | ) == \ 224 | { 225 | 'date_modified__gt': ['2019-09-01'] 226 | } 227 | assert \ 228 | codec.encode( 229 | { 230 | 'date_modified': { 231 | 'gt': [datetime(2019, 9, 1, 23, 59, 59, 999999)] 232 | } 233 | }, 234 | {'date_modified': Date} 235 | ) == \ 236 | { 237 | 'date_modified__gt': ['2019-09-01T23:59:59.999999'] 238 | } 239 | assert \ 240 | codec.encode( 241 | { 242 | 'date_modified': { 243 | 'gt': [datetime(2019, 9, 1, 23, 59, 59, 999999)] 244 | } 245 | } 246 | ) == \ 247 | { 248 | 'date_modified__gt': ['2019-09-01 23:59:59.999999'] 249 | } 250 | with pytest.raises(ValueError): 251 | codec.encode( 252 | { 253 | 'date_modified': { 254 | 'gt': ['yesterday'] 255 | } 256 | }, 257 | {'date_modified': Date} 258 | ) 259 | assert \ 260 | codec.encode( 261 | { 262 | 'is_available': { 263 | 'exact': [True] 264 | } 265 | }, 266 | {'is_available': Boolean} 267 | ) == \ 268 | { 269 | 'is_available': ['true'] 270 | } 271 | -------------------------------------------------------------------------------- /benchmark/run.py: -------------------------------------------------------------------------------- 1 | # Benchmark result processing. 2 | import sys 3 | import argparse 4 | import inspect 5 | import json 6 | import time 7 | import cProfile 8 | import gc 9 | import coverage 10 | 11 | from collections import OrderedDict 12 | 13 | from elasticmagic import ( 14 | Document, Field, 15 | SearchQuery, 16 | MatchAll, 17 | ) 18 | from elasticmagic.result import SearchResult 19 | from elasticmagic.types import ( 20 | Boolean, Integer, Float, 
String, Date, 21 | List, 22 | ) 23 | from elasticmagic.agg import Terms 24 | 25 | 26 | def setup(): 27 | ap = argparse.ArgumentParser() 28 | sub = ap.add_subparsers(help='Valid commands') 29 | for command, setup, handler in [('sample', sample_setup, gen_sample), 30 | ('run', run_setup, run)]: 31 | sub_ap = sub.add_parser(command, help=handler.__doc__) 32 | sub_ap.set_defaults(action=handler) 33 | setup(sub_ap) 34 | return ap 35 | 36 | 37 | def sample_setup(ap): 38 | ap.add_argument('-p', '--pretty', dest='indent', 39 | action='store_const', const=2, 40 | default=None, 41 | help="generate indented JSON") 42 | ap.add_argument('-o', '--output', dest='output', 43 | type=argparse.FileType('w'), default=sys.stdout, 44 | help="Output file") 45 | ap.add_argument('-s', '--size', dest='size', 46 | type=lambda x: 10**int(x), 47 | default=1000, 48 | help="Population size, power of 10, default: 3") 49 | ap.add_argument('-t', '--type', dest='type', 50 | choices=['all', 'hits', 'aggs'], 51 | default='all', 52 | help="Fields to generate.") 53 | 54 | 55 | def run_setup(ap): 56 | ap.add_argument('test', choices=['simple'], help="Test to run") 57 | ap.add_argument('-i', '--input', dest='input', 58 | type=argparse.FileType('r'), default=sys.stdin, 59 | help="Input file") 60 | ap.add_argument('-p', '--profile', dest='profile', 61 | action='store_true', default=False) 62 | 63 | 64 | def main(): 65 | ap = setup() 66 | options = ap.parse_args() 67 | if not hasattr(options, 'action'): 68 | ap.print_help() 69 | return 70 | return options.action(options) 71 | 72 | 73 | # Actions 74 | 75 | 76 | def gen_sample(options): 77 | """Generate sample population.""" 78 | 79 | def unwrap_gen(obj): 80 | if inspect.isgenerator(obj): 81 | return list(obj) 82 | return obj 83 | 84 | def write(item, *items): 85 | obj = OrderedDict(filter(None, (item,) + items)) 86 | json.dump(obj, 87 | options.output, 88 | ensure_ascii=False, 89 | default=unwrap_gen, 90 | indent=options.indent) 91 | 92 | def hits_gen(): 93 | return OrderedDict(( 94 | ('total', options.size), 95 | ('max_score', 0), 96 | ('hits', gen_simple_document(options.size)), 97 | )) 98 | 99 | def aggs_gen(): 100 | return { 101 | "terms": { 102 | "doc_count_error_upper_bound": 0, 103 | "sum_other_doc_count": options.size, 104 | "buckets": gen_terms_buckets(options.size), 105 | }, 106 | } 107 | 108 | hits = options.type in ['all', 'hits'] 109 | aggs = options.type in ['all', 'aggs'] 110 | 111 | dumb_hits = {'total': options.size, 'hits': [], 'max_score': 0} 112 | 113 | write(("took", 0), 114 | ("timed_out", False), 115 | ("_shards", { 116 | "total": 1, 117 | "successful": 1, 118 | "failed": 0, 119 | }), 120 | ("hits", hits_gen()) if hits else ("hits", dumb_hits), 121 | ("aggregations", aggs_gen()) if aggs else (), 122 | ) 123 | 124 | 125 | def run(options): 126 | """Run benchmark.""" 127 | prof = cProfile.Profile() 128 | cov = coverage.Coverage() 129 | 130 | times = OrderedDict.fromkeys(['data_load', 'json_loads', 'searchResult']) 131 | start = time.monotonic() * 1000 132 | 133 | raw_data = options.input.read() 134 | times['data_load'] = time.monotonic() * 1000 - start 135 | start = time.monotonic() * 1000 136 | 137 | raw_results = json.loads(raw_data) 138 | times['json_loads'] = time.monotonic() * 1000 - start 139 | 140 | query = SearchQuery(MatchAll(), 141 | doc_cls=SimpleDocument) 142 | if 'aggregations' in raw_results: 143 | query = query.aggs(terms=Terms(SimpleDocument.integer_0)) 144 | gc.disable() 145 | if options.profile: 146 | cov.start() 147 | prof.enable() 148 | 149 | 
start = time.monotonic() * 1000  # measure only the SearchResult construction
SearchResult( 150 | raw_results, 151 | query._aggregations, 152 | doc_cls=query._get_doc_cls(), 153 | instance_mapper=query._instance_mapper) 154 | times['searchResult'] = time.monotonic() * 1000 - start 155 | if options.profile: 156 | prof.disable() 157 | cov.stop() 158 | gc.enable() 159 | 160 | for key, duration in times.items(): 161 | print("Took {} {:10.3f}ms".format(key, duration)) 162 | 163 | if options.profile: 164 | prof.print_stats('cumulative') 165 | cov.report() 166 | cov.html_report() 167 | 168 | 169 | class SimpleDocument(Document): 170 | __doc_type__ = 'simple' 171 | 172 | boolean_0 = Field(Boolean) 173 | integer_0 = Field(Integer) 174 | float_0 = Field(Float) 175 | string_0 = Field(String) 176 | date_0 = Field(Date) 177 | 178 | boolean_1 = Field(Boolean) 179 | integer_1 = Field(Integer) 180 | float_1 = Field(Float) 181 | string_1 = Field(String) 182 | date_1 = Field(Date) 183 | 184 | 185 | class ListsDocument(Document): 186 | __doc_type__ = 'lists' 187 | 188 | boolean_0 = Field(List(Boolean)) 189 | integer_0 = Field(List(Integer)) 190 | float_0 = Field(List(Float)) 191 | string_0 = Field(List(String)) 192 | date_0 = Field(List(Date)) 193 | 194 | boolean_1 = Field(List(Boolean)) 195 | integer_1 = Field(List(Integer)) 196 | float_1 = Field(List(Float)) 197 | string_1 = Field(List(String)) 198 | date_1 = Field(List(Date)) 199 | 200 | 201 | _INDEX = 'test' 202 | 203 | 204 | def gen_simple_document(N): 205 | for i in range(N): 206 | yield { 207 | '_index': _INDEX, 208 | '_type': 'simple', 209 | '_id': i, 210 | '_source': { 211 | 'boolean_0': bool(i % 2), 212 | 'integer_0': i, 213 | 'float_0': i / (10 ** len(str(i))), 214 | 'string_0': str(i), 215 | 'date_0': None, 216 | 'boolean_1': bool(1 + i % 2), 217 | 'integer_1': -i, 218 | 'float_1': i / (10 ** len(str(i))), 219 | 'string_1': str(i), 220 | 'date_1': None, 221 | } 222 | } 223 | 224 | 225 | def gen_terms_buckets(N): 226 | for i in range(N): 227 | yield { 228 | "key": i, 229 | "doc_count": i, 230 | } 231 | 232 | 233 | def gen_lists_document(N): 234 | K = int(N * .1) 235 | for i in range(N): 236 | yield { 237 | '_index': _INDEX, 238 | '_type': 'lists', 239 | '_id': i, 240 | '_source': { 241 | 'boolean_0': [bool(i*a % 2) for a in range(K)], 242 | 'integer_0': [i] * K, 243 | 'float_0': [(i / (10 ** len(str(i))))] * K, 244 | 'string_0': [str(i)] * K, 245 | 'date_0': None, 246 | 'boolean_1': [bool(1 + i % 2)] * K, 247 | 'integer_1': [-i] * K, 248 | 'float_1': [(i / (10 ** len(str(i))))] * K, 249 | 'string_1': [str(i)] * K, 250 | 'date_1': None, 251 | } 252 | } 253 | 254 | 255 | if __name__ == '__main__': 256 | main() 257 | -------------------------------------------------------------------------------- /elasticmagic/index.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from abc import ABCMeta 4 | 5 | from .document import DynamicDocument 6 | from .util import to_camel_case 7 | 8 | 9 | class BaseIndex(metaclass=ABCMeta): 10 | def __init__(self, cluster, name): 11 | self._cluster = cluster 12 | self._name = name 13 | 14 | self._doc_cls_cache = {} 15 | 16 | def __getitem__(self, doc_type): 17 | return self.get_doc_cls(doc_type) 18 | 19 | def get_doc_cls(self, doc_type): 20 | if doc_type not in self._doc_cls_cache: 21 | self._doc_cls_cache[doc_type] = type( 22 | '{}{}'.format(to_camel_case(doc_type), 'Document'), 23 | (DynamicDocument,), 24 | {'__doc_type__': doc_type} 25 | ) 26 | return self._doc_cls_cache[doc_type] 27 | 28 | def get_name(self): 29
| return self._name 30 | 31 | def get_cluster(self): 32 | return self._cluster 33 | 34 | def get_settings(self): 35 | return self._cluster._client.indices.get_settings(index=self._name) 36 | 37 | def search_query(self, *args, **kwargs): 38 | """Returns a :class:`search.SearchQuery` instance that is bound to this 39 | index. 40 | """ 41 | kwargs['index'] = self 42 | return self._cluster._search_query_cls(*args, **kwargs) 43 | 44 | def query(self, *args, **kwargs): 45 | return self.search_query(*args, **kwargs) 46 | 47 | 48 | class Index(BaseIndex): 49 | # Methods that do requests to elasticsearch 50 | 51 | def get_es_version(self): 52 | return self._cluster.get_es_version() 53 | 54 | def get_compiler(self): 55 | return self._cluster.get_compiler() 56 | 57 | def get( 58 | self, doc_or_id, doc_cls=None, doc_type=None, source=None, 59 | realtime=None, routing=None, preference=None, refresh=None, 60 | version=None, version_type=None, **kwargs 61 | ): 62 | return self._cluster.get( 63 | doc_or_id, index=self._name, doc_cls=doc_cls, doc_type=doc_type, 64 | source=source, realtime=realtime, routing=routing, 65 | preference=preference, refresh=refresh, version=version, 66 | version_type=version_type, **kwargs 67 | ) 68 | 69 | def multi_get( 70 | self, docs, doc_type=None, source=None, realtime=None, 71 | routing=None, preference=None, refresh=None, **kwargs 72 | ): 73 | return self._cluster.multi_get( 74 | docs, index=self._name, doc_type=doc_type, source=source, 75 | realtime=realtime, routing=routing, preference=preference, 76 | refresh=refresh, **kwargs 77 | ) 78 | 79 | mget = multi_get 80 | 81 | def search( 82 | self, q, doc_type=None, routing=None, preference=None, 83 | timeout=None, search_type=None, query_cache=None, 84 | terminate_after=None, scroll=None, stats=None, **kwargs 85 | ): 86 | return self._cluster.search( 87 | q, index=self._name, doc_type=doc_type, 88 | routing=routing, preference=preference, timeout=timeout, 89 | search_type=search_type, query_cache=query_cache, 90 | terminate_after=terminate_after, scroll=scroll, stats=stats, 91 | **kwargs 92 | ) 93 | 94 | def explain(self, q, doc_or_id, doc_cls=None, routing=None, **kwargs): 95 | return self._cluster.explain( 96 | q, doc_or_id, index=self._name, doc_cls=doc_cls, routing=routing, 97 | **kwargs 98 | ) 99 | 100 | def multi_search( 101 | self, queries, doc_type=None, routing=None, preference=None, 102 | search_type=None, **kwargs 103 | ): 104 | return self._cluster.multi_search( 105 | queries, index=self._name, doc_type=doc_type, 106 | routing=routing, preference=preference, search_type=search_type, 107 | **kwargs 108 | ) 109 | 110 | msearch = multi_search 111 | 112 | def count( 113 | self, q=None, doc_type=None, routing=None, preference=None, 114 | **kwargs 115 | ): 116 | return self._cluster.count( 117 | q, index=self._name, doc_type=doc_type, routing=routing, 118 | preference=preference, **kwargs 119 | ) 120 | 121 | def exists( 122 | self, q=None, doc_type=None, refresh=None, routing=None, **kwargs 123 | ): 124 | return self._cluster.exists( 125 | q, index=self._name, doc_type=doc_type, refresh=refresh, 126 | routing=routing, **kwargs 127 | ) 128 | 129 | def scroll( 130 | self, scroll_id, scroll, doc_cls=None, instance_mapper=None, 131 | **kwargs 132 | ): 133 | return self._cluster.scroll( 134 | scroll_id, scroll, 135 | doc_cls=doc_cls, 136 | instance_mapper=instance_mapper, 137 | **kwargs 138 | ) 139 | 140 | def clear_scroll(self, scroll_id, **kwargs): 141 | return self._cluster.clear_scroll(scroll_id, **kwargs) 142 | 143 | 
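# A minimal usage sketch (illustrative only; the Elasticsearch client,
# the index name and the field below are hypothetical, not part of this
# module):
#
#     from elasticsearch import Elasticsearch
#     from elasticmagic import Cluster
#
#     cluster = Cluster(Elasticsearch())
#     index = cluster['products']          # Cluster.get_index()
#     ProductDocument = index['product']   # dynamic document class, see above
#     sq = index.search_query(ProductDocument.status == 0)
#     result = index.search(sq)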
def put_mapping( 144 | self, doc_cls_or_mapping, doc_type=None, allow_no_indices=None, 145 | expand_wildcards=None, ignore_conflicts=None, 146 | ignore_unavailable=None, master_timeout=None, timeout=None, 147 | **kwargs 148 | ): 149 | return self._cluster.put_mapping( 150 | doc_cls_or_mapping, index=self._name, doc_type=doc_type, 151 | allow_no_indices=allow_no_indices, 152 | expand_wildcards=expand_wildcards, 153 | ignore_conflicts=ignore_conflicts, 154 | ignore_unavailable=ignore_unavailable, 155 | master_timeout=master_timeout, timeout=timeout, 156 | **kwargs 157 | ) 158 | 159 | def add( 160 | self, docs, doc_type=None, refresh=None, timeout=None, 161 | consistency=None, replication=None, **kwargs 162 | ): 163 | return self._cluster.add( 164 | docs, index=self._name, doc_type=doc_type, refresh=refresh, 165 | timeout=timeout, consistency=consistency, replication=replication, 166 | **kwargs 167 | ) 168 | 169 | def delete( 170 | self, doc_or_id, doc_cls=None, doc_type=None, 171 | timeout=None, consistency=None, replication=None, 172 | parent=None, routing=None, refresh=None, version=None, 173 | version_type=None, 174 | **kwargs 175 | ): 176 | return self._cluster.delete( 177 | doc_or_id, index=self._name, doc_cls=doc_cls, doc_type=doc_type, 178 | timeout=timeout, consistency=consistency, replication=replication, 179 | parent=parent, routing=routing, refresh=refresh, 180 | version=version, version_type=version_type, 181 | **kwargs 182 | ) 183 | 184 | def delete_by_query( 185 | self, q, doc_type=None, routing=None, 186 | conflicts=None, refresh=None, timeout=None, 187 | scroll=None, scroll_size=None, 188 | wait_for_completion=None, requests_per_second=None, 189 | **kwargs 190 | ): 191 | return self._cluster.delete_by_query( 192 | q, index=self._name, doc_type=doc_type, routing=routing, 193 | conflicts=conflicts, refresh=refresh, timeout=timeout, 194 | scroll=scroll, scroll_size=scroll_size, 195 | wait_for_completion=wait_for_completion, 196 | requests_per_second=requests_per_second, 197 | **kwargs 198 | ) 199 | 200 | def bulk(self, actions, doc_type=None, refresh=None, **kwargs): 201 | return self._cluster.bulk( 202 | actions, index=self._name, doc_type=doc_type, refresh=refresh, 203 | **kwargs 204 | ) 205 | 206 | def refresh(self, **kwargs): 207 | return self._cluster.refresh(index=self._name, **kwargs) 208 | 209 | def flush(self, **kwargs): 210 | return self._cluster.flush(index=self._name, **kwargs) 211 | 212 | def flush_synced(self, **kwargs): 213 | return self._cluster.flush_synced(index=self._name, **kwargs) 214 | -------------------------------------------------------------------------------- /tests/test_actions.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import pytest 3 | 4 | from elasticmagic import Document, DynamicDocument, Field, Script 5 | from elasticmagic import actions 6 | from elasticmagic.compiler import Compiler_6_0 7 | from elasticmagic.compiler import Compiler_7_0 8 | from elasticmagic.types import Date 9 | from elasticmagic.types import Integer 10 | from elasticmagic.types import List 11 | from elasticmagic.types import Text 12 | 13 | 14 | class OrderDocument(Document): 15 | __doc_type__ = 'order' 16 | 17 | product_ids = Field(List(Integer)) 18 | date_created = Field(Date) 19 | 20 | 21 | class ProductWithoutTypeDocument(Document): 22 | name = Field(Text) 23 | 24 | 25 | @pytest.fixture( 26 | params=[ 27 | Compiler_6_0, 28 | Compiler_7_0, 29 | ] 30 | ) 31 | def compiler(request): 32 | yield request.param 33 |
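# The parametrized `compiler` fixture above runs each test twice: once with
# the 6.x compiler, whose bulk metadata still carries `_type`, and once with
# the 7.x compiler, which drops it; the `requires_doc_type` branches below
# check exactly that difference.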
34 | @pytest.fixture 35 | def order_doc(): 36 | yield OrderDocument( 37 | product_ids=[1, 2, 3], 38 | date_created=datetime.datetime(2019, 1, 1) 39 | ) 40 | 41 | 42 | def test_index_action_dict(compiler): 43 | action = actions.Index( 44 | {'_id': 1, '_type': 'test', 'name': 'Test'}, 45 | refresh=True 46 | ) 47 | if compiler.features.requires_doc_type: 48 | expected_meta = { 49 | 'index': { 50 | '_id': 1, 51 | '_type': 'test', 52 | 'refresh': True, 53 | } 54 | } 55 | else: 56 | expected_meta = { 57 | 'index': { 58 | '_id': 1, 59 | 'refresh': True, 60 | } 61 | } 62 | assert action.to_meta(compiler=compiler) == expected_meta 63 | assert action.to_source(compiler=compiler) == { 64 | 'name': 'Test', 65 | } 66 | 67 | 68 | def test_index_action_document(compiler, order_doc): 69 | action = actions.Index(order_doc, index='orders-2019') 70 | if compiler.features.requires_doc_type: 71 | expected_meta = { 72 | 'index': { 73 | '_type': 'order', 74 | '_index': 'orders-2019', 75 | } 76 | } 77 | else: 78 | expected_meta = { 79 | 'index': { 80 | '_index': 'orders-2019', 81 | } 82 | } 83 | assert action.to_meta(compiler=compiler) == expected_meta 84 | assert action.to_source(compiler=compiler) == { 85 | 'product_ids': [1, 2, 3], 86 | 'date_created': datetime.datetime(2019, 1, 1), 87 | } 88 | 89 | 90 | def test_index_action_document_without_doc_type(compiler): 91 | doc = ProductWithoutTypeDocument(name='Type is unknown') 92 | action = actions.Index(doc, index='any', doc_type='product') 93 | assert action.to_meta(compiler=compiler) == { 94 | 'index': { 95 | '_type': 'product', 96 | '_index': 'any', 97 | } 98 | } 99 | assert action.to_source(compiler=compiler) == { 100 | 'name': 'Type is unknown', 101 | } 102 | 103 | 104 | def test_delete_action_dict(compiler): 105 | action = actions.Delete( 106 | {'_id': 1, '_type': 'test', 'name': 'Test'}, 107 | routing=2 108 | ) 109 | if compiler.features.requires_doc_type: 110 | expected_meta = { 111 | 'delete': { 112 | '_id': 1, 113 | '_type': 'test', 114 | 'routing': 2, 115 | } 116 | } 117 | else: 118 | expected_meta = { 119 | 'delete': { 120 | '_id': 1, 121 | 'routing': 2, 122 | } 123 | } 124 | assert action.to_meta(compiler=compiler) == expected_meta 125 | assert action.to_source(compiler=compiler) is None 126 | 127 | 128 | def test_delete_action_document(compiler, order_doc): 129 | action = actions.Delete(order_doc, index='orders-2019') 130 | if compiler.features.requires_doc_type: 131 | expected_meta = { 132 | 'delete': { 133 | '_type': 'order', 134 | '_index': 'orders-2019', 135 | } 136 | } 137 | else: 138 | expected_meta = { 139 | 'delete': { 140 | '_index': 'orders-2019', 141 | } 142 | } 143 | assert action.to_meta(compiler=compiler) == expected_meta 144 | assert action.to_source(compiler=compiler) is None 145 | 146 | 147 | def test_delete_action_dynamic_document(compiler): 148 | action = actions.Delete( 149 | DynamicDocument(_id='1', _type='order', _index='orders-2019'), 150 | index='orders-2022' 151 | ) 152 | if compiler.features.requires_doc_type: 153 | expected_meta = { 154 | 'delete': { 155 | '_id': '1', 156 | '_type': 'order', 157 | '_index': 'orders-2022', 158 | } 159 | } 160 | else: 161 | expected_meta = { 162 | 'delete': { 163 | '_id': '1', 164 | '_index': 'orders-2022', 165 | } 166 | } 167 | assert action.to_meta(compiler=compiler) == expected_meta 168 | assert action.to_source(compiler=compiler) is None 169 | 170 | 171 | def test_create_action_dict(compiler): 172 | action = actions.Create( 173 | {'_id': 1, '_type': 'test', 'name': 'Test'}, 174 |
refresh=True 175 | ) 176 | if compiler.features.requires_doc_type: 177 | expected_meta = { 178 | 'create': { 179 | '_id': 1, 180 | '_type': 'test', 181 | 'refresh': True, 182 | } 183 | } 184 | else: 185 | expected_meta = { 186 | 'create': { 187 | '_id': 1, 188 | 'refresh': True, 189 | } 190 | } 191 | assert action.to_meta(compiler=compiler) == expected_meta 192 | assert action.to_source(compiler=compiler) == { 193 | 'name': 'Test', 194 | } 195 | 196 | 197 | def test_create_action_document(compiler, order_doc): 198 | action = actions.Create(order_doc, index='orders-2019') 199 | if compiler.features.requires_doc_type: 200 | expected_meta = { 201 | 'create': { 202 | '_type': 'order', 203 | '_index': 'orders-2019', 204 | } 205 | } 206 | else: 207 | expected_meta = { 208 | 'create': { 209 | '_index': 'orders-2019', 210 | } 211 | } 212 | assert action.to_meta(compiler=compiler) == expected_meta 213 | assert action.to_source(compiler=compiler) == { 214 | 'product_ids': [1, 2, 3], 215 | 'date_created': datetime.datetime(2019, 1, 1), 216 | } 217 | 218 | 219 | def test_update_action_dict(compiler): 220 | action = actions.Update( 221 | {'_id': 1, '_type': 'test', 'name': 'Test'}, 222 | refresh=True 223 | ) 224 | if compiler.features.requires_doc_type: 225 | expected_meta = { 226 | 'update': { 227 | '_id': 1, 228 | '_type': 'test', 229 | 'refresh': True, 230 | } 231 | } 232 | else: 233 | expected_meta = { 234 | 'update': { 235 | '_id': 1, 236 | 'refresh': True, 237 | } 238 | } 239 | assert action.to_meta(compiler=compiler) == expected_meta 240 | assert action.to_source(compiler=compiler) == { 241 | 'doc': { 242 | 'name': 'Test', 243 | } 244 | } 245 | 246 | 247 | @pytest.mark.parametrize('compiler', [Compiler_6_0, Compiler_7_0]) 248 | def test_update_action_script(compiler): 249 | action = actions.Update( 250 | {'_id': 1, '_type': 'test', 'name': 'Test'}, 251 | script=Script(inline='ctx._source.product_ids.append(911)'), 252 | upsert={'name': 'Test via upsert'}, 253 | refresh=True 254 | ) 255 | if compiler.features.requires_doc_type: 256 | expected_meta = { 257 | 'update': { 258 | '_id': 1, 259 | '_type': 'test', 260 | 'refresh': True, 261 | } 262 | } 263 | else: 264 | expected_meta = { 265 | 'update': { 266 | '_id': 1, 267 | 'refresh': True, 268 | } 269 | } 270 | assert action.to_meta(compiler=compiler) == expected_meta 271 | 272 | assert action.to_source(compiler=compiler) == { 273 | 'script': { 274 | 'source': 'ctx._source.product_ids.append(911)', 275 | }, 276 | 'upsert': { 277 | 'name': 'Test via upsert', 278 | }, 279 | } 280 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^<target^>` where ^<target^> is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo.
json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. epub3 to make an epub3 31 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 32 | echo. text to make text files 33 | echo. man to make manual pages 34 | echo. texinfo to make Texinfo files 35 | echo. gettext to make PO message catalogs 36 | echo. changes to make an overview over all changed/added/deprecated items 37 | echo. xml to make Docutils-native XML files 38 | echo. pseudoxml to make pseudoxml-XML files for display purposes 39 | echo. linkcheck to check all external links for integrity 40 | echo. doctest to run all doctests embedded in the documentation if enabled 41 | echo. coverage to run coverage check of the documentation if enabled 42 | echo. dummy to check syntax errors of document sources 43 | goto end 44 | ) 45 | 46 | if "%1" == "clean" ( 47 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 48 | del /q /s %BUILDDIR%\* 49 | goto end 50 | ) 51 | 52 | 53 | REM Check if sphinx-build is available and fallback to Python version if any 54 | %SPHINXBUILD% 1>NUL 2>NUL 55 | if errorlevel 9009 goto sphinx_python 56 | goto sphinx_ok 57 | 58 | :sphinx_python 59 | 60 | set SPHINXBUILD=python -m sphinx.__init__ 61 | %SPHINXBUILD% 2> nul 62 | if errorlevel 9009 ( 63 | echo. 64 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 65 | echo.installed, then set the SPHINXBUILD environment variable to point 66 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 67 | echo.may add the Sphinx directory to PATH. 68 | echo. 69 | echo.If you don't have Sphinx installed, grab it from 70 | echo.http://sphinx-doc.org/ 71 | exit /b 1 72 | ) 73 | 74 | :sphinx_ok 75 | 76 | 77 | if "%1" == "html" ( 78 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 79 | if errorlevel 1 exit /b 1 80 | echo. 81 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 82 | goto end 83 | ) 84 | 85 | if "%1" == "dirhtml" ( 86 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 87 | if errorlevel 1 exit /b 1 88 | echo. 89 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 90 | goto end 91 | ) 92 | 93 | if "%1" == "singlehtml" ( 94 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 95 | if errorlevel 1 exit /b 1 96 | echo. 97 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 98 | goto end 99 | ) 100 | 101 | if "%1" == "pickle" ( 102 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 103 | if errorlevel 1 exit /b 1 104 | echo. 105 | echo.Build finished; now you can process the pickle files. 106 | goto end 107 | ) 108 | 109 | if "%1" == "json" ( 110 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 111 | if errorlevel 1 exit /b 1 112 | echo. 113 | echo.Build finished; now you can process the JSON files. 114 | goto end 115 | ) 116 | 117 | if "%1" == "htmlhelp" ( 118 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 119 | if errorlevel 1 exit /b 1 120 | echo. 121 | echo.Build finished; now you can run HTML Help Workshop with the ^ 122 | .hhp project file in %BUILDDIR%/htmlhelp. 123 | goto end 124 | ) 125 | 126 | if "%1" == "qthelp" ( 127 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 128 | if errorlevel 1 exit /b 1 129 | echo. 
130 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 131 | .qhcp project file in %BUILDDIR%/qthelp, like this: 132 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\elasticmagic.qhcp 133 | echo.To view the help file: 134 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\elasticmagic.qhc 135 | goto end 136 | ) 137 | 138 | if "%1" == "devhelp" ( 139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 140 | if errorlevel 1 exit /b 1 141 | echo. 142 | echo.Build finished. 143 | goto end 144 | ) 145 | 146 | if "%1" == "epub" ( 147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 148 | if errorlevel 1 exit /b 1 149 | echo. 150 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 151 | goto end 152 | ) 153 | 154 | if "%1" == "epub3" ( 155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 156 | if errorlevel 1 exit /b 1 157 | echo. 158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. 159 | goto end 160 | ) 161 | 162 | if "%1" == "latex" ( 163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 164 | if errorlevel 1 exit /b 1 165 | echo. 166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdf" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "latexpdfja" ( 181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 182 | cd %BUILDDIR%/latex 183 | make all-pdf-ja 184 | cd %~dp0 185 | echo. 186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 187 | goto end 188 | ) 189 | 190 | if "%1" == "text" ( 191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 192 | if errorlevel 1 exit /b 1 193 | echo. 194 | echo.Build finished. The text files are in %BUILDDIR%/text. 195 | goto end 196 | ) 197 | 198 | if "%1" == "man" ( 199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 200 | if errorlevel 1 exit /b 1 201 | echo. 202 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 203 | goto end 204 | ) 205 | 206 | if "%1" == "texinfo" ( 207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 208 | if errorlevel 1 exit /b 1 209 | echo. 210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 211 | goto end 212 | ) 213 | 214 | if "%1" == "gettext" ( 215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 216 | if errorlevel 1 exit /b 1 217 | echo. 218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 219 | goto end 220 | ) 221 | 222 | if "%1" == "changes" ( 223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 224 | if errorlevel 1 exit /b 1 225 | echo. 226 | echo.The overview file is in %BUILDDIR%/changes. 227 | goto end 228 | ) 229 | 230 | if "%1" == "linkcheck" ( 231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 232 | if errorlevel 1 exit /b 1 233 | echo. 234 | echo.Link check complete; look for any errors in the above output ^ 235 | or in %BUILDDIR%/linkcheck/output.txt. 236 | goto end 237 | ) 238 | 239 | if "%1" == "doctest" ( 240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 241 | if errorlevel 1 exit /b 1 242 | echo. 243 | echo.Testing of doctests in the sources finished, look at the ^ 244 | results in %BUILDDIR%/doctest/output.txt.
245 | goto end 246 | ) 247 | 248 | if "%1" == "coverage" ( 249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 250 | if errorlevel 1 exit /b 1 251 | echo. 252 | echo.Testing of coverage in the sources finished, look at the ^ 253 | results in %BUILDDIR%/coverage/python.txt. 254 | goto end 255 | ) 256 | 257 | if "%1" == "xml" ( 258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 259 | if errorlevel 1 exit /b 1 260 | echo. 261 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 262 | goto end 263 | ) 264 | 265 | if "%1" == "pseudoxml" ( 266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 267 | if errorlevel 1 exit /b 1 268 | echo. 269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 270 | goto end 271 | ) 272 | 273 | if "%1" == "dummy" ( 274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy 275 | if errorlevel 1 exit /b 1 276 | echo. 277 | echo.Build finished. Dummy builder generates no files. 278 | goto end 279 | ) 280 | 281 | :end 282 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make <target>' where <target> is one of" 25 | @echo " html to make standalone HTML files" 26 | @echo " dirhtml to make HTML files named index.html in directories" 27 | @echo " singlehtml to make a single large HTML file" 28 | @echo " pickle to make pickle files" 29 | @echo " json to make JSON files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " applehelp to make an Apple Help Book" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " epub3 to make an epub3" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | @echo " dummy to check syntax errors of document sources" 51 | 52 | .PHONY: clean 53 | clean: 54 | rm -rf $(BUILDDIR)/* 55 | 56 | .PHONY: html 57 | html: 58 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 61 | 62 | .PHONY: dirhtml 63 | dirhtml: 64 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 65 | @echo 66 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 67 | 68 | .PHONY: singlehtml 69 | singlehtml: 70 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 71 | @echo 72 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 73 | 74 | .PHONY: pickle 75 | pickle: 76 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 77 | @echo 78 | @echo "Build finished; now you can process the pickle files." 79 | 80 | .PHONY: json 81 | json: 82 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 83 | @echo 84 | @echo "Build finished; now you can process the JSON files." 85 | 86 | .PHONY: htmlhelp 87 | htmlhelp: 88 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 89 | @echo 90 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 91 | ".hhp project file in $(BUILDDIR)/htmlhelp." 92 | 93 | .PHONY: qthelp 94 | qthelp: 95 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 96 | @echo 97 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 98 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 99 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/elasticmagic.qhcp" 100 | @echo "To view the help file:" 101 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/elasticmagic.qhc" 102 | 103 | .PHONY: applehelp 104 | applehelp: 105 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 106 | @echo 107 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 108 | @echo "N.B.
You won't be able to view it unless you put it in" \ 109 | "~/Library/Documentation/Help or install it in your application" \ 110 | "bundle." 111 | 112 | .PHONY: devhelp 113 | devhelp: 114 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 115 | @echo 116 | @echo "Build finished." 117 | @echo "To view the help file:" 118 | @echo "# mkdir -p $$HOME/.local/share/devhelp/elasticmagic" 119 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/elasticmagic" 120 | @echo "# devhelp" 121 | 122 | .PHONY: epub 123 | epub: 124 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 125 | @echo 126 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 127 | 128 | .PHONY: epub3 129 | epub3: 130 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 131 | @echo 132 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 133 | 134 | .PHONY: latex 135 | latex: 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo 138 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 139 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 140 | "(use \`make latexpdf' here to do that automatically)." 141 | 142 | .PHONY: latexpdf 143 | latexpdf: 144 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 145 | @echo "Running LaTeX files through pdflatex..." 146 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 147 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 148 | 149 | .PHONY: latexpdfja 150 | latexpdfja: 151 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 152 | @echo "Running LaTeX files through platex and dvipdfmx..." 153 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 154 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 155 | 156 | .PHONY: text 157 | text: 158 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 159 | @echo 160 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 161 | 162 | .PHONY: man 163 | man: 164 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 165 | @echo 166 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 167 | 168 | .PHONY: texinfo 169 | texinfo: 170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 171 | @echo 172 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 173 | @echo "Run \`make' in that directory to run these through makeinfo" \ 174 | "(use \`make info' here to do that automatically)." 175 | 176 | .PHONY: info 177 | info: 178 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 179 | @echo "Running Texinfo files through makeinfo..." 180 | make -C $(BUILDDIR)/texinfo info 181 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 182 | 183 | .PHONY: gettext 184 | gettext: 185 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 186 | @echo 187 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 188 | 189 | .PHONY: changes 190 | changes: 191 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 192 | @echo 193 | @echo "The overview file is in $(BUILDDIR)/changes." 194 | 195 | .PHONY: linkcheck 196 | linkcheck: 197 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 198 | @echo 199 | @echo "Link check complete; look for any errors in the above output " \ 200 | "or in $(BUILDDIR)/linkcheck/output.txt." 
201 | 202 | .PHONY: doctest 203 | doctest: 204 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 205 | @echo "Testing of doctests in the sources finished, look at the " \ 206 | "results in $(BUILDDIR)/doctest/output.txt." 207 | 208 | .PHONY: coverage 209 | coverage: 210 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 211 | @echo "Testing of coverage in the sources finished, look at the " \ 212 | "results in $(BUILDDIR)/coverage/python.txt." 213 | 214 | .PHONY: xml 215 | xml: 216 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 217 | @echo 218 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 219 | 220 | .PHONY: pseudoxml 221 | pseudoxml: 222 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 223 | @echo 224 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 225 | 226 | .PHONY: dummy 227 | dummy: 228 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 229 | @echo 230 | @echo "Build finished. Dummy builder generates no files." 231 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # elasticmagic documentation build configuration file, created by 4 | # sphinx-quickstart on Thu Nov 3 13:52:21 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.insert(0, os.path.abspath('..')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | 'sphinx.ext.doctest', 34 | 'sphinx.ext.todo', 35 | 'sphinx.ext.coverage', 36 | 'sphinx.ext.viewcode', 37 | ] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ['_templates'] 41 | 42 | # The suffix(es) of source filenames. 43 | # You can specify multiple suffix as a list of string: 44 | # source_suffix = ['.rst', '.md'] 45 | source_suffix = '.rst' 46 | 47 | # The encoding of source files. 48 | #source_encoding = 'utf-8-sig' 49 | 50 | # The master toctree document. 51 | master_doc = 'index' 52 | 53 | # General information about the project. 54 | project = u'elasticmagic' 55 | copyright = u'2016, Alexander Koval <kovalidis@gmail.com>' 56 | author = u'Alexander Koval <kovalidis@gmail.com>' 57 | 58 | # The version info for the project you're documenting, acts as replacement for 59 | # |version| and |release|, also used in various other places throughout the 60 | # built documents. 61 | # 62 | # The short X.Y version. 63 | version = u'0.1' 64 | # The full version, including alpha/beta/rc tags.
65 | release = u'0.1.0-alpha' 66 | 67 | # The language for content autogenerated by Sphinx. Refer to documentation 68 | # for a list of supported languages. 69 | # 70 | # This is also used if you do content translation via gettext catalogs. 71 | # Usually you set "language" from the command line for these cases. 72 | language = None 73 | 74 | # There are two options for replacing |today|: either, you set today to some 75 | # non-false value, then it is used: 76 | #today = '' 77 | # Else, today_fmt is used as the format for a strftime call. 78 | #today_fmt = '%B %d, %Y' 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | # This patterns also effect to html_static_path and html_extra_path 83 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 84 | 85 | # The reST default role (used for this markup: `text`) to use for all 86 | # documents. 87 | #default_role = None 88 | 89 | # If true, '()' will be appended to :func: etc. cross-reference text. 90 | #add_function_parentheses = True 91 | 92 | # If true, the current module name will be prepended to all description 93 | # unit titles (such as .. function::). 94 | #add_module_names = True 95 | 96 | # If true, sectionauthor and moduleauthor directives will be shown in the 97 | # output. They are ignored by default. 98 | #show_authors = False 99 | 100 | # The name of the Pygments (syntax highlighting) style to use. 101 | pygments_style = 'sphinx' 102 | 103 | # A list of ignored prefixes for module index sorting. 104 | #modindex_common_prefix = [] 105 | 106 | # If true, keep warnings as "system message" paragraphs in the built documents. 107 | #keep_warnings = False 108 | 109 | # If true, `todo` and `todoList` produce output, else they produce nothing. 110 | todo_include_todos = True 111 | 112 | 113 | # -- Options for HTML output ---------------------------------------------- 114 | 115 | # The theme to use for HTML and HTML Help pages. See the documentation for 116 | # a list of builtin themes. 117 | html_theme = 'default' 118 | 119 | # Theme options are theme-specific and customize the look and feel of a theme 120 | # further. For a list of options available for each theme, see the 121 | # documentation. 122 | #html_theme_options = {} 123 | 124 | # Add any paths that contain custom themes here, relative to this directory. 125 | #html_theme_path = [] 126 | 127 | # The name for this set of Sphinx documents. 128 | # "<project> v<release> documentation" by default. 129 | #html_title = u'elasticmagic v0.0.9-alpha' 130 | 131 | # A shorter title for the navigation bar. Default is the same as html_title. 132 | #html_short_title = None 133 | 134 | # The name of an image file (relative to this directory) to place at the top 135 | # of the sidebar. 136 | #html_logo = None 137 | 138 | # The name of an image file (relative to this directory) to use as a favicon of 139 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 140 | # pixels large. 141 | #html_favicon = None 142 | 143 | # Add any paths that contain custom static files (such as style sheets) here, 144 | # relative to this directory. They are copied after the builtin static files, 145 | # so a file named "default.css" will overwrite the builtin "default.css". 146 | html_static_path = ['_static'] 147 | 148 | # Add any extra paths that contain custom files (such as robots.txt or 149 | # .htaccess) here, relative to this directory.
These files are copied 150 | # directly to the root of the documentation. 151 | #html_extra_path = [] 152 | 153 | # If not None, a 'Last updated on:' timestamp is inserted at every page 154 | # bottom, using the given strftime format. 155 | # The empty string is equivalent to '%b %d, %Y'. 156 | #html_last_updated_fmt = None 157 | 158 | # If true, SmartyPants will be used to convert quotes and dashes to 159 | # typographically correct entities. 160 | #html_use_smartypants = True 161 | 162 | # Custom sidebar templates, maps document names to template names. 163 | #html_sidebars = {} 164 | 165 | # Additional templates that should be rendered to pages, maps page names to 166 | # template names. 167 | #html_additional_pages = {} 168 | 169 | # If false, no module index is generated. 170 | #html_domain_indices = True 171 | 172 | # If false, no index is generated. 173 | #html_use_index = True 174 | 175 | # If true, the index is split into individual pages for each letter. 176 | #html_split_index = False 177 | 178 | # If true, links to the reST sources are added to the pages. 179 | #html_show_sourcelink = True 180 | 181 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 182 | #html_show_sphinx = True 183 | 184 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 185 | #html_show_copyright = True 186 | 187 | # If true, an OpenSearch description file will be output, and all pages will 188 | # contain a <link> tag referring to it. The value of this option must be the 189 | # base URL from which the finished HTML is served. 190 | #html_use_opensearch = '' 191 | 192 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 193 | #html_file_suffix = None 194 | 195 | # Language to be used for generating the HTML full-text search index. 196 | # Sphinx supports the following languages: 197 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 198 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' 199 | #html_search_language = 'en' 200 | 201 | # A dictionary with options for the search language support, empty by default. 202 | # 'ja' uses this config value. 203 | # 'zh' user can custom change `jieba` dictionary path. 204 | #html_search_options = {'type': 'default'} 205 | 206 | # The name of a javascript file (relative to the configuration directory) that 207 | # implements a search results scorer. If empty, the default will be used. 208 | #html_search_scorer = 'scorer.js' 209 | 210 | # Output file base name for HTML help builder. 211 | htmlhelp_basename = 'elasticmagicdoc' 212 | 213 | # -- Options for LaTeX output --------------------------------------------- 214 | 215 | latex_elements = { 216 | # The paper size ('letterpaper' or 'a4paper'). 217 | #'papersize': 'letterpaper', 218 | 219 | # The font size ('10pt', '11pt' or '12pt'). 220 | #'pointsize': '10pt', 221 | 222 | # Additional stuff for the LaTeX preamble. 223 | #'preamble': '', 224 | 225 | # Latex figure (float) alignment 226 | #'figure_align': 'htbp', 227 | } 228 | 229 | # Grouping the document tree into LaTeX files. List of tuples 230 | # (source start file, target name, title, 231 | # author, documentclass [howto, manual, or own class]). 232 | latex_documents = [ 233 | (master_doc, 'elasticmagic.tex', u'elasticmagic Documentation', 234 | u'Alexander Koval \\textless{}kovalidis@gmail.com\\textgreater{}', 'manual'), 235 | ] 236 | 237 | # The name of an image file (relative to this directory) to place at the top of 238 | # the title page.
239 | #latex_logo = None 240 | 241 | # For "manual" documents, if this is true, then toplevel headings are parts, 242 | # not chapters. 243 | #latex_use_parts = False 244 | 245 | # If true, show page references after internal links. 246 | #latex_show_pagerefs = False 247 | 248 | # If true, show URL addresses after external links. 249 | #latex_show_urls = False 250 | 251 | # Documents to append as an appendix to all manuals. 252 | #latex_appendices = [] 253 | 254 | # If false, no module index is generated. 255 | #latex_domain_indices = True 256 | 257 | 258 | # -- Options for manual page output --------------------------------------- 259 | 260 | # One entry per manual page. List of tuples 261 | # (source start file, name, description, authors, manual section). 262 | man_pages = [ 263 | (master_doc, 'elasticmagic', u'elasticmagic Documentation', 264 | [author], 1) 265 | ] 266 | 267 | # If true, show URL addresses after external links. 268 | #man_show_urls = False 269 | 270 | 271 | # -- Options for Texinfo output ------------------------------------------- 272 | 273 | # Grouping the document tree into Texinfo files. List of tuples 274 | # (source start file, target name, title, author, 275 | # dir menu entry, description, category) 276 | texinfo_documents = [ 277 | (master_doc, 'elasticmagic', u'elasticmagic Documentation', 278 | author, 'elasticmagic', 'One line description of project.', 279 | 'Miscellaneous'), 280 | ] 281 | 282 | # Documents to append as an appendix to all manuals. 283 | #texinfo_appendices = [] 284 | 285 | # If false, no module index is generated. 286 | #texinfo_domain_indices = True 287 | 288 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 289 | #texinfo_show_urls = 'footnote' 290 | 291 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
292 | #texinfo_no_detailmenu = False 293 | 294 | # on_rtd is whether we are on readthedocs.org 295 | import os 296 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 297 | 298 | if not on_rtd: # only import and set the theme if we're building docs locally 299 | import sphinx_rtd_theme 300 | html_theme = 'sphinx_rtd_theme' 301 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 302 | 303 | # otherwise, readthedocs.org uses their theme by default, so no need to specify it 304 | -------------------------------------------------------------------------------- /elasticmagic/cluster.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta 2 | 3 | from .compiler import ( 4 | ESVersion, 5 | get_compiler_by_es_version, 6 | ) 7 | from .index import Index 8 | from .result import ( 9 | ClearScrollResult, 10 | FlushResult, 11 | RefreshResult, 12 | ) 13 | from .search import SearchQuery 14 | from .util import clean_params 15 | 16 | MAX_RESULT_WINDOW = 10000 17 | 18 | 19 | class BaseCluster(metaclass=ABCMeta): 20 | _index_cls = None 21 | _search_query_cls = None 22 | 23 | def __init__( 24 | self, client, index_cls=None, 25 | multi_search_raise_on_error=True, 26 | autodetect_es_version=True, compiler=None, 27 | ): 28 | self._client = client 29 | self._index_cls = index_cls or self._index_cls 30 | self._multi_search_raise_on_error = multi_search_raise_on_error 31 | assert autodetect_es_version or compiler, ( 32 | 'Cannot detect compiler: either `autodetect_es_version` should be ' 33 | '`True` or `compiler` must be specified' 34 | ) 35 | self._autodetect_es_version = autodetect_es_version 36 | self._compiler = compiler 37 | self._index_cache = {} 38 | self._es_version = None 39 | 40 | def __getitem__(self, index_name): 41 | return self.get_index(index_name) 42 | 43 | def get_index(self, name): 44 | if isinstance(name, tuple): 45 | name = ','.join(name) 46 | 47 | if name not in self._index_cache: 48 | self._index_cache[name] = self._index_cls(self, name) 49 | return self._index_cache[name] 50 | 51 | def get_client(self): 52 | return self._client 53 | 54 | def search_query(self, *args, **kwargs): 55 | """Returns a :class:`search.SearchQuery` instance that is bound to this 56 | cluster. 
57 | """ 58 | kwargs['cluster'] = self 59 | return self._search_query_cls(*args, **kwargs) 60 | 61 | def query(self, *args, **kwargs): 62 | return self.search_query(*args, **kwargs) 63 | 64 | def _es_version_result(self, raw_result): 65 | version_str = raw_result['version']['number'] 66 | version_str, _, snapshot = version_str.partition('-') 67 | major, minor, patch = map(int, version_str.split('.')) 68 | return ESVersion( 69 | major, minor, patch, raw_result['version'].get('distribution') 70 | ) 71 | 72 | def _preprocess_params(self, params, *pop_keys): 73 | params = params.copy() 74 | params.pop('self') 75 | kwargs = params.pop('kwargs') or {} 76 | for key in pop_keys: 77 | params.pop(key) 78 | return clean_params(params, **kwargs) 79 | 80 | def _get_params(self, params): 81 | return self._preprocess_params(params, 'doc_or_id', 'doc_cls') 82 | 83 | def _multi_get_params(self, params): 84 | return self._preprocess_params(params, 'docs_or_ids', 'doc_cls') 85 | 86 | def _search_params(self, params): 87 | return self._preprocess_params(params, 'q') 88 | 89 | def _explain_params(self, params): 90 | return self._preprocess_params(params, 'q', 'doc_or_id', 'doc_cls') 91 | 92 | def _scroll_params(self, params): 93 | return self._preprocess_params(params, 'doc_cls', 'instance_mapper') 94 | 95 | def _clear_scroll_result(self, raw_result): 96 | return ClearScrollResult(raw_result) 97 | 98 | def _multi_search_params(self, params): 99 | params = self._preprocess_params(params, 'queries') 100 | raise_on_error = params.pop( 101 | 'raise_on_error', self._multi_search_raise_on_error 102 | ) 103 | return params, raise_on_error 104 | 105 | def _put_mapping_params(self, params): 106 | return self._preprocess_params(params, 'doc_cls_or_mapping') 107 | 108 | def _create_index_params(self, params): 109 | return self._preprocess_params(params, 'settings', 'mappings') 110 | 111 | def _add_params(self, params): 112 | from . 
import actions 113 | 114 | params = self._preprocess_params(params) 115 | docs = params.pop('docs') 116 | return ( 117 | [actions.Index(d) for d in docs], 118 | params 119 | ) 120 | 121 | def _delete_params(self, params): 122 | return self._preprocess_params(params, 'doc_or_id', 'doc_cls') 123 | 124 | def _bulk_params(self, params): 125 | return self._preprocess_params(params, 'actions') 126 | 127 | def _refresh_result(self, raw_result): 128 | return RefreshResult(raw_result) 129 | 130 | def _flush_result(self, raw_result): 131 | return FlushResult(raw_result) 132 | 133 | 134 | class Cluster(BaseCluster): 135 | _index_cls = Index 136 | _search_query_cls = SearchQuery 137 | 138 | def _do_request(self, compiler, *args, **kwargs): 139 | compiled_query = compiler(*args, **kwargs) 140 | api_method = compiled_query.api_method(self._client) 141 | if compiled_query.body is None: 142 | raw_res = api_method(**compiled_query.params) 143 | else: 144 | raw_res = api_method( 145 | body=compiled_query.body, **compiled_query.params 146 | ) 147 | return compiled_query.process_result(raw_res) 148 | 149 | def get_compiler(self): 150 | if self._compiler: 151 | return self._compiler 152 | else: 153 | return get_compiler_by_es_version(self.get_es_version()) 154 | 155 | def get_es_version(self): 156 | if not self._es_version: 157 | self._es_version = self._es_version_result( 158 | self._client.info() 159 | ) 160 | return self._es_version 161 | 162 | def get( 163 | self, doc_or_id, index=None, doc_cls=None, doc_type=None, 164 | routing=None, source=None, realtime=None, parent=None, 165 | preference=None, refresh=None, version=None, version_type=None, 166 | **kwargs 167 | ): 168 | return self._do_request( 169 | self.get_compiler().compiled_get, 170 | doc_or_id, self._get_params(locals()), doc_cls=doc_cls 171 | ) 172 | 173 | def multi_get( 174 | self, docs_or_ids, index=None, doc_cls=None, doc_type=None, 175 | source=None, parent=None, routing=None, preference=None, 176 | realtime=None, refresh=None, **kwargs 177 | ): 178 | return self._do_request( 179 | self.get_compiler().compiled_multi_get, 180 | docs_or_ids, self._multi_get_params(locals()), doc_cls=doc_cls 181 | ) 182 | 183 | mget = multi_get 184 | 185 | def search( 186 | self, q, index=None, doc_type=None, routing=None, preference=None, 187 | timeout=None, search_type=None, query_cache=None, 188 | terminate_after=None, scroll=None, stats=None, **kwargs 189 | ): 190 | return self._do_request( 191 | self.get_compiler().compiled_search_query, 192 | q, self._search_params(locals()) 193 | ) 194 | 195 | def explain( 196 | self, q, doc_or_id, index, doc_cls=None, routing=None, **kwargs 197 | ): 198 | return self._do_request( 199 | self.get_compiler().compiled_explain, 200 | q, doc_or_id, self._explain_params(locals()), doc_cls=doc_cls 201 | ) 202 | 203 | def count( 204 | self, q=None, index=None, doc_type=None, routing=None, 205 | preference=None, **kwargs 206 | ): 207 | return self._do_request( 208 | self.get_compiler().compiled_count_query, 209 | q, self._search_params(locals()) 210 | ) 211 | 212 | def exists( 213 | self, q=None, index=None, doc_type=None, refresh=None, 214 | routing=None, **kwargs 215 | ): 216 | return self._do_request( 217 | self.get_compiler().compiled_exists_query, 218 | q, self._search_params(locals()) 219 | ) 220 | 221 | def scroll( 222 | self, scroll_id, scroll, doc_cls=None, instance_mapper=None, 223 | **kwargs 224 | ): 225 | return self._do_request( 226 | self.get_compiler().compiled_scroll, 227 | self._scroll_params(locals()), 228 | 
doc_cls=doc_cls, instance_mapper=instance_mapper 229 | ) 230 | 231 | def clear_scroll(self, scroll_id, **kwargs): 232 | params = self._preprocess_params(locals()) 233 | return self._clear_scroll_result( 234 | self._client.clear_scroll(**params) 235 | ) 236 | 237 | def multi_search( 238 | self, queries, index=None, doc_type=None, 239 | routing=None, preference=None, search_type=None, 240 | raise_on_error=None, **kwargs 241 | ): 242 | params, raise_on_error = self._multi_search_params(locals()) 243 | return self._do_request( 244 | self.get_compiler().compiled_multi_search, 245 | queries, params, raise_on_error=raise_on_error 246 | ) 247 | 248 | msearch = multi_search 249 | 250 | def put_mapping( 251 | self, doc_cls_or_mapping, index=None, doc_type=None, 252 | allow_no_indices=None, expand_wildcards=None, 253 | ignore_conflicts=None, ignore_unavailable=None, 254 | master_timeout=None, timeout=None, **kwargs 255 | ): 256 | return self._do_request( 257 | self.get_compiler().compiled_put_mapping, 258 | doc_cls_or_mapping, self._put_mapping_params(locals()) 259 | ) 260 | 261 | def add( 262 | self, docs, index=None, doc_type=None, refresh=None, 263 | timeout=None, consistency=None, replication=None, **kwargs 264 | ): 265 | actions, params = self._add_params(locals()) 266 | return self.bulk(actions, **params) 267 | 268 | def delete( 269 | self, doc_or_id, index, doc_cls=None, doc_type=None, 270 | timeout=None, consistency=None, replication=None, 271 | parent=None, routing=None, refresh=None, version=None, 272 | version_type=None, 273 | **kwargs 274 | ): 275 | return self._do_request( 276 | self.get_compiler().compiled_delete, 277 | doc_or_id, self._delete_params(locals()), doc_cls=doc_cls 278 | ) 279 | 280 | def delete_by_query( 281 | self, q, index=None, doc_type=None, routing=None, 282 | conflicts=None, refresh=None, timeout=None, 283 | scroll=None, scroll_size=None, 284 | wait_for_completion=None, requests_per_second=None, 285 | **kwargs 286 | ): 287 | return self._do_request( 288 | self.get_compiler().compiled_delete_by_query, 289 | q, self._search_params(locals()) 290 | ) 291 | 292 | def bulk( 293 | self, actions, index=None, doc_type=None, refresh=None, 294 | timeout=None, consistency=None, replication=None, **kwargs 295 | ): 296 | return self._do_request( 297 | self.get_compiler().compiled_bulk, 298 | actions, self._bulk_params(locals()) 299 | ) 300 | 301 | def refresh(self, index=None, **kwargs): 302 | params = self._preprocess_params(locals()) 303 | return self._refresh_result( 304 | self._client.indices.refresh(**params) 305 | ) 306 | 307 | def flush(self, index=None, **kwargs): 308 | params = self._preprocess_params(locals()) 309 | return self._flush_result( 310 | self._client.indices.flush(**params) 311 | ) 312 | 313 | def flush_synced(self, index=None, **kwargs): 314 | params = self._preprocess_params(locals()) 315 | return self._flush_result( 316 | self._client.indices.flush_synced(**params) 317 | ) 318 | -------------------------------------------------------------------------------- /elasticmagic/types.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import datetime 3 | import inspect 4 | import re 5 | import copy 6 | 7 | import dateutil.parser 8 | 9 | 10 | def instantiate(typeobj, *args, **kwargs): 11 | if inspect.isclass(typeobj): 12 | return typeobj(*args, **kwargs) 13 | return typeobj 14 | 15 | 16 | class ValidationError(ValueError): 17 | pass 18 | 19 | 20 | class Type(object): 21 | python_type = None 22 | 23 | def 
/elasticmagic/types.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import datetime
3 | import inspect
4 | import re
5 | import copy
6 | 
7 | import dateutil.parser
8 | 
9 | 
10 | def instantiate(typeobj, *args, **kwargs):
11 |     if inspect.isclass(typeobj):
12 |         return typeobj(*args, **kwargs)
13 |     return typeobj
14 | 
15 | 
16 | class ValidationError(ValueError):
17 |     pass
18 | 
19 | 
20 | class Type(object):
21 |     python_type = None
22 | 
23 |     def __init__(self):
24 |         self.sub_type = None
25 |         self.doc_cls = None
26 | 
27 |     def to_python(self, value):
28 |         if value is None:
29 |             return None
30 |         if self.python_type is not None:
31 |             return self.python_type(value)
32 |         return value
33 | 
34 |     def to_python_single(self, value):
35 |         return self.to_python(value)
36 | 
37 |     def from_python(self, value, compiler, validate=False):
38 |         return value
39 | 
40 | 
41 | class String(Type):
42 |     __visit_name__ = 'string'
43 | 
44 |     python_type = str
45 | 
46 |     def from_python(self, value, compiler, validate=False):
47 |         return str(value)
48 | 
49 | 
50 | class Keyword(String):
51 |     __visit_name__ = 'keyword'
52 | 
53 | 
54 | class Text(String):
55 |     __visit_name__ = 'text'
56 | 
57 | 
58 | class _Int(Type):
59 |     MIN_VALUE = None
60 |     MAX_VALUE = None
61 | 
62 |     python_type = int
63 | 
64 |     def from_python(self, value, compiler, validate=False):
65 |         if validate:
66 |             try:
67 |                 value = int(value)
68 |             except (ValueError, TypeError):
69 |                 raise ValidationError(
70 |                     "Cannot parse value as integer: {!r}".format(value)
71 |                 )
72 | 
73 |             if value < self.MIN_VALUE or value > self.MAX_VALUE:
74 |                 raise ValidationError(
75 |                     'Value must be in range: {} not in [{}, {}]'.format(
76 |                         value, self.MIN_VALUE, self.MAX_VALUE
77 |                     )
78 |                 )
79 |         return value
80 | 
81 | 
82 | class Byte(_Int):
83 |     __visit_name__ = 'byte'
84 | 
85 |     MIN_VALUE = -(1 << 7)
86 |     MAX_VALUE = (1 << 7) - 1
87 | 
88 | 
89 | class Short(_Int):
90 |     __visit_name__ = 'short'
91 | 
92 |     MIN_VALUE = -(1 << 15)
93 |     MAX_VALUE = (1 << 15) - 1
94 | 
95 | 
96 | class Integer(_Int):
97 |     __visit_name__ = 'integer'
98 | 
99 |     MIN_VALUE = -(1 << 31)
100 |     MAX_VALUE = (1 << 31) - 1
101 | 
102 | 
103 | class Long(_Int):
104 |     __visit_name__ = 'long'
105 | 
106 |     MIN_VALUE = -(1 << 63)
107 |     MAX_VALUE = (1 << 63) - 1
108 | 
109 | 
110 | class _Float(Type):
111 |     python_type = float
112 | 
113 |     def from_python(self, value, compiler, validate=False):
114 |         if validate:
115 |             try:
116 |                 value = float(value)
117 |             except (ValueError, TypeError):
118 |                 raise ValidationError(
119 |                     "Cannot parse value as float: {!r}".format(value)
120 |                 )
121 |         return value
122 | 
123 | 
124 | class Float(_Float):
125 |     __visit_name__ = 'float'
126 | 
127 | 
128 | class Double(_Float):
129 |     __visit_name__ = 'double'
130 | 
131 | 
132 | class Date(Type):
133 |     __visit_name__ = 'date'
134 | 
135 |     python_type = datetime.datetime
136 | 
137 |     # def __init__(self, format=None):
138 |     #     self.format = format
139 | 
140 |     def to_python(self, value):
141 |         if value is None:
142 |             return None
143 |         return dateutil.parser.parse(value)
144 | 
145 |     def from_python(self, value, compiler, validate=True):
146 |         if validate:
147 |             if not isinstance(value, datetime.datetime):
148 |                 raise ValidationError('Value must be a datetime.datetime object')
149 |         return value
150 | 
151 | 
152 | class Boolean(Type):
153 |     __visit_name__ = 'boolean'
154 | 
155 |     python_type = bool
156 | 
157 |     def to_python(self, value):
158 |         if value is None:
159 |             return None
160 |         if value is False or value == 0 or value in ('', 'false', 'F'):
161 |             return False
162 |         return True
163 | 
164 |     def from_python(self, value, compiler, validate=True):
165 |         return bool(value)
166 | 
167 | 
168 | class Binary(Type):
169 |     __visit_name__ = 'binary'
170 | 
171 |     python_type = str
172 | 
173 |     def to_python(self, value):
174 |         if value is None:
175 |             return None
176 |         return base64.b64decode(value)
177 | 
178 |     def from_python(self, value, compiler, validate=False):
179 |         try:
180 |             return base64.b64encode(value).decode()
181 |         except (ValueError, TypeError):
182 |             if validate:
183 |                 raise ValidationError(
184 |                     'Cannot encode value to base64: {!r}'.format(value)
185 |                 )
186 |             else:
187 |                 raise
188 | 
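# [Editor's note -- not part of the original module] A quick round-trip
# sketch for the scalar types above, doctest style; the expected outputs
# follow the implementations above and the cases in tests/test_types.py:
#
#     >>> from elasticmagic.compiler import Compiler_7_0
#     >>> Integer().to_python('123')
#     123
#     >>> Integer().from_python(1 << 31, Compiler_7_0, validate=True)
#     Traceback (most recent call last):
#         ...
#     ValidationError: Value must be in range: 2147483648 not in [-2147483648, 2147483647]
#     >>> Binary().from_python(b'test', Compiler_7_0)
#     'dGVzdA=='
#     >>> Binary().to_python('dGVzdA==')
#     b'test'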
189 | 
190 | class Ip(Type):
191 |     __visit_name__ = 'ip'
192 | 
193 |     IPV4_REGEXP = re.compile(
194 |         r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
195 |         r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
196 |     )
197 | 
198 |     python_type = str
199 | 
200 |     def from_python(self, value, compiler, validate=False):
201 |         if validate:
202 |             try:
203 |                 if not self.IPV4_REGEXP.match(value):
204 |                     raise ValidationError(
205 |                         'Not a valid IPv4 address: {}'.format(value))
206 |             except (TypeError, ValueError) as e:
207 |                 raise ValidationError(*e.args)
208 |         return value
209 | 
210 | 
211 | class Object(Type):
212 |     __visit_name__ = 'object'
213 | 
214 |     def __init__(self, doc_cls):
215 |         self.doc_cls = doc_cls
216 | 
217 |     @property
218 |     def python_type(self):
219 |         return self.doc_cls
220 | 
221 |     def to_python(self, value):
222 |         if value is None:
223 |             return None
224 |         if isinstance(value, self.doc_cls):
225 |             return value
226 |         return self.doc_cls(_hit={'_source': value})
227 | 
228 |     def from_python(self, value, compiler, validate=False):
229 |         if isinstance(value, self.doc_cls):
230 |             return value.to_source(compiler, validate=validate)
231 |         return value
232 | 
233 | 
234 | class Nested(Object):
235 |     __visit_name__ = 'nested'
236 | 
237 | 
238 | class List(Type):
239 |     python_type = list
240 | 
241 |     def __init__(self, sub_type):
242 |         self.sub_type = instantiate(sub_type)
243 | 
244 |     @property
245 |     def __visit_name__(self):
246 |         return self.sub_type.__visit_name__
247 | 
248 |     @property
249 |     def doc_cls(self):
250 |         return self.sub_type.doc_cls
251 | 
252 |     def to_python(self, value):
253 |         if value is None:
254 |             return None
255 |         if not isinstance(value, self.python_type):
256 |             value = [value]
257 |         return [self.sub_type.to_python(v) for v in value]
258 | 
259 |     def to_python_single(self, value):
260 |         v = self.to_python(value)
261 |         if v:
262 |             return v[0]
263 | 
264 |     def from_python(self, value, compiler, validate=False):
265 |         if not isinstance(value, self.python_type):
266 |             value = [value]
267 |         return [
268 |             self.sub_type.from_python(v, compiler, validate=validate)
269 |             for v in value
270 |         ]
271 | 
272 | 
273 | class GeoPoint(Type):
274 |     __visit_name__ = 'geo_point'
275 | 
276 |     LAT_LON_SEPARATOR = ','
277 | 
278 |     python_type = dict
279 | 
280 |     def to_python(self, value):
281 |         if value is None:
282 |             return None
283 |         if isinstance(value, (list, tuple)):
284 |             value = list(reversed(value))
285 |         if isinstance(value, str):
286 |             if self.LAT_LON_SEPARATOR in value:
287 |                 value = list(value.split(self.LAT_LON_SEPARATOR))
288 |         elif isinstance(value, dict):
289 |             value = [value.get('lat'), value.get('lon')]
290 |         return {'lat': float(value[0]), 'lon': float(value[1])}
291 | 
292 |     def from_python(self, value, compiler, validate=False):
293 |         if validate:
294 |             if not isinstance(value, dict):
295 |                 raise ValidationError(
296 |                     'Value must be a dictionary: {!r}'.format(value))
297 |             if len(value) != 2 or 'lat' not in value or 'lon' not in value:
298 |                 raise ValidationError(
299 |                     "Only 'lat' and 'lon' keys must be present in the dictionary"
300 |                 )
301 |             try:
302 |                 value = {'lat': float(value['lat']),
303 |                          'lon': float(value['lon'])}
304 |             except (ValueError, TypeError):
305 |                 raise ValidationError('Lat/lon must be floats: {!r}'.format(value))
306 |         return value
307 | 
308 | 
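# [Editor's note -- not part of the original module] GeoPoint.to_python
# normalizes all accepted input shapes -- a 'lat,lon' string, a [lon, lat]
# list (note the Elasticsearch coordinate order) or a lat/lon dict -- to a
# single dict form; these cases mirror tests/test_types.py:
#
#     >>> GeoPoint().to_python('41.12,-71.34')
#     {'lat': 41.12, 'lon': -71.34}
#     >>> GeoPoint().to_python([-71.34, 41.12])
#     {'lat': 41.12, 'lon': -71.34}
#     >>> GeoPoint().to_python({'lat': 41.12, 'lon': -71.34})
#     {'lat': 41.12, 'lon': -71.34}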
309 | class Completion(Type):
310 |     __visit_name__ = 'completion'
311 | 
312 |     python_type = dict
313 | 
314 |     def to_python(self, value):
315 |         return value
316 | 
317 |     def from_python(self, value, compiler, validate=False):
318 |         if validate:
319 |             if isinstance(value, str):
320 |                 return value
321 | 
322 |             if not isinstance(value, dict):
323 |                 raise ValidationError(
324 |                     'Value must be a dictionary: {!r}'.format(value))
325 |             if not ('input' in value and value['input']):
326 |                 raise ValidationError(
327 |                     "'input' key must be present and non-empty: {!r}".format(value))
328 |             if not isinstance(value['input'], (str, list, tuple)):
329 |                 raise ValidationError(
330 |                     "'input' must be either a str or a list/tuple: {!r}"
331 |                     .format(value))
332 |             if 'output' in value and not isinstance(value['output'], str):
333 |                 raise ValidationError(
334 |                     "'output' must be a str: {!r}".format(value))
335 |             if 'payload' in value and not isinstance(value['payload'], dict):
336 |                 raise ValidationError(
337 |                     "'payload' must be a dict or absent: {!r}".format(value))
338 |             if 'weight' in value:
339 |                 if isinstance(value['weight'], int):
340 |                     if value['weight'] < 0:
341 |                         raise ValidationError(
342 |                             "'weight' must be greater than or equal to 0: {!r}"
343 |                             .format(value['weight']))
344 |                 elif isinstance(value['weight'], str):
345 |                     if not value['weight'].isdigit():
346 |                         raise ValidationError(
347 |                             "'weight' must represent a positive integer: {!r}"
348 |                             .format(value['weight']))
349 |                 else:
350 |                     raise ValidationError(
351 |                         "'weight' must be a positive integer, a string"
352 |                         " representing a positive integer, or absent: {!r}"
353 |                         .format(value))
354 |             # we copy the value to be sure it won't be changed in the future
355 |             value = copy.deepcopy(value)
356 |         return value
357 | 
358 | 
359 | class Percolator(Type):
360 |     __visit_name__ = 'percolator'
361 | 
362 |     python_type = dict
363 | 
364 |     def from_python(self, value, compiler, validate=False):
365 |         if hasattr(value, 'to_elastic'):
366 |             value = value.to_elastic(compiler)
367 |         if validate:
368 |             if not isinstance(value, dict):
369 |                 raise ValidationError(
370 |                     'Value must be a dictionary or an expression: {!r}'
371 |                     .format(value)
372 |                 )
373 |         return value
374 | 
--------------------------------------------------------------------------------
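[Editor's note] A small validation sketch for the Completion type defined above; it is not part of the repository and the suggestion payloads are invented. Completion.from_python accepts a bare string, or a dict carrying a non-empty 'input' and an optional non-negative integer (or digit-string) 'weight'; validated dicts are deep-copied before being returned.

    # Illustrative only; mirrors the rules implemented in Completion.from_python.
    from elasticmagic.compiler import Compiler_7_0
    from elasticmagic.types import Completion, ValidationError

    t = Completion()
    ok = {'input': ['Complete this', 'Complete'], 'weight': '2'}
    assert t.from_python(ok, Compiler_7_0, validate=True) == ok

    try:
        t.from_python({'input': 'foo', 'weight': -1}, Compiler_7_0, validate=True)
    except ValidationError as e:
        print('rejected:', e)
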
/LICENSE:
--------------------------------------------------------------------------------
1 |                                  Apache License
2 |                            Version 2.0, January 2004
3 |                         http://www.apache.org/licenses/
4 | 
5 |    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 | 
7 |    1. Definitions.
8 | 
9 |       "License" shall mean the terms and conditions for use, reproduction,
10 |       and distribution as defined by Sections 1 through 9 of this document.
11 | 
12 |       "Licensor" shall mean the copyright owner or entity authorized by
13 |       the copyright owner that is granting the License.
14 | 
15 |       "Legal Entity" shall mean the union of the acting entity and all
16 |       other entities that control, are controlled by, or are under common
17 |       control with that entity. For the purposes of this definition,
18 |       "control" means (i) the power, direct or indirect, to cause the
19 |       direction or management of such entity, whether by contract or
20 |       otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 |       outstanding shares, or (iii) beneficial ownership of such entity.
22 | 
23 |       "You" (or "Your") shall mean an individual or Legal Entity
24 |       exercising permissions granted by this License.
25 | 
26 |       "Source" form shall mean the preferred form for making modifications,
27 |       including but not limited to software source code, documentation
28 |       source, and configuration files.
29 | 
30 |       "Object" form shall mean any form resulting from mechanical
31 |       transformation or translation of a Source form, including but
32 |       not limited to compiled object code, generated documentation,
33 |       and conversions to other media types.
34 | 
35 |       "Work" shall mean the work of authorship, whether in Source or
36 |       Object form, made available under the License, as indicated by a
37 |       copyright notice that is included in or attached to the work
38 |       (an example is provided in the Appendix below).
39 | 
40 |       "Derivative Works" shall mean any work, whether in Source or Object
41 |       form, that is based on (or derived from) the Work and for which the
42 |       editorial revisions, annotations, elaborations, or other modifications
43 |       represent, as a whole, an original work of authorship. For the purposes
44 |       of this License, Derivative Works shall not include works that remain
45 |       separable from, or merely link (or bind by name) to the interfaces of,
46 |       the Work and Derivative Works thereof.
47 | 
48 |       "Contribution" shall mean any work of authorship, including
49 |       the original version of the Work and any modifications or additions
50 |       to that Work or Derivative Works thereof, that is intentionally
51 |       submitted to Licensor for inclusion in the Work by the copyright owner
52 |       or by an individual or Legal Entity authorized to submit on behalf of
53 |       the copyright owner. For the purposes of this definition, "submitted"
54 |       means any form of electronic, verbal, or written communication sent
55 |       to the Licensor or its representatives, including but not limited to
56 |       communication on electronic mailing lists, source code control systems,
57 |       and issue tracking systems that are managed by, or on behalf of, the
58 |       Licensor for the purpose of discussing and improving the Work, but
59 |       excluding communication that is conspicuously marked or otherwise
60 |       designated in writing by the copyright owner as "Not a Contribution."
61 | 
62 |       "Contributor" shall mean Licensor and any individual or Legal Entity
63 |       on behalf of whom a Contribution has been received by Licensor and
64 |       subsequently incorporated within the Work.
65 | 
66 |    2. Grant of Copyright License. Subject to the terms and conditions of
67 |       this License, each Contributor hereby grants to You a perpetual,
68 |       worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 |       copyright license to reproduce, prepare Derivative Works of,
70 |       publicly display, publicly perform, sublicense, and distribute the
71 |       Work and such Derivative Works in Source or Object form.
72 | 
73 |    3. Grant of Patent License. Subject to the terms and conditions of
74 |       this License, each Contributor hereby grants to You a perpetual,
75 |       worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 |       (except as stated in this section) patent license to make, have made,
77 |       use, offer to sell, sell, import, and otherwise transfer the Work,
78 |       where such license applies only to those patent claims licensable
79 |       by such Contributor that are necessarily infringed by their
80 |       Contribution(s) alone or by combination of their Contribution(s)
81 |       with the Work to which such Contribution(s) was submitted. If You
82 |       institute patent litigation against any entity (including a
83 |       cross-claim or counterclaim in a lawsuit) alleging that the Work
84 |       or a Contribution incorporated within the Work constitutes direct
85 |       or contributory patent infringement, then any patent licenses
86 |       granted to You under this License for that Work shall terminate
87 |       as of the date such litigation is filed.
88 | 
89 |    4. Redistribution. You may reproduce and distribute copies of the
90 |       Work or Derivative Works thereof in any medium, with or without
91 |       modifications, and in Source or Object form, provided that You
92 |       meet the following conditions:
93 | 
94 |       (a) You must give any other recipients of the Work or
95 |           Derivative Works a copy of this License; and
96 | 
97 |       (b) You must cause any modified files to carry prominent notices
98 |           stating that You changed the files; and
99 | 
100 |       (c) You must retain, in the Source form of any Derivative Works
101 |           that You distribute, all copyright, patent, trademark, and
102 |           attribution notices from the Source form of the Work,
103 |           excluding those notices that do not pertain to any part of
104 |           the Derivative Works; and
105 | 
106 |       (d) If the Work includes a "NOTICE" text file as part of its
107 |           distribution, then any Derivative Works that You distribute must
108 |           include a readable copy of the attribution notices contained
109 |           within such NOTICE file, excluding those notices that do not
110 |           pertain to any part of the Derivative Works, in at least one
111 |           of the following places: within a NOTICE text file distributed
112 |           as part of the Derivative Works; within the Source form or
113 |           documentation, if provided along with the Derivative Works; or,
114 |           within a display generated by the Derivative Works, if and
115 |           wherever such third-party notices normally appear. The contents
116 |           of the NOTICE file are for informational purposes only and
117 |           do not modify the License. You may add Your own attribution
118 |           notices within Derivative Works that You distribute, alongside
119 |           or as an addendum to the NOTICE text from the Work, provided
120 |           that such additional attribution notices cannot be construed
121 |           as modifying the License.
122 | 
123 |       You may add Your own copyright statement to Your modifications and
124 |       may provide additional or different license terms and conditions
125 |       for use, reproduction, or distribution of Your modifications, or
126 |       for any such Derivative Works as a whole, provided Your use,
127 |       reproduction, and distribution of the Work otherwise complies with
128 |       the conditions stated in this License.
129 | 
130 |    5. Submission of Contributions. Unless You explicitly state otherwise,
131 |       any Contribution intentionally submitted for inclusion in the Work
132 |       by You to the Licensor shall be under the terms and conditions of
133 |       this License, without any additional terms or conditions.
134 |       Notwithstanding the above, nothing herein shall supersede or modify
135 |       the terms of any separate license agreement you may have executed
136 |       with Licensor regarding such Contributions.
137 | 
138 |    6. Trademarks. This License does not grant permission to use the trade
139 |       names, trademarks, service marks, or product names of the Licensor,
140 |       except as required for reasonable and customary use in describing the
141 |       origin of the Work and reproducing the content of the NOTICE file.
142 | 
143 |    7. Disclaimer of Warranty. Unless required by applicable law or
144 |       agreed to in writing, Licensor provides the Work (and each
145 |       Contributor provides its Contributions) on an "AS IS" BASIS,
146 |       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 |       implied, including, without limitation, any warranties or conditions
148 |       of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 |       PARTICULAR PURPOSE. You are solely responsible for determining the
150 |       appropriateness of using or redistributing the Work and assume any
151 |       risks associated with Your exercise of permissions under this License.
152 | 
153 |    8. Limitation of Liability. In no event and under no legal theory,
154 |       whether in tort (including negligence), contract, or otherwise,
155 |       unless required by applicable law (such as deliberate and grossly
156 |       negligent acts) or agreed to in writing, shall any Contributor be
157 |       liable to You for damages, including any direct, indirect, special,
158 |       incidental, or consequential damages of any character arising as a
159 |       result of this License or out of the use or inability to use the
160 |       Work (including but not limited to damages for loss of goodwill,
161 |       work stoppage, computer failure or malfunction, or any and all
162 |       other commercial damages or losses), even if such Contributor
163 |       has been advised of the possibility of such damages.
164 | 
165 |    9. Accepting Warranty or Additional Liability. While redistributing
166 |       the Work or Derivative Works thereof, You may choose to offer,
167 |       and charge a fee for, acceptance of support, warranty, indemnity,
168 |       or other liability obligations and/or rights consistent with this
169 |       License. However, in accepting such obligations, You may act only
170 |       on Your own behalf and on Your sole responsibility, not on behalf
171 |       of any other Contributor, and only if You agree to indemnify,
172 |       defend, and hold each Contributor harmless for any liability
173 |       incurred by, or claims asserted against, such Contributor by reason
174 |       of your accepting any such warranty or additional liability.
175 | 
176 |    END OF TERMS AND CONDITIONS
177 | 
178 |    APPENDIX: How to apply the Apache License to your work.
179 | 
180 |       To apply the Apache License to your work, attach the following
181 |       boilerplate notice, with the fields enclosed by brackets "{}"
182 |       replaced with your own identifying information. (Don't include
183 |       the brackets!) The text should be enclosed in the appropriate
184 |       comment syntax for the file format. We also recommend that a
185 |       file or class name and description of purpose be included on the
186 |       same "printed page" as the copyright notice for easier
187 |       identification within third-party archives.
188 | 
189 |    Copyright {yyyy} {name of copyright owner}
190 | 
191 |    Licensed under the Apache License, Version 2.0 (the "License");
192 |    you may not use this file except in compliance with the License.
193 |    You may obtain a copy of the License at
194 | 
195 |        http://www.apache.org/licenses/LICENSE-2.0
196 | 
197 |    Unless required by applicable law or agreed to in writing, software
198 |    distributed under the License is distributed on an "AS IS" BASIS,
199 |    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 |    See the License for the specific language governing permissions and
201 |    limitations under the License.
202 | 
--------------------------------------------------------------------------------
/tests/test_types.py:
--------------------------------------------------------------------------------
1 | import binascii
2 | import datetime
3 | 
4 | import pytest
5 | 
6 | from elasticmagic.compiler import Compiler_7_0
7 | from elasticmagic.document import DynamicDocument
8 | from elasticmagic.types import (
9 |     Type, String, Byte, Short, Integer, Long, Float, Double, Date, Boolean,
10 |     Binary, Ip, Object, List, GeoPoint, Completion, ValidationError,
11 | )
12 | 
13 | 
14 | def test_type():
15 |     t = Type()
16 |     assert t.to_python(None) is None
17 |     assert t.to_python('test') == 'test'
18 |     assert t.to_python(123) == 123
19 |     assert t.from_python(None, Compiler_7_0) is None
20 |     assert t.from_python('test', Compiler_7_0) == 'test'
21 |     assert t.from_python('test', Compiler_7_0, validate=True) == 'test'
22 |     assert t.from_python(123, Compiler_7_0, validate=True) == 123
23 | 
24 | 
25 | def test_string():
26 |     t = String()
27 |     assert t.to_python(None) is None
28 |     assert t.to_python('test') == 'test'
29 |     assert t.to_python(123) == '123'
30 |     assert t.from_python('test', Compiler_7_0) == 'test'
31 |     assert t.from_python('test', Compiler_7_0, validate=True) == 'test'
32 |     assert t.from_python(123, Compiler_7_0, validate=True) == '123'
33 | 
34 | 
35 | def test_byte():
36 |     t = Byte()
37 |     assert t.to_python(None) is None
38 |     with pytest.raises(ValueError):
39 |         t.to_python('test')
40 |     assert t.to_python(123) == 123
41 |     assert t.to_python('123') == 123
42 |     assert t.from_python('test', Compiler_7_0) == 'test'
43 |     with pytest.raises(ValidationError):
44 |         t.from_python('test', Compiler_7_0, validate=True)
45 |     with pytest.raises(ValidationError):
46 |         t.from_python(128, Compiler_7_0, validate=True)
47 | 
48 | 
49 | def test_short():
50 |     t = Short()
51 |     assert t.to_python(None) is None
52 |     with pytest.raises(ValueError):
53 |         t.to_python('test')
54 |     assert t.to_python(123) == 123
55 |     assert t.to_python('123') == 123
56 |     assert t.from_python('test', Compiler_7_0) == 'test'
57 |     with pytest.raises(ValidationError):
58 |         t.from_python('test', Compiler_7_0, validate=True)
59 |     with pytest.raises(ValidationError):
60 |         t.from_python(1 << 15, Compiler_7_0, validate=True)
61 | 
62 | 
63 | def test_integer():
64 |     t = Integer()
65 |     assert t.to_python(None) is None
66 |     with pytest.raises(ValueError):
67 |         t.to_python('test')
68 |     assert t.to_python(123) == 123
69 |     assert t.to_python('123') == 123
70 |     assert t.from_python('test', Compiler_7_0) == 'test'
71 |     with pytest.raises(ValidationError):
72 |         t.from_python('test', Compiler_7_0, validate=True)
73 |     with pytest.raises(ValidationError):
74 |         t.from_python(1 << 31, Compiler_7_0, validate=True)
75 | 
76 | 
77 | def test_float():
78 |     t = Float()
79 |     assert t.to_python(None) is None
80 |     with pytest.raises(ValueError):
81 |         t.to_python('test')
82 |     assert t.to_python(123) == 123
83 |     assert t.to_python('123') == 123
84 |     assert t.from_python('test', Compiler_7_0) == 'test'
85 |     with pytest.raises(ValidationError):
86 |         t.from_python('test', Compiler_7_0, validate=True)
87 |     assert t.from_python('128', Compiler_7_0, validate=True) == \
88 |         pytest.approx(128.0)
89 |     assert t.from_python(128, Compiler_7_0, validate=True) == \
90 |         pytest.approx(128.0)
91 | 
92 | 
93 | def test_double():
94 |     t = Double()
95 |     assert t.to_python(None) is None
96 |     with pytest.raises(ValueError):
97 |         t.to_python('test')
98 |     assert t.to_python(123) == 123
99 |     assert t.to_python('123') == 123
100 |     assert t.from_python('test', Compiler_7_0) == 'test'
101 |     with pytest.raises(ValidationError):
102 |         t.from_python('test', Compiler_7_0, validate=True)
103 |     assert t.from_python('128', Compiler_7_0, validate=True) == \
104 |         pytest.approx(128.0)
105 |     assert t.from_python(128, Compiler_7_0, validate=True) == \
106 |         pytest.approx(128.0)
107 | 
108 | 
109 | def test_date():
110 |     t = Date()
111 |     assert t.to_python(None) is None
112 |     assert t.to_python('2009-11-15T14:12:12') == \
113 |         datetime.datetime(2009, 11, 15, 14, 12, 12)
114 |     with pytest.raises(ValueError):
115 |         t.to_python('test')
116 |     with pytest.raises(ValueError):
117 |         t.from_python('test', Compiler_7_0)
118 |     with pytest.raises(ValidationError):
119 |         t.from_python('test', Compiler_7_0, validate=True)
120 |     assert \
121 |         t.from_python(
122 |             datetime.datetime(2009, 11, 15, 14, 12, 12),
123 |             Compiler_7_0
124 |         ) == \
125 |         datetime.datetime(2009, 11, 15, 14, 12, 12)
126 | 
127 | 
128 | def test_boolean():
129 |     t = Boolean()
130 |     assert t.to_python(None) is None
131 |     assert t.to_python(False) is False
132 |     assert t.to_python(True) is True
133 |     assert t.to_python(0) is False
134 |     assert t.to_python(1) is True
135 |     assert t.to_python('false') is False
136 |     assert t.to_python('F') is False
137 |     assert t.to_python('') is False
138 |     assert t.to_python('true') is True
139 |     assert t.to_python('T') is True
140 |     assert t.from_python(False, Compiler_7_0) is False
141 |     assert t.from_python(True, Compiler_7_0) is True
142 |     assert t.from_python(0, Compiler_7_0) is False
143 |     assert t.from_python(1, Compiler_7_0) is True
144 |     assert t.from_python('true', Compiler_7_0) is True
145 |     assert t.from_python('false', Compiler_7_0) is True
146 | 
147 | 
148 | def test_binary():
149 |     t = Binary()
150 |     assert t.to_python(None) is None
151 |     assert t.to_python('dGVzdA==') == b'test'
152 |     with pytest.raises(binascii.Error):
153 |         t.to_python('dGVzdA=')
154 |     assert t.from_python(b'test', Compiler_7_0) == 'dGVzdA=='
155 |     with pytest.raises(TypeError):
156 |         t.from_python(True, Compiler_7_0)
157 |     with pytest.raises(ValidationError):
158 |         t.from_python(True, Compiler_7_0, validate=True)
159 | 
160 | 
161 | def test_ip():
162 |     t = Ip()
163 |     assert t.to_python(None) is None
164 |     assert t.to_python('8.8.8.8') == '8.8.8.8'
165 |     assert t.from_python('8.8.8.8', Compiler_7_0) == '8.8.8.8'
166 |     assert t.from_python('8.8.8.', Compiler_7_0) == '8.8.8.'
167 |     assert t.from_python(8888, Compiler_7_0) == 8888
168 |     assert t.from_python('8.8.8.8', Compiler_7_0, validate=True) == '8.8.8.8'
169 |     with pytest.raises(ValidationError):
170 |         t.from_python('8.8.8.', Compiler_7_0, validate=True)
171 |     with pytest.raises(ValidationError):
172 |         t.from_python(8888, Compiler_7_0, validate=True)
173 | 
174 | 
175 | def test_object():
176 |     t = Object(DynamicDocument)
177 |     assert t.to_python(None) is None
178 |     doc = t.to_python({'name': 'Test', 'status': 1})
179 |     assert doc.name == 'Test'
180 |     assert doc.status == 1
181 | 
182 | 
183 | def test_list():
184 |     t = List(Integer)
185 |     with pytest.raises(ValueError):
186 |         t.to_python('test')
187 |     with pytest.raises(ValueError):
188 |         t.to_python(['test'])
189 |     assert t.to_python(123) == [123]
190 |     assert t.to_python('123') == [123]
191 |     assert t.to_python([1, '2']) == [1, 2]
192 |     assert t.to_python_single([1, '2']) == 1
193 |     assert t.from_python('test', Compiler_7_0) == ['test']
194 |     with pytest.raises(ValidationError):
195 |         t.from_python('test', Compiler_7_0, validate=True)
196 |     with pytest.raises(ValidationError):
197 |         t.from_python(['test'], Compiler_7_0, validate=True)
198 |     with pytest.raises(ValidationError):
199 |         t.from_python(1 << 31, Compiler_7_0, validate=True)
200 |     with pytest.raises(ValidationError):
201 |         t.from_python([1 << 31], Compiler_7_0, validate=True)
202 | 
203 | 
204 | def test_geo_point():
205 |     t = GeoPoint()
206 |     assert t.to_python(None) is None
207 |     assert t.to_python('41.12,-71.34') == {'lat': 41.12, 'lon': -71.34}
208 |     assert t.to_python([-71.34, 41.12]) == {'lat': 41.12, 'lon': -71.34}
209 |     assert t.to_python({'lat': 41.12, 'lon': -71.32}) == \
210 |         {'lat': 41.12, 'lon': -71.32}
211 |     assert t.from_python({'lon': -71.34, 'lat': 41.12}, Compiler_7_0) == \
212 |         {'lon': -71.34, 'lat': 41.12}
213 |     assert t.from_python('drm3btev3e86', Compiler_7_0) == 'drm3btev3e86'
214 |     assert t.from_python('41.12,-71.34', Compiler_7_0) == '41.12,-71.34'
215 |     assert t.from_python([-71.34, 41.12], Compiler_7_0) == [-71.34, 41.12]
216 |     with pytest.raises(ValidationError):
217 |         t.from_python(
218 |             {'lon': -71.34, 'lat': 41.12, 'description': 'Test'},
219 |             Compiler_7_0,
220 |             validate=True
221 |         )
222 |     with pytest.raises(ValidationError):
223 |         t.from_python('drm3btev3e86', Compiler_7_0, validate=True)
224 |     with pytest.raises(ValidationError):
225 |         t.from_python([-71.34], Compiler_7_0, validate=True)
226 |     with pytest.raises(ValidationError):
227 |         t.from_python(['1test', '2test'], Compiler_7_0, validate=True)
228 |     with pytest.raises(ValidationError):
229 |         t.from_python(
230 |             {'lat': 'lon', 'lon': 'lat'}, Compiler_7_0, validate=True
231 |         )
232 | 
233 | 
234 | def test_completion():
235 |     t = Completion()
236 |     assert t.to_python(None) is None
237 |     assert t.from_python('complete this', Compiler_7_0, validate=True) == \
238 |         'complete this'
239 |     assert \
240 |         t.from_python(
241 |             {'input': 'Complete this'}, Compiler_7_0, validate=True
242 |         ) == \
243 |         {'input': 'Complete this'}
244 |     assert \
245 |         t.from_python(
246 |             {'input': ['Complete this']}, Compiler_7_0, validate=True
247 |         ) == \
248 |         {'input': ['Complete this']}
249 |     assert \
250 |         t.from_python(
251 |             {'input': 'Complete this', 'weight': 1},
252 |             Compiler_7_0,
253 |             validate=True
254 |         ) == \
255 |         {'input': 'Complete this', 'weight': 1}
256 |     assert \
257 |         t.from_python(
258 |             {'input': 'Complete this', 'output': 'complete'},
259 |             Compiler_7_0,
260 |             validate=True
261 |         ) == \
262 |         {'input': 'Complete this', 'output': 'complete'}
263 |     assert \
264 |         t.from_python(
265 |             {
266 |                 'input': ['Complete this', 'Complete'],
267 |                 'output': 'complete',
268 |                 'weight': 100500,
269 |                 'payload': {'hits': 123}
270 |             },
271 |             Compiler_7_0,
272 |             validate=True
273 |         ) == \
274 |         {'input': ['Complete this', 'Complete'],
275 |          'output': 'complete',
276 |          'weight': 100500,
277 |          'payload': {'hits': 123}}
278 | 
279 |     with pytest.raises(ValidationError):
280 |         t.from_python([''], Compiler_7_0, validate=True)
281 |     with pytest.raises(ValidationError):
282 |         t.from_python({'input': ''}, Compiler_7_0, validate=True)
283 |     with pytest.raises(ValidationError):
284 |         t.from_python({'input': None}, Compiler_7_0, validate=True)
285 |     with pytest.raises(ValidationError):
286 |         t.from_python({'input': {'foo': 'bar'}}, Compiler_7_0, validate=True)
287 |     with pytest.raises(ValidationError):
288 |         t.from_python(
289 |             {'input': 'foo', 'weight': -1},
290 |             Compiler_7_0,
291 |             validate=True
292 |         )
293 |     with pytest.raises(ValidationError):
294 |         t.from_python(
295 |             {'input': 'foo', 'weight': None},
296 |             Compiler_7_0,
297 |             validate=True
298 |         )
299 |     with pytest.raises(ValidationError):
300 |         t.from_python(
301 |             {'input': 'foo', 'weight': ''},
302 |             Compiler_7_0,
303 |             validate=True
304 |         )
305 |     with pytest.raises(ValidationError):
306 |         t.from_python(
307 |             {'input': 'foo', 'output': -1},
308 |             Compiler_7_0,
309 |             validate=True
310 |         )
311 |     with pytest.raises(ValidationError):
312 |         t.from_python(
313 |             {'input': 'foo', 'payload': ''},
314 |             Compiler_7_0,
315 |             validate=True
316 |         )
317 | 
--------------------------------------------------------------------------------
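[Editor's note] test_types.py above has no case for the Percolator type; a hypothetical test for it (not in the repository sources) could look like the sketch below, which exercises the dict passthrough and the validation branch of Percolator.from_python.

    # Hypothetical addition -- not part of the repository sources.
    import pytest

    from elasticmagic.compiler import Compiler_7_0
    from elasticmagic.types import Percolator, ValidationError


    def test_percolator():
        t = Percolator()
        raw_query = {'term': {'status': 1}}
        # A raw query dict passes through unchanged, with or without validation.
        assert t.from_python(raw_query, Compiler_7_0) == raw_query
        assert t.from_python(raw_query, Compiler_7_0, validate=True) == raw_query
        # Anything that is neither a dict nor an expression is rejected.
        with pytest.raises(ValidationError):
            t.from_python('not a query', Compiler_7_0, validate=True)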