├── .dockerignore ├── .gitattributes ├── .github ├── titleLint.yml ├── CODEOWNERS ├── workflows │ ├── license.yml │ ├── pull_request.yml │ └── pypipublish.yml ├── stale.yml └── PULL_REQUEST_TEMPLATE.md ├── tests ├── __init__.py └── unit │ ├── __init__.py │ ├── api │ ├── __init__.py │ ├── table │ │ ├── __init__.py │ │ ├── fixtures.py │ │ ├── test_search_table_filter.py │ │ └── test_search_table_api.py │ ├── dashboard │ │ ├── __init__.py │ │ ├── fixtures.py │ │ ├── test_search_dashboard_api.py │ │ └── test_search_dashboard_filter.py │ └── document │ │ ├── __init__.py │ │ ├── test_document_user_api.py │ │ ├── test_document_table_api.py │ │ ├── test_document_users_api.py │ │ └── test_document_tables_api.py │ ├── proxy │ ├── __init__.py │ ├── test_statsd_utilities.py │ ├── test_atlas.py │ └── test_elasticsearch.py │ ├── test_app.py │ └── test_swagger.py ├── search_service ├── api │ ├── __init__.py │ ├── swagger_doc │ │ ├── healthcheck.yml │ │ ├── document │ │ │ ├── table_delete.yml │ │ │ ├── user_delete.yml │ │ │ ├── user_post.yml │ │ │ ├── user_put.yml │ │ │ ├── table_post.yml │ │ │ └── table_put.yml │ │ ├── user.yml │ │ ├── table │ │ │ ├── search_table.yml │ │ │ └── search_table_filter.yml │ │ ├── dashboard │ │ │ ├── search_dashboard.yml │ │ │ └── search_dashboard_filter.yml │ │ └── template.yml │ ├── healthcheck.py │ ├── user.py │ ├── base.py │ ├── table.py │ ├── dashboard.py │ └── document.py ├── models │ ├── __init__.py │ ├── tag.py │ ├── search_result.py │ ├── base.py │ ├── user.py │ ├── dashboard.py │ └── table.py ├── exception.py ├── search_wsgi.py ├── proxy │ ├── __init__.py │ ├── base.py │ ├── statsd_utilities.py │ ├── atlas.py │ └── elasticsearch.py ├── config.py └── __init__.py ├── NOTICE ├── MANIFEST.in ├── .dependabot └── config.yml ├── CODE_OF_CONDUCT.md ├── .gitignore ├── public.Dockerfile ├── .editorconfig ├── requirements.txt ├── setup.cfg ├── setup.py ├── Makefile ├── docs └── atlas-search.md ├── README.md └── LICENSE /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | -------------------------------------------------------------------------------- /.github/titleLint.yml: -------------------------------------------------------------------------------- 1 | regex: (build|ci|docs|feat|fix|perf|refactor|style|test|chore|other): .* 2 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/unit/api/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /search_service/api/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/unit/api/table/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/unit/proxy/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | amundsensearchlibrary 2 | Copyright 2018-2019 Lyft Inc. 3 | 4 | This product includes software developed at Lyft Inc. 5 | -------------------------------------------------------------------------------- /search_service/models/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/unit/api/dashboard/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /tests/unit/api/document/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include requirements.txt 2 | include search_service/api/swagger_doc/*.yml 3 | include search_service/api/swagger_doc/*/*.yml 4 | -------------------------------------------------------------------------------- /.dependabot/config.yml: -------------------------------------------------------------------------------- 1 | version: 1 2 | update_configs: 3 | - package_manager: "python" 4 | directory: "/" 5 | update_schedule: "monthly" 6 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | This project is governed by [Amundsen's code of conduct](https://github.com/amundsen-io/amundsen/blob/master/CODE_OF_CONDUCT.md). 2 | All contributors and participants agree to abide by its terms. 3 | -------------------------------------------------------------------------------- /search_service/exception.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | 5 | class NotFoundException(Exception): 6 | def __init__(self, message: str) -> None: 7 | super().__init__(message) 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.pyc 3 | *.pyo 4 | *.pyt 5 | *.pytc 6 | *.egg-info 7 | .*.swp 8 | .DS_Store 9 | build/ 10 | dist/ 11 | venv/ 12 | venv3/ 13 | .cache/ 14 | .idea/ 15 | .vscode/ 16 | .coverage 17 | *coverage.xml 18 | .mypy_cache 19 | .pytest_cache/ 20 | *htmlcov/ 21 | -------------------------------------------------------------------------------- /public.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7-slim 2 | WORKDIR /app 3 | RUN pip3 install gunicorn 4 | 5 | COPY requirements.txt /app/requirements.txt 6 | RUN pip3 install -r requirements.txt 7 | 8 | COPY . /app 9 | RUN python3 setup.py install 10 | 11 | CMD [ "python3", "search_service/search_wsgi.py" ] 12 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/healthcheck.yml: -------------------------------------------------------------------------------- 1 | Healthcheck 2 | Used to verify application is healthy 3 | --- 4 | tags: 5 | - 'healthcheck' 6 | responses: 7 | 200: 8 | description: Application is running 9 | content: 10 | string: 11 | description: 'Always empty' 12 | example: '' 13 | -------------------------------------------------------------------------------- /search_service/api/healthcheck.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Tuple 5 | from flasgger import swag_from 6 | 7 | 8 | @swag_from('swagger_doc/healthcheck.yml') 9 | def healthcheck() -> Tuple[str, int]: 10 | return '', 200 11 | -------------------------------------------------------------------------------- /search_service/models/tag.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import attr 5 | 6 | from marshmallow_annotations.ext.attrs import AttrsSchema 7 | 8 | 9 | @attr.s(auto_attribs=True, kw_only=True) 10 | class Tag: 11 | tag_name: str 12 | 13 | def __init__(self, tag_name: str): 14 | self.tag_name = tag_name 15 | 16 | 17 | class TagSchema(AttrsSchema): 18 | class Meta: 19 | target = Tag 20 | register_as_scheme = True 21 | -------------------------------------------------------------------------------- /search_service/search_wsgi.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | 6 | from search_service import create_app 7 | 8 | """ 9 | Entry Point to Flask. 
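The config class is resolved from the SEARCH_SVC_CONFIG_MODULE_CLASS environment
variable, falling back to search_service.config.LocalConfig. Illustrative local
invocation (assumed workflow; a production deployment would typically front this
with gunicorn, which the Docker image installs):

    SEARCH_SVC_CONFIG_MODULE_CLASS=search_service.config.LocalConfig \
        python3 search_service/search_wsgi.py  # listens on 0.0.0.0:5001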
10 | """ 11 | 12 | config_module_class = (os.getenv('SEARCH_SVC_CONFIG_MODULE_CLASS') or 13 | 'search_service.config.LocalConfig') 14 | 15 | application = create_app(config_module_class=config_module_class) 16 | 17 | if __name__ == "__main__": 18 | application.run(host='0.0.0.0', port=5001) 19 | -------------------------------------------------------------------------------- /search_service/models/search_result.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Any, List 5 | 6 | 7 | class SearchResult: 8 | def __init__(self, *, 9 | total_results: int, 10 | results: List[Any]) -> None: 11 | self.total_results = total_results 12 | self.results = results 13 | 14 | def __repr__(self) -> str: 15 | return 'SearchResult(total_results={!r}, results={!r})'.format(self.total_results, self.results) 16 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig helps developers define and maintain consistent 2 | # coding styles between different editors and IDEs 3 | # editorconfig.org 4 | 5 | root = true 6 | 7 | 8 | [*] 9 | 10 | # Change these settings to your own preference 11 | indent_style = space 12 | indent_size = 4 13 | 14 | # We recommend keeping these unchanged 15 | end_of_line = lf 16 | charset = utf-8 17 | trim_trailing_whitespace = true 18 | insert_final_newline = true 19 | 20 | [*.json] 21 | indent_size = 2 22 | 23 | [*.yaml] 24 | indent_size = 2 25 | 26 | [*.md] 27 | trim_trailing_whitespace = false 28 | 29 | [Makefile] 30 | indent_style = tab 31 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | amundsen-common>=0.3.0,<1.0 2 | aniso8601==3.0.0 3 | pyatlasclient==1.0.3 4 | click==6.7 5 | elasticsearch==6.2.0 6 | elasticsearch-dsl==6.1.0 7 | flake8==3.5.0 8 | flake8-tidy-imports==1.1.0 9 | flasgger==0.9.3 10 | Flask==1.0.2 11 | Flask-RESTful==0.3.6 12 | flask-cors==3.0.8 13 | itsdangerous==0.24 14 | Jinja2>=2.10.1 15 | jsonschema==2.6.0 16 | marshmallow>=2.15.3,<3.0 17 | marshmallow-annotations>=2.4.0,<3.0 18 | mock==2.0.0 19 | mypy==0.660 20 | pytest==3.5.1 21 | pytest-cov==2.5.1 22 | pytest-mock==1.1 23 | pytz==2018.4 24 | six==1.11.0 25 | statsd==3.2.1 26 | typing==3.6.4 27 | Werkzeug==0.15.3 28 | wheel==0.31.1 29 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Codeowners file by GitHub 2 | # Reference: https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners 3 | # Each line is a file pattern followed by one or more owners. 4 | # Order is important; the last matching pattern takes the most 5 | # precedence. 6 | 7 | # These owners will be the default owners for everything in 8 | # the repo. Unless a later match takes precedence, 9 | # @amundsen-io/amundsen-committers will be requested for 10 | # review when someone opens a pull request.
11 | * @amundsen-io/amundsen-committers 12 | 13 | *.py @feng-tao @jinhyukchang @allisonsuarez @dikshathakur3119 @verdan @bolkedebruin @mgorsk1 14 | -------------------------------------------------------------------------------- /.github/workflows/license.yml: -------------------------------------------------------------------------------- 1 | name: license 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up Golang 18 | uses: actions/setup-go@v2 19 | - name: Install addlicense 20 | run: | 21 | export PATH=${PATH}:`go env GOPATH`/bin 22 | go get -v -u github.com/google/addlicense 23 | - name: Check license 24 | run: | 25 | export PATH=${PATH}:`go env GOPATH`/bin 26 | addlicense -check -l mit -c "Amundsen" $(find $PWD -type f -name '*.py') -------------------------------------------------------------------------------- /search_service/models/base.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from abc import ABCMeta, abstractmethod 5 | from typing import Set 6 | 7 | 8 | class Base(metaclass=ABCMeta): 9 | """ 10 | A base class for ES model 11 | """ 12 | 13 | @abstractmethod 14 | def get_id(cls) -> str: 15 | # return a document id in ES 16 | pass 17 | 18 | @abstractmethod 19 | def get_attrs(cls) -> Set: 20 | # return a set of attributes for the class 21 | pass 22 | 23 | @staticmethod 24 | @abstractmethod 25 | def get_type() -> str: 26 | # return a type string for the class 27 | pass 28 | -------------------------------------------------------------------------------- /tests/unit/test_app.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from flask import current_app 7 | 8 | from search_service import create_app 9 | 10 | 11 | class AppTest(unittest.TestCase): 12 | """ 13 | Test that the service can stand up 14 | """ 15 | 16 | def setUp(self) -> None: 17 | config_module_class = 'search_service.config.LocalConfig' 18 | self.app = create_app(config_module_class=config_module_class) 19 | self.app_context = self.app.app_context() 20 | self.app_context.push() 21 | 22 | def tearDown(self) -> None: 23 | self.app_context.pop() 24 | 25 | def test_app_exists(self) -> None: 26 | self.assertIsNotNone(current_app) 27 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | format = pylint 3 | exclude = .svn,CVS,.bzr,.hg,.git,__pycache__,venv 4 | max-complexity = 10 5 | max-line-length = 120 6 | ignore = I201 7 | 8 | [pep8] 9 | max-line-length = 120 10 | 11 | # modify --cov-fail-under parameter after adding unit tests 12 | [pytest] 13 | addopts = --cov=search_service --cov-fail-under=0 --cov-report=term-missing:skip-covered --cov-report=xml --cov-report=html -vvv 14 | 15 | [coverage:run] 16 | branch = True 17 | 18 | [coverage:xml] 19 | output = build/coverage.xml 20 | 21 | [coverage:html] 22 | directory = build/coverage_html 23 | 24 | [coverage:report] 25 | exclude_lines = 26 | pragma: no cover 27 | from * 28 | import * 29 | 30 | [mypy] 31 | python_version = 3.6 32 | disallow_untyped_defs = True 33 | ignore_missing_imports = True 34 | strict_optional = True 35 | warn_no_return = True 36 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 14 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 21 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - keep fresh 8 | # Label to use when marking an issue as stale 9 | staleLabel: stale 10 | # Comment to post when marking an issue as stale. Set to `false` to disable 11 | markComment: > 12 | This issue has been automatically marked as stale because it has not had 13 | recent activity. It will be closed if no further activity occurs. 14 | # Comment to post when closing a stale issue. Set to `false` to disable 15 | closeComment: > 16 | This issue has been automatically closed for inactivity. If you still wish to 17 | make these changes, please open a new pull request or reopen this one. 18 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/document/table_delete.yml: -------------------------------------------------------------------------------- 1 | Delete table document by id 2 | Deletes table document by id in ElasticSearch.
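The document id is the table key, which the service uses as the Elasticsearch document id
(see Table.get_id in search_service/models/table.py). Illustrative call, assuming the
default local port and route (keys containing slashes may need URL-encoding):
curl -X DELETE 'http://localhost:5001/document_table/<document_id>?index=table_search_index'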
3 | --- 4 | tags: 5 | - 'document_table' 6 | parameters: 7 | - name: document_id 8 | in: path 9 | type: string 10 | schema: 11 | type: string 12 | required: true 13 | - name: index 14 | in: query 15 | type: string 16 | schema: 17 | type: string 18 | default: table_search_index 19 | required: false 20 | responses: 21 | 200: 22 | description: Empty json response 23 | content: 24 | application/json: 25 | schema: 26 | $ref: '#/components/schemas/EmptyResponse' 27 | 500: 28 | description: Exception encountered while deleting document 29 | content: 30 | application/json: 31 | schema: 32 | $ref: '#/components/schemas/ErrorResponse' 33 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/document/user_delete.yml: -------------------------------------------------------------------------------- 1 | Delete user document by id 2 | Deletes user document by id in ElasticSearch. 3 | --- 4 | tags: 5 | - 'document_user' 6 | parameters: 7 | - name: document_id 8 | in: path 9 | type: string 10 | schema: 11 | type: string 12 | required: true 13 | - name: index 14 | in: query 15 | type: string 16 | schema: 17 | type: string 18 | default: user_search_index 19 | required: false 20 | responses: 21 | 200: 22 | description: Empty json response 23 | content: 24 | application/json: 25 | schema: 26 | $ref: '#/components/schemas/EmptyResponse' 27 | 500: 28 | description: Exception encountered while deleting document 29 | content: 30 | application/json: 31 | schema: 32 | $ref: '#/components/schemas/ErrorResponse' 33 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | 6 | from setuptools import setup, find_packages 7 | 8 | __version__ = '2.4.1' 9 | 10 | requirements_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'requirements.txt') 11 | with open(requirements_path) as requirements_file: 12 | requirements = requirements_file.readlines() 13 | 14 | setup( 15 | name='amundsen-search', 16 | version=__version__, 17 | description='Search Service for Amundsen', 18 | url='https://github.com/amundsen-io/amundsensearchlibrary.git', 19 | maintainer='Amundsen TSC', 20 | maintainer_email='amundsen-tsc@lists.lfai.foundation', 21 | packages=find_packages(exclude=['tests*']), 22 | include_package_data=True, 23 | zip_safe=False, 24 | dependency_links=[], 25 | install_requires=requirements, 26 | python_requires=">=3.6" 27 | ) 28 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/user.yml: -------------------------------------------------------------------------------- 1 | Search for user 2 | Used by the frontend API to search for users 3 | --- 4 | tags: 5 | - 'search_user' 6 | parameters: 7 | - name: query_term 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | required: true 13 | - name: page_index 14 | in: query 15 | type: integer 16 | schema: 17 | type: integer 18 | default: 0 19 | required: false 20 | - name: index 21 | in: query 22 | type: string 23 | schema: 24 | type: string 25 | default: 'user_search_index' 26 | required: false 27 | responses: 28 | 200: 29 | description: user search results 30 | content: 31 | application/json: 32 | schema: 33 | $ref: '#/components/schemas/SearchUserResults' 34 | 500: 35 | description: Exception encountered while getting user 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/table/search_table.yml: -------------------------------------------------------------------------------- 1 | Table search 2 | This is used by the frontend API to search table information. 3 | --- 4 | tags: 5 | - 'search' 6 | parameters: 7 | - name: query_term 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | required: true 13 | - name: page_index 14 | in: query 15 | type: integer 16 | schema: 17 | type: integer 18 | default: 0 19 | required: false 20 | - name: index 21 | in: query 22 | type: string 23 | schema: 24 | type: string 25 | default: 'table_search_index' 26 | required: false 27 | responses: 28 | 200: 29 | description: table result information 30 | content: 31 | application/json: 32 | schema: 33 | $ref: '#/components/schemas/SearchTableResults' 34 | 500: 35 | description: Exception encountered while searching 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/dashboard/search_dashboard.yml: -------------------------------------------------------------------------------- 1 | Dashboard search 2 | This is used by the frontend API to search dashboard information. 
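Illustrative request, with an assumed local route and example values only:
GET /search_dashboard?query_term=sales&page_index=0&index=dashboard_search_index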
3 | --- 4 | tags: 5 | - 'search_dashboard' 6 | parameters: 7 | - name: query_term 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | required: true 13 | - name: page_index 14 | in: query 15 | type: integer 16 | schema: 17 | type: integer 18 | default: 0 19 | required: false 20 | - name: index 21 | in: query 22 | type: string 23 | schema: 24 | type: string 25 | default: 'dashboard_search_index' 26 | required: false 27 | responses: 28 | 200: 29 | description: dashboard result information 30 | content: 31 | application/json: 32 | schema: 33 | $ref: '#/components/schemas/SearchDashboardResults' 34 | 500: 35 | description: Exception encountered while searching 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/document/user_post.yml: -------------------------------------------------------------------------------- 1 | Creates users document 2 | Creates users document in ElasticSearch. 3 | --- 4 | tags: 5 | - 'document_user' 6 | parameters: 7 | - name: index 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | default: user_search_index 13 | required: false 14 | - name: body 15 | in: body 16 | schema: 17 | type: object 18 | name: data 19 | properties: 20 | data: 21 | type: array 22 | description: 'List of users' 23 | items: 24 | $ref: '#/components/schemas/UserFields' 25 | description: 'Users to create' 26 | required: true 27 | responses: 28 | 200: 29 | description: Empty json response 30 | content: 31 | string: 32 | description: 'Index that was used' 33 | example: 'user_search_index' 34 | 500: 35 | description: Exception encountered while creating document 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/document/user_put.yml: -------------------------------------------------------------------------------- 1 | Updates users document 2 | Updates users document in ElasticSearch. 3 | --- 4 | tags: 5 | - 'document_user' 6 | parameters: 7 | - name: index 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | default: user_search_index 13 | required: false 14 | - name: body 15 | in: body 16 | schema: 17 | type: object 18 | name: data 19 | properties: 20 | data: 21 | type: array 22 | description: 'List of users' 23 | items: 24 | $ref: '#/components/schemas/UserFields' 25 | description: 'Users to update' 26 | required: true 27 | responses: 28 | 200: 29 | description: Empty json response 30 | content: 31 | string: 32 | description: 'Index that was used' 33 | example: 'user_search_index' 34 | 500: 35 | description: Exception encountered while updating document 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | IMAGE := amundsendev/amundsen-search 2 | VERSION:= $(shell grep -m 1 '__version__' setup.py | cut -d '=' -f 2 | tr -d "'" | tr -d '[:space:]') 3 | 4 | .PHONY: test 5 | clean: 6 | find . -name \*.pyc -delete 7 | find . -name __pycache__ -delete 8 | rm -rf dist/ 9 | 10 | .PHONY: test_unit 11 | test_unit: 12 | python3 -bb -m pytest tests 13 | 14 | .PHONY: lint 15 | lint: 16 | flake8 . 
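# Note: `make test` (defined below) chains test_unit, lint, and mypy in one run.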
17 | 18 | .PHONY: mypy 19 | mypy: 20 | mypy --ignore-missing-imports --strict-optional --warn-no-return . 21 | 22 | .PHONY: test 23 | test: test_unit lint mypy 24 | 25 | .PHONY: image 26 | image: 27 | docker build -f public.Dockerfile -t ${IMAGE}:latest . 28 | 29 | .PHONY: image-version 30 | image-version: 31 | docker build -f public.Dockerfile -t ${IMAGE}:${VERSION} . 32 | 33 | .PHONY: push-image-version 34 | push-image-version: 35 | docker push ${IMAGE}:${VERSION} 36 | 37 | .PHONY: push-image 38 | push-image: 39 | docker push ${IMAGE}:latest 40 | 41 | .PHONY: build-push-image 42 | build-push-image-latest: image push-image 43 | build-push-image-version: image-version push-image-version 44 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/document/table_post.yml: -------------------------------------------------------------------------------- 1 | Creates tables document 2 | Creates tables document in ElasticSearch. 3 | --- 4 | tags: 5 | - 'document_table' 6 | parameters: 7 | - name: index 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | default: 'table_search_index' 13 | required: false 14 | - name: body 15 | in: body 16 | schema: 17 | type: object 18 | name: data 19 | properties: 20 | data: 21 | type: array 22 | description: 'List of tables' 23 | items: 24 | $ref: '#/components/schemas/TableFields' 25 | description: 'Tables to create' 26 | required: true 27 | responses: 28 | 200: 29 | description: Empty json response 30 | content: 31 | string: 32 | description: 'Index that was used' 33 | example: 'table_search_index' 34 | 500: 35 | description: Exception encountered while creating document 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/document/table_put.yml: -------------------------------------------------------------------------------- 1 | Updates tables document 2 | Updates tables document in ElasticSearch. 
3 | --- 4 | tags: 5 | - 'document_table' 6 | parameters: 7 | - name: index 8 | in: query 9 | type: string 10 | schema: 11 | type: string 12 | default: 'table_search_index' 13 | required: false 14 | - name: body 15 | in: body 16 | schema: 17 | type: object 18 | name: data 19 | properties: 20 | data: 21 | type: array 22 | description: 'List of tables' 23 | items: 24 | $ref: '#/components/schemas/TableFields' 25 | description: 'Tables to update' 26 | required: true 27 | responses: 28 | 200: 29 | description: Empty json response 30 | content: 31 | string: 32 | description: 'Index that was used' 33 | example: 'table_search_index' 34 | 500: 35 | description: Exception encountered while updating document 36 | content: 37 | application/json: 38 | schema: 39 | $ref: '#/components/schemas/ErrorResponse' 40 | -------------------------------------------------------------------------------- /.github/workflows/pull_request.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | on: pull_request 5 | jobs: 6 | pre-commit: 7 | runs-on: ubuntu-18.04 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@v1 11 | - name: Setup python 3.6 12 | uses: actions/setup-python@v1 13 | with: 14 | python-version: 3.6 15 | test-unit: 16 | runs-on: ubuntu-18.04 17 | strategy: 18 | matrix: 19 | python-version: ['3.6.x', '3.7.x'] 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@v1 23 | - name: Setup python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v1 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - name: Install dependencies 28 | run: pip3 install -r requirements.txt && pip3 install .[all] && pip3 install codecov 29 | - name: Run python unit tests 30 | run: make test 31 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/table/search_table_filter.yml: -------------------------------------------------------------------------------- 1 | Table search 2 | This is used by the frontend API to search table information. 3 | --- 4 | tags: 5 | - 'search_table' 6 | paths: 7 | /search_table: 8 | post: 9 | summary: This is used by the frontend API to search table information. 10 | requestBody: 11 | description: The json data passed from the frontend API to execute a search. 12 | required: true 13 | content: 14 | application/json: 15 | schema: 16 | type: object 17 | properties: 18 | index: 19 | type: string 20 | page_index: 21 | type: integer 22 | query_term: 23 | type: string 24 | search_request: 25 | type: object 26 | responses: 27 | 200: 28 | description: table result information with query string 29 | content: 30 | application/json: 31 | schema: 32 | $ref: '#/components/schemas/SearchTableResults' 33 | 500: 34 | description: Exception encountered while searching 35 | content: 36 | application/json: 37 | schema: 38 | $ref: '#/components/schemas/ErrorResponse' 39 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/dashboard/search_dashboard_filter.yml: -------------------------------------------------------------------------------- 1 | Dashboard search 2 | This is used by the frontend API to search dashboard information. 
3 | --- 4 | tags: 5 | - 'search_dashboard_filter' 6 | paths: 7 | /search_dashboard: 8 | post: 9 | summary: This is used by the frontend API to search dashboard information. 10 | requestBody: 11 | description: The json data passed from the frontend API to execute a search. 12 | required: true 13 | content: 14 | application/json: 15 | schema: 16 | type: object 17 | properties: 18 | index: 19 | type: string 20 | page_index: 21 | type: integer 22 | query_term: 23 | type: string 24 | search_request: 25 | type: object 26 | responses: 27 | 200: 28 | description: dashboard result information with query string 29 | content: 30 | application/json: 31 | schema: 32 | $ref: '#/components/schemas/SearchDashboardResults' 33 | 500: 34 | description: Exception encountered while searching 35 | content: 36 | application/json: 37 | schema: 38 | $ref: '#/components/schemas/ErrorResponse' 39 | -------------------------------------------------------------------------------- /.github/workflows/pypipublish.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Build and Deploy 3 | on: 4 | push: 5 | branches: 6 | - master 7 | tags: 8 | - '*' 9 | jobs: 10 | build-and-publish-python-module: 11 | name: Build and publish python module to pypi 12 | runs-on: ubuntu-18.04 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v1 16 | - name: Setup python 3.6 17 | uses: actions/setup-python@v1 18 | with: 19 | python-version: 3.6 20 | - name: Add wheel dependency 21 | run: pip install wheel 22 | - name: Generate dist 23 | run: python setup.py sdist bdist_wheel 24 | - name: Publish to PyPI 25 | if: startsWith(github.event.ref, 'refs/tags') 26 | uses: pypa/gh-action-pypi-publish@master 27 | with: 28 | user: __token__ 29 | password: ${{ secrets.pypi_password }} 30 | build-and-publish-docker-image: 31 | name: Build and publish docker image 32 | runs-on: ubuntu-18.04 33 | steps: 34 | - name: Checkout 35 | uses: actions/checkout@v2 36 | - name: Login with docker 37 | run: docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }} 38 | - name: Publish to Registry for latest 39 | if: success() 40 | run: make build-push-image-latest 41 | - name: Publish to Registry for version 42 | if: startsWith(github.event.ref, 'refs/tags') 43 | run: make build-push-image-version 44 | -------------------------------------------------------------------------------- /tests/unit/api/document/test_document_user_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from http import HTTPStatus 7 | from mock import patch, Mock, MagicMock 8 | 9 | from search_service.api.document import DocumentUserAPI 10 | from search_service import create_app 11 | 12 | 13 | class TestDocumentUserAPI(unittest.TestCase): 14 | def setUp(self) -> None: 15 | self.app = create_app(config_module_class='search_service.config.Config') 16 | self.app_context = self.app.app_context() 17 | self.app_context.push() 18 | 19 | def tearDown(self) -> None: 20 | self.app_context.pop() 21 | 22 | @patch('search_service.api.document.reqparse.RequestParser') 23 | @patch('search_service.api.document.get_proxy_client') 24 | def test_delete(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 25 | mock_proxy = get_proxy.return_value = Mock() 26 | RequestParser().parse_args.return_value = dict(data='[]', index='fake_index') 27 | 28 | response = DocumentUserAPI().delete(document_id='fake id') 29 | self.assertEqual(list(response)[1], HTTPStatus.OK) 30 | mock_proxy.delete_document.assert_called_with(data=['fake id'], index='fake_index') 31 | 32 | def test_should_not_reach_delete_without_id(self) -> None: 33 | response = self.app.test_client().delete('/document_user') 34 | 35 | self.assertEqual(response.status_code, 405) 36 | -------------------------------------------------------------------------------- /tests/unit/api/document/test_document_table_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from http import HTTPStatus 7 | from mock import patch, Mock, MagicMock 8 | 9 | from search_service.api.document import DocumentTableAPI 10 | from search_service import create_app 11 | 12 | 13 | class TestDocumentTableAPI(unittest.TestCase): 14 | def setUp(self) -> None: 15 | self.app = create_app(config_module_class='search_service.config.Config') 16 | self.app_context = self.app.app_context() 17 | self.app_context.push() 18 | 19 | def tearDown(self) -> None: 20 | self.app_context.pop() 21 | 22 | @patch('search_service.api.document.reqparse.RequestParser') 23 | @patch('search_service.api.document.get_proxy_client') 24 | def test_delete(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 25 | mock_proxy = get_proxy.return_value = Mock() 26 | RequestParser().parse_args.return_value = dict(data='[]', index='fake_index') 27 | 28 | response = DocumentTableAPI().delete(document_id='fake id') 29 | self.assertEqual(list(response)[1], HTTPStatus.OK) 30 | mock_proxy.delete_document.assert_called_with(data=['fake id'], index='fake_index') 31 | 32 | def test_should_not_reach_delete_without_id(self) -> None: 33 | response = self.app.test_client().delete('/document_table') 34 | 35 | self.assertEqual(response.status_code, 405) 36 | -------------------------------------------------------------------------------- /search_service/proxy/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from threading import Lock 5 | 6 | from flask import current_app 7 | 8 | from search_service import config 9 | from search_service.proxy.base import BaseProxy 10 | from werkzeug.utils import import_string 11 | 12 | _proxy_client = None 13 | _proxy_client_lock = Lock() 14 | 15 | DEFAULT_PAGE_SIZE = 10 16 | 17 | 18 | def get_proxy_client() -> BaseProxy: 19 | """ 20 | Provides singleton proxy client based on the config 21 | :return: Proxy instance of any subclass of BaseProxy 22 | """ 23 | global _proxy_client 24 | 25 | if _proxy_client: 26 | return _proxy_client 27 | 28 | with _proxy_client_lock: 29 | if _proxy_client: 30 | return _proxy_client 31 | else: 32 | obj = current_app.config[config.PROXY_CLIENT_KEY] 33 | 34 | # Gather all the configuration to create a Proxy Client 35 | host = current_app.config[config.PROXY_ENDPOINT] 36 | user = current_app.config[config.PROXY_USER] 37 | password = current_app.config[config.PROXY_PASSWORD] 38 | client = import_string(current_app.config[config.PROXY_CLIENT]) 39 | 40 | # number of results per search page 41 | page_size = current_app.config.get(config.SEARCH_PAGE_SIZE_KEY, DEFAULT_PAGE_SIZE) 42 | 43 | _proxy_client = client(host=host, user=user, password=password, client=obj, page_size=page_size) 44 | 45 | return _proxy_client 46 | -------------------------------------------------------------------------------- /tests/unit/api/dashboard/fixtures.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from search_service.models.dashboard import Dashboard 5 | 6 | 7 | def mock_proxy_results() -> Dashboard: 8 | return Dashboard(uri='dashboard_uri', 9 | cluster='gold', 10 | group_name='mode_dashboard_group', 11 | group_url='mode_dashboard_group_url', 12 | product='mode', 13 | name='mode_dashboard', 14 | url='mode_dashboard_url', 15 | description='test_dashboard', 16 | last_successful_run_timestamp=1000) 17 | 18 | 19 | def mock_json_response() -> dict: 20 | return { 21 | "chart_names": [], 22 | "uri": 'dashboard_uri', 23 | "cluster": 'gold', 24 | "group_name": 'mode_dashboard_group', 25 | "group_url": 'mode_dashboard_group_url', 26 | "product": 'mode', 27 | "name": 'mode_dashboard', 28 | "url": 'mode_dashboard_url', 29 | "description": 'test_dashboard', 30 | "last_successful_run_timestamp": 1000, 31 | } 32 | 33 | 34 | def default_json_response() -> dict: 35 | return { 36 | "chart_names": [], 37 | "uri": None, 38 | "cluster": None, 39 | "group_name": None, 40 | "group_url": None, 41 | "product": None, 42 | "name": None, 43 | "url": None, 44 | "description": None, 45 | "last_successful_run_timestamp": 0, 46 | } 47 | -------------------------------------------------------------------------------- /search_service/models/user.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | 5 | from typing import Optional, Set, List 6 | 7 | import attr 8 | from amundsen_common.models.user import User as CommonUser 9 | from marshmallow_annotations.ext.attrs import AttrsSchema 10 | 11 | from .base import Base 12 | 13 | 14 | @attr.s(auto_attribs=True, kw_only=True) 15 | class User(Base, CommonUser): 16 | """ 17 | This represents the part of a user stored in the search proxy 18 | """ 19 | manager_email: Optional[str] = None 20 | 21 | def get_id(self) -> str: 22 | # uses the user email as the document id in ES 23 | return self.email if self.email else '' 24 | 25 | @classmethod 26 | def get_attrs(cls) -> Set: 27 | return { 28 | 'full_name', 29 | 'first_name', 30 | 'last_name', 31 | 'team_name', 32 | 'email', 33 | 'manager_email', 34 | 'github_username', 35 | 'is_active', 36 | 'employee_type', 37 | 'role_name', 38 | } 39 | 40 | @staticmethod 41 | def get_type() -> str: 42 | return 'user' 43 | 44 | 45 | class UserSchema(AttrsSchema): 46 | class Meta: 47 | target = User 48 | register_as_scheme = True 49 | 50 | 51 | @attr.s(auto_attribs=True, kw_only=True) 52 | class SearchUserResult: 53 | total_results: int = attr.ib() 54 | results: List[User] = attr.ib(factory=list) 55 | 56 | 57 | class SearchUserResultSchema(AttrsSchema): 58 | class Meta: 59 | target = SearchUserResult 60 | register_as_scheme = True 61 | -------------------------------------------------------------------------------- /search_service/models/dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Set, List 5 | 6 | import attr 7 | 8 | from marshmallow_annotations.ext.attrs import AttrsSchema 9 | from amundsen_common.models.dashboard import DashboardSummary, DashboardSummarySchema 10 | 11 | from search_service.models.base import Base 12 | 13 | 14 | @attr.s(auto_attribs=True, kw_only=True) 15 | class Dashboard(Base, 16 | DashboardSummary): 17 | """ 18 | This represents the part of a dashboard stored in the search proxy 19 | """ 20 | 21 | def get_id(self) -> str: 22 | # uses the dashboard name as the document id in ES 23 | return self.name 24 | 25 | @classmethod 26 | def get_attrs(cls) -> Set: 27 | return { 28 | 'uri', 29 | 'cluster', 30 | 'group_name', 31 | 'group_url', 32 | 'product', 33 | 'name', 34 | 'url', 35 | 'description', 36 | 'last_successful_run_timestamp' 37 | } 38 | 39 | @staticmethod 40 | def get_type() -> str: 41 | return 'dashboard' 42 | 43 | 44 | class DashboardSchema(DashboardSummarySchema): 45 | class Meta: 46 | target = Dashboard 47 | register_as_scheme = True 48 | 49 | 50 | @attr.s(auto_attribs=True, kw_only=True) 51 | class SearchDashboardResult: 52 | total_results: int = attr.ib() 53 | results: List[Dashboard] = attr.ib(factory=list) 54 | 55 | 56 | class SearchDashboardResultSchema(AttrsSchema): 57 | class Meta: 58 | target = SearchDashboardResult 59 | register_as_scheme = True 60 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | <!-- 16 | --> 17 | ### Summary of Changes 18 | 19 | _Include a summary of changes then remove this line_ 20 | 21 | ### Tests 22 | 23 | _What tests did you add or modify and why? If no tests were added or modified, explain why.
Remove this line_ 24 | 25 | ### Documentation 26 | 27 | _What documentation did you add or modify and why? Add any relevant links then remove this line_ 28 | 29 | ### CheckList 30 | 31 | Make sure you have checked **all** steps below to ensure a timely review. 32 | 33 | - [ ] PR title addresses the issue accurately and concisely. Example: "Updates the version of Flask to v1.0.2" 34 | - In case you are adding a dependency, check if the license complies with the [ASF 3rd Party License Policy](https://www.apache.org/legal/resolved.html#category-x). 35 | - [ ] PR includes a summary of changes. 36 | - [ ] PR adds unit tests, updates existing unit tests, **OR** documents why no test additions or modifications are needed. 37 | - [ ] In case of new functionality, my PR adds documentation that describes how to use it. 38 | - All the public functions and the classes in the PR contain docstrings that explain what they do 39 | - [ ] PR passes `make test` 40 | -------------------------------------------------------------------------------- /search_service/api/user.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from http import HTTPStatus 5 | from typing import Iterable, Any 6 | 7 | from flasgger import swag_from 8 | from flask_restful import Resource, reqparse 9 | 10 | from search_service.proxy import get_proxy_client 11 | from search_service.models.user import SearchUserResultSchema 12 | 13 | 14 | USER_INDEX = 'user_search_index' 15 | 16 | 17 | class SearchUserAPI(Resource): 18 | """ 19 | Search User API 20 | """ 21 | USER_INDEX = 'user_search_index' 22 | 23 | def __init__(self) -> None: 24 | self.proxy = get_proxy_client() 25 | 26 | self.parser = reqparse.RequestParser(bundle_errors=True) 27 | 28 | self.parser.add_argument('query_term', required=True, type=str) 29 | self.parser.add_argument('page_index', required=False, default=0, type=int) 30 | self.parser.add_argument('index', required=False, default=SearchUserAPI.USER_INDEX, type=str) 31 | 32 | super(SearchUserAPI, self).__init__() 33 | 34 | @swag_from('swagger_doc/user.yml') 35 | def get(self) -> Iterable[Any]: 36 | """ 37 | Fetch search results based on query_term. 38 | :return: list of search results. List can be empty if query 39 | doesn't match any result 40 | """ 41 | args = self.parser.parse_args(strict=True) 42 | 43 | try: 44 | 45 | results = self.proxy.fetch_user_search_results( 46 | query_term=args['query_term'], 47 | page_index=args['page_index'], 48 | index=args.get('index') 49 | ) 50 | 51 | return SearchUserResultSchema().dump(results).data, HTTPStatus.OK 52 | 53 | except RuntimeError: 54 | 55 | err_msg = 'Exception encountered while processing search request' 56 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 57 | -------------------------------------------------------------------------------- /tests/unit/api/document/test_document_users_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from http import HTTPStatus 7 | from mock import patch, Mock, MagicMock 8 | 9 | from search_service.api.document import DocumentUsersAPI 10 | from search_service import create_app 11 | 12 | 13 | class TestDocumentUsersAPI(unittest.TestCase): 14 | def setUp(self) -> None: 15 | self.app = create_app(config_module_class='search_service.config.Config') 16 | self.app_context = self.app.app_context() 17 | self.app_context.push() 18 | 19 | def tearDown(self) -> None: 20 | self.app_context.pop() 21 | 22 | @patch('search_service.api.document.reqparse.RequestParser') 23 | @patch('search_service.api.document.get_proxy_client') 24 | def test_post(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 25 | mock_proxy = get_proxy.return_value = Mock() 26 | RequestParser().parse_args.return_value = dict(data='{}', index='fake_index') 27 | 28 | response = DocumentUsersAPI().post() 29 | self.assertEqual(list(response)[1], HTTPStatus.OK) 30 | mock_proxy.create_document.assert_called_with(data=[], index='fake_index') 31 | 32 | @patch('search_service.api.document.reqparse.RequestParser') 33 | @patch('search_service.api.document.get_proxy_client') 34 | def test_put(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 35 | mock_proxy = get_proxy.return_value = Mock() 36 | RequestParser().parse_args.return_value = dict(data='{}', index='fake_index') 37 | 38 | response = DocumentUsersAPI().put() 39 | self.assertEqual(list(response)[1], HTTPStatus.OK) 40 | mock_proxy.update_document.assert_called_with(data=[], index='fake_index') 41 | 42 | def test_should_not_reach_create_with_id(self) -> None: 43 | response = self.app.test_client().post('/document_user/1') 44 | 45 | self.assertEqual(response.status_code, 405) 46 | 47 | def test_should_not_reach_update_with_id(self) -> None: 48 | response = self.app.test_client().put('/document_user/1') 49 | 50 | self.assertEqual(response.status_code, 405) 51 | -------------------------------------------------------------------------------- /tests/unit/api/document/test_document_tables_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from http import HTTPStatus 7 | from mock import patch, Mock, MagicMock 8 | 9 | from search_service.api.document import DocumentTablesAPI 10 | from search_service import create_app 11 | 12 | 13 | class TestDocumentTablesAPI(unittest.TestCase): 14 | def setUp(self) -> None: 15 | self.app = create_app(config_module_class='search_service.config.Config') 16 | self.app_context = self.app.app_context() 17 | self.app_context.push() 18 | 19 | def tearDown(self) -> None: 20 | self.app_context.pop() 21 | 22 | @patch('search_service.api.document.reqparse.RequestParser') 23 | @patch('search_service.api.document.get_proxy_client') 24 | def test_post(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 25 | mock_proxy = get_proxy.return_value = Mock() 26 | RequestParser().parse_args.return_value = dict(data='[]', index='fake_index') 27 | 28 | response = DocumentTablesAPI().post() 29 | self.assertEqual(list(response)[1], HTTPStatus.OK) 30 | mock_proxy.create_document.assert_called_with(data=[], index='fake_index') 31 | 32 | @patch('search_service.api.document.reqparse.RequestParser') 33 | @patch('search_service.api.document.get_proxy_client') 34 | def test_put(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 35 | mock_proxy = get_proxy.return_value = Mock() 36 | RequestParser().parse_args.return_value = dict(data='{}', index='fake_index') 37 | 38 | response = DocumentTablesAPI().put() 39 | self.assertEqual(list(response)[1], HTTPStatus.OK) 40 | mock_proxy.update_document.assert_called_with(data=[], index='fake_index') 41 | 42 | def test_should_not_reach_create_with_id(self) -> None: 43 | response = self.app.test_client().post('/document_table/1') 44 | 45 | self.assertEqual(response.status_code, 405) 46 | 47 | def test_should_not_reach_update_with_id(self) -> None: 48 | response = self.app.test_client().put('/document_table/1') 49 | 50 | self.assertEqual(response.status_code, 405) 51 | -------------------------------------------------------------------------------- /search_service/config.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | 6 | ELASTICSEARCH_INDEX_KEY = 'ELASTICSEARCH_INDEX' 7 | SEARCH_PAGE_SIZE_KEY = 'SEARCH_PAGE_SIZE' 8 | STATS_FEATURE_KEY = 'STATS' 9 | 10 | PROXY_ENDPOINT = 'PROXY_ENDPOINT' 11 | PROXY_USER = 'PROXY_USER' 12 | PROXY_PASSWORD = 'PROXY_PASSWORD' 13 | PROXY_CLIENT = 'PROXY_CLIENT' 14 | PROXY_CLIENT_KEY = 'PROXY_CLIENT_KEY' 15 | PROXY_CLIENTS = { 16 | 'ELASTICSEARCH': 'search_service.proxy.elasticsearch.ElasticsearchProxy', 17 | 'ATLAS': 'search_service.proxy.atlas.AtlasProxy' 18 | } 19 | 20 | 21 | class Config: 22 | LOG_FORMAT = '%(asctime)s.%(msecs)03d [%(levelname)s] %(module)s.%(funcName)s:%(lineno)d (%(process)d:'\ 23 | '%(threadName)s) - %(message)s' 24 | LOG_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S%z' 25 | LOG_LEVEL = 'INFO' 26 | 27 | # Path to the logging configuration file to be used by `fileConfig()` method 28 | # https://docs.python.org/3.7/library/logging.config.html#logging.config.fileConfig 29 | # LOG_CONFIG_FILE = 'search_service/logging.conf' 30 | LOG_CONFIG_FILE = None 31 | 32 | # Config used by Elasticsearch 33 | ELASTICSEARCH_INDEX = 'table_search_index' 34 | 35 | SWAGGER_ENABLED = os.environ.get('SWAGGER_ENABLED', False) 36 | 37 | 38 | class LocalConfig(Config): 39 | DEBUG = False 40 | TESTING = False 41 | STATS = False 42 | LOCAL_HOST = '0.0.0.0' 43 | PROXY_PORT = '9200' 44 | PROXY_ENDPOINT = os.environ.get('PROXY_ENDPOINT', 45 | 'http://{LOCAL_HOST}:{PORT}'.format( 46 | LOCAL_HOST=LOCAL_HOST, 47 | PORT=PROXY_PORT) 48 | ) 49 | PROXY_CLIENT = PROXY_CLIENTS[os.environ.get('PROXY_CLIENT', 'ELASTICSEARCH')] 50 | PROXY_CLIENT_KEY = os.environ.get('PROXY_CLIENT_KEY') 51 | PROXY_USER = os.environ.get('CREDENTIALS_PROXY_USER', 'elastic') 52 | PROXY_PASSWORD = os.environ.get('CREDENTIALS_PROXY_PASSWORD', 'elastic') 53 | 54 | SWAGGER_ENABLED = True 55 | SWAGGER_TEMPLATE_PATH = os.path.join('api', 'swagger_doc', 'template.yml') 56 | SWAGGER = { 57 | 'openapi': '3.0.2', 58 | 'title': 'Search Service', 59 | 'uiversion': 3 60 | } 61 | -------------------------------------------------------------------------------- /search_service/models/table.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import List, Optional, Set 5 | 6 | import attr 7 | from marshmallow_annotations.ext.attrs import AttrsSchema 8 | 9 | from .base import Base 10 | from search_service.models.tag import Tag 11 | 12 | 13 | @attr.s(auto_attribs=True, kw_only=True) 14 | class Table(Base): 15 | """ 16 | This represents the part of a table stored in the search proxy 17 | """ 18 | database: str 19 | cluster: str 20 | schema: str 21 | name: str 22 | key: str 23 | display_name: Optional[str] = None 24 | tags: List[Tag] 25 | badges: List[Tag] 26 | description: Optional[str] = None 27 | last_updated_timestamp: int 28 | # The following properties are lightly-transformed properties from the normal table object: 29 | column_names: List[str] 30 | column_descriptions: List[str] = [] 31 | programmatic_descriptions: List[str] = [] 32 | # The following are search-only properties: 33 | total_usage: int = 0 34 | schema_description: Optional[str] = attr.ib(default=None) 35 | 36 | def get_id(self) -> str: 37 | # uses the table key as the document id in ES 38 | return self.key 39 | 40 | @classmethod 41 | def get_attrs(cls) -> Set: 42 | return { 43 | 'name', 44 | 'key', 45 | 'description', 46 | 'cluster', 47 | 'database', 48 | 'schema', 49 | 'column_names', 50 | 'tags', 51 | 'badges', 52 | 'last_updated_timestamp', 53 | 'display_name', 54 | 'programmatic_descriptions', 55 | 'total_usage', 56 | 'schema_description' 57 | } 58 | 59 | @staticmethod 60 | def get_type() -> str: 61 | return 'table' 62 | 63 | 64 | class TableSchema(AttrsSchema): 65 | class Meta: 66 | target = Table 67 | register_as_scheme = True 68 | 69 | 70 | @attr.s(auto_attribs=True, kw_only=True) 71 | class SearchTableResult: 72 | total_results: int = attr.ib() 73 | results: List[Table] = attr.ib(factory=list) 74 | 75 | 76 | class SearchTableResultSchema(AttrsSchema): 77 | class Meta: 78 | target = SearchTableResult 79 | register_as_scheme = True 80 | -------------------------------------------------------------------------------- /search_service/api/base.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from http import HTTPStatus 5 | from typing import Any, Dict, Iterable # noqa: F401 6 | 7 | from flask_restful import Resource, reqparse 8 | from marshmallow_annotations.ext.attrs import AttrsSchema 9 | 10 | from search_service.proxy import get_proxy_client 11 | 12 | 13 | class BaseFilterAPI(Resource): 14 | """ 15 | Base Filter API for search filtering 16 | 17 | This API should be generic enough to support every search filter use case. 18 | """ 19 | 20 | def __init__(self, *, schema: AttrsSchema, index: str) -> None: 21 | self.proxy = get_proxy_client() 22 | self.schema = schema 23 | self.index = index 24 | self.parser = reqparse.RequestParser(bundle_errors=True) 25 | 26 | self.parser.add_argument('page_index', required=False, default=0, type=int) 27 | self.parser.add_argument('query_term', required=False, type=str) 28 | self.parser.add_argument('search_request', type=dict) 29 | 30 | super(BaseFilterAPI, self).__init__() 31 | 32 | def post(self) -> Iterable[Any]: 33 | """ 34 | Fetch search results based on the page_index, query_term, and 35 | search_request dictionary posted in the request JSON. 36 | :return: json payload of schema. 
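        Illustrative request body (example values only; the exact shape of
        search_request is defined by the configured proxy):
            {"query_term": "users", "page_index": 0,
             "search_request": {"type": "AND", "filters": {"schema": ["core"]}}}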
The result list can be empty if the query doesn't match any documents. 38 | """ 39 | args = self.parser.parse_args(strict=True) 40 | page_index = args.get('page_index')  # type: int 41 | 42 | search_request = args.get('search_request')  # type: Dict 43 | if search_request is None: 44 | msg = 'The search request payload is not available in the request' 45 | return {'message': msg}, HTTPStatus.BAD_REQUEST 46 | 47 | query_term = args.get('query_term')  # type: str 48 | if ':' in query_term: 49 | msg = 'The query term contains an invalid character' 50 | return {'message': msg}, HTTPStatus.BAD_REQUEST 51 | 52 | try: 53 | results = self.proxy.fetch_search_results_with_filter( 54 | search_request=search_request, 55 | query_term=query_term, 56 | page_index=page_index, 57 | index=self.index 58 | ) 59 | 60 | return self.schema().dump(results).data, HTTPStatus.OK 61 | except RuntimeError: 62 | raise 63 | -------------------------------------------------------------------------------- /search_service/proxy/base.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from abc import ABCMeta, abstractmethod 5 | from typing import Any, Dict, List, Union 6 | 7 | from search_service.models.dashboard import SearchDashboardResult 8 | from search_service.models.table import SearchTableResult 9 | from search_service.models.user import SearchUserResult 10 | 11 | 12 | class BaseProxy(metaclass=ABCMeta): 13 | """ 14 | Base Proxy, which behaves like an interface for all 15 | the proxy clients available in the amundsen search service 16 | """ 17 | 18 | @abstractmethod 19 | def fetch_table_search_results(self, *, 20 | query_term: str, 21 | page_index: int = 0, 22 | index: str = '') -> SearchTableResult: 23 | pass 24 | 25 | @abstractmethod 26 | def fetch_user_search_results(self, *, 27 | query_term: str, 28 | page_index: int = 0, 29 | index: str = '') -> SearchUserResult: 30 | pass 31 | 32 | @abstractmethod 33 | def update_document(self, *, 34 | data: List[Dict[str, Any]], 35 | index: str = '') -> str: 36 | pass 37 | 38 | @abstractmethod 39 | def create_document(self, *, 40 | data: List[Dict[str, Any]], 41 | index: str = '') -> str: 42 | pass 43 | 44 | @abstractmethod 45 | def delete_document(self, *, 46 | data: List[str], 47 | index: str = '') -> str: 48 | pass 49 | 50 | @abstractmethod 51 | def fetch_search_results_with_filter(self, *, 52 | query_term: str, 53 | search_request: dict, 54 | page_index: int = 0, 55 | index: str = '') -> Union[SearchTableResult, 56 | SearchDashboardResult]: 57 | pass 58 | 59 | @abstractmethod 60 | def fetch_dashboard_search_results(self, *, 61 | query_term: str, 62 | page_index: int = 0, 63 | index: str = '') -> SearchDashboardResult: 64 | pass 65 | -------------------------------------------------------------------------------- /tests/unit/api/table/fixtures.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from search_service.models.table import Table 5 | from search_service.models.tag import Tag 6 | 7 | 8 | def mock_proxy_results() -> Table: 9 | return Table(name='hello', 10 | key='world', 11 | description='des1', 12 | cluster='clust', 13 | database='db', 14 | display_name=None, 15 | schema='schema', 16 | column_names=['col1', 'col2'], 17 | tags=[Tag(tag_name='tag')], 18 | badges=[Tag(tag_name='badge1')], 19 | last_updated_timestamp=1568324871, 20 | schema_description='schema description', 21 | programmatic_descriptions=[]) 22 | 23 | 24 | def mock_default_proxy_results() -> Table: 25 | return Table(name='', 26 | key='', 27 | description='', 28 | cluster='', 29 | database='', 30 | display_name='', 31 | schema='', 32 | column_names=[], 33 | tags=[], 34 | badges=[], 35 | last_updated_timestamp=0, 36 | schema_description='', 37 | programmatic_descriptions=[]) 38 | 39 | 40 | def mock_json_response() -> dict: 41 | return { 42 | "name": "hello", 43 | "key": "world", 44 | "description": "des1", 45 | "display_name": None, 46 | "cluster": "clust", 47 | "database": "db", 48 | "schema": "schema", 49 | "column_names": ["col1", "col2"], 50 | "tags": [{'tag_name': 'tag'}], 51 | "badges": [{'tag_name': 'badge1'}], 52 | "last_updated_timestamp": 1568324871, 53 | "schema_description": 'schema description', 54 | 'programmatic_descriptions': [], 55 | 'total_usage': 0, 56 | 'column_descriptions': [] 57 | } 58 | 59 | 60 | def default_json_response() -> dict: 61 | return { 62 | "name": '', 63 | "key": '', 64 | "description": '', 65 | "cluster": '', 66 | "database": '', 67 | "display_name": '', 68 | "schema": '', 69 | "column_names": [], 70 | "tags": [], 71 | "badges": [], 72 | "last_updated_timestamp": 0, 73 | "schema_description": '', 74 | 'programmatic_descriptions': [], 75 | 'total_usage': 0, 76 | 'column_descriptions': [] 77 | } 78 | -------------------------------------------------------------------------------- /search_service/api/table.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from http import HTTPStatus 5 | from typing import Any, Iterable # noqa: F401 6 | 7 | from flask_restful import Resource, reqparse 8 | from flasgger import swag_from 9 | 10 | from search_service.api.base import BaseFilterAPI 11 | from search_service.models.table import SearchTableResultSchema 12 | from search_service.proxy import get_proxy_client 13 | 14 | 15 | TABLE_INDEX = 'table_search_index' 16 | 17 | 18 | class SearchTableAPI(Resource): 19 | """ 20 | Search Table API 21 | """ 22 | 23 | def __init__(self) -> None: 24 | self.proxy = get_proxy_client() 25 | 26 | self.parser = reqparse.RequestParser(bundle_errors=True) 27 | 28 | self.parser.add_argument('query_term', required=True, type=str) 29 | self.parser.add_argument('page_index', required=False, default=0, type=int) 30 | self.parser.add_argument('index', required=False, default=TABLE_INDEX, type=str) 31 | 32 | super(SearchTableAPI, self).__init__() 33 | 34 | @swag_from('swagger_doc/table/search_table.yml') 35 | def get(self) -> Iterable[Any]: 36 | """ 37 | Fetch search results based on query_term. 38 | 39 | :return: list of table results. 
List can be empty if query 40 | doesn't match any tables 41 | """ 42 | args = self.parser.parse_args(strict=True) 43 | 44 | try: 45 | 46 | results = self.proxy.fetch_table_search_results( 47 | query_term=args.get('query_term'), 48 | page_index=args.get('page_index'), 49 | index=args.get('index') 50 | ) 51 | 52 | return SearchTableResultSchema().dump(results).data, HTTPStatus.OK 53 | 54 | except RuntimeError: 55 | 56 | err_msg = 'Exception encountered while processing search request' 57 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 58 | 59 | 60 | class SearchTableFilterAPI(BaseFilterAPI): 61 | """ 62 | Search Filter for table 63 | """ 64 | 65 | def __init__(self) -> None: 66 | super().__init__(schema=SearchTableResultSchema, 67 | index=TABLE_INDEX) 68 | 69 | @swag_from('swagger_doc/table/search_table_filter.yml') 70 | def post(self) -> Iterable[Any]: 71 | try: 72 | return super().post() 73 | except RuntimeError: 74 | err_msg = 'Exception encountered while processing search request' 75 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 76 | -------------------------------------------------------------------------------- /docs/atlas-search.md: -------------------------------------------------------------------------------- 1 | # Atlas search investigation 2 | There are several approaches to integrating search with [Apache Atlas](https://atlas.apache.org/ "Apache Atlas"); we describe the options below: 3 | 4 | - Use REST APIs 5 | 6 | Directly using the Atlas APIs is quick to implement and easy to set up for administrators. Atlas uses a search engine 7 | under the hood (embedded Solr) to perform search queries, so in theory this method should scale. The disadvantage is that 8 | we are limited to the REST API that Atlas offers, although we could potentially add functionality via pull requests and extend 9 | the search capabilities. The [advanced search](https://atlas.apache.org/Search-Advanced.html "Apache Atlas Advanced Search") 10 | provides a DSL which contains basic forms of aggregation and arithmetic. 11 | 12 | - Use Data Builder to fill Elasticsearch from Atlas 13 | 14 | Adopting Atlas within the Data Builder to fill Elasticsearch is a relatively straightforward way of staying 15 | compatible with the Neo4j database. It could either pull data from Atlas or have the data pushed in by Kafka. This method 16 | requires a setup of Elasticsearch and Airflow, which increases the amount of infrastructure and maintenance. 17 | Another disadvantage is that with a big inflow of metadata this method might not scale as well as the other methods. 18 | 19 | - Use underlying Solr or Elasticsearch from Apache Atlas 20 | 21 | Within Atlas there is the possibility to open up either Solr or the experimental Elasticsearch support. The search engine 22 | is populated by JanusGraph (the behind-the-scenes graph database), so it would not be compatible with 23 | the data builder setup. Adoption of such a search engine would require either new queries, some kind of transformer 24 | within the search engine, or changes within Atlas itself. 25 | 26 | ## Discussion 27 | Both the REST API approach and the data builder approach can be implemented and made configurable. Both approaches have 28 | their own benefits: the data builder provides a more fine-tuned search, whereas the Atlas REST API comes out 29 | of the box with Atlas. The last approach of using the underlying search engine from Atlas provides direct access 30 | to all the metadata with a decent search API. However, integration would be less straightforward, as the indexes would 31 | differ from those produced by the data builder's search engine loader.
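To make the REST API option concrete, below is a minimal sketch of what querying Atlas' v2 basic search endpoint looks like; the host, credentials, and type name are illustrative assumptions, not values taken from this project:

```python
# Sketch only: call Apache Atlas' basic search REST endpoint directly.
# Host, credentials, and typeName are assumptions for illustration.
import requests

response = requests.get(
    'http://localhost:21000/api/atlas/v2/search/basic',
    params={'query': 'revenue', 'typeName': 'Table', 'limit': 10, 'offset': 0},
    auth=('admin', 'admin'),
)
response.raise_for_status()
for entity in response.json().get('entities', []):
    # Each hit carries the metadata a proxy layer would map into a search result.
    print(entity.get('displayText'))
```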
32 | 33 | 34 | The focus is initially to implement the REST API approach and afterwards potentially implement an Atlas data extractor 35 | and importer within the Amundsen Data Builder, so that administrators have more flexibility in combining data sources. 36 | -------------------------------------------------------------------------------- /tests/unit/api/dashboard/test_search_dashboard_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from http import HTTPStatus 5 | from unittest import TestCase 6 | 7 | from mock import patch, Mock 8 | 9 | from search_service import create_app 10 | from search_service.models.search_result import SearchResult 11 | from tests.unit.api.dashboard.fixtures import mock_json_response, mock_proxy_results 12 | 13 | 14 | class TestSearchDashboardAPI(TestCase): 15 | def setUp(self) -> None: 16 | self.app = create_app(config_module_class='search_service.config.Config') 17 | self.app_context = self.app.app_context() 18 | self.app_context.push() 19 | 20 | self.mock_client = patch('search_service.api.dashboard.get_proxy_client') 21 | self.mock_proxy = self.mock_client.start().return_value = Mock() 22 | 23 | def tearDown(self) -> None: 24 | self.app_context.pop() 25 | self.mock_client.stop() 26 | 27 | def test_should_get_result_for_search(self) -> None: 28 | result = mock_proxy_results() 29 | self.mock_proxy.fetch_dashboard_search_results.return_value = SearchResult(total_results=1, results=[result]) 30 | 31 | response = self.app.test_client().get('/search_dashboard?query_term=searchterm') 32 | expected_response = { 33 | "total_results": 1, 34 | "results": [mock_json_response()] 35 | } 36 | 37 | self.assertEqual(response.json, expected_response) 38 | self.assertEqual(response.status_code, HTTPStatus.OK) 39 | self.mock_proxy.fetch_dashboard_search_results.assert_called_with(query_term='searchterm', page_index=0, 40 | index='dashboard_search_index') 41 | 42 | def test_should_give_empty_result_when_there_are_no_results_from_proxy(self) -> None: 43 | self.mock_proxy.fetch_dashboard_search_results.return_value = \ 44 | SearchResult(total_results=0, results=[]) 45 | 46 | response = self.app.test_client().get('/search_dashboard?query_term=searchterm') 47 | 48 | expected_response = { 49 | "total_results": 0, 50 | "results": [] 51 | } 52 | self.assertEqual(response.json, expected_response) 53 | 54 | def test_should_fail_without_query_term(self) -> None: 55 | response = self.app.test_client().get('/search_dashboard') 56 | 57 | self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) 58 | 59 | def test_should_fail_when_proxy_fails(self) -> None: 60 | self.mock_proxy.fetch_dashboard_search_results.side_effect = RuntimeError('search failed') 61 | 62 | response = self.app.test_client().get('/search_dashboard?query_term=searchterm') 63 | 64 | self.assertEqual(response.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) 65 | -------------------------------------------------------------------------------- /search_service/api/dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | from http import HTTPStatus 6 | from typing import Iterable, Any 7 | 8 | from flasgger import swag_from 9 | from flask_restful import Resource, reqparse  # noqa: I201 10 | 11 | from search_service.api.base import BaseFilterAPI 12 | from search_service.exception import NotFoundException 13 | from search_service.models.dashboard import SearchDashboardResultSchema 14 | from search_service.proxy import get_proxy_client 15 | 16 | 17 | DASHBOARD_INDEX = 'dashboard_search_index' 18 | 19 | LOGGING = logging.getLogger(__name__) 20 | 21 | 22 | class SearchDashboardAPI(Resource): 23 | """ 24 | Search Dashboard API 25 | """ 26 | 27 | def __init__(self) -> None: 28 | self.proxy = get_proxy_client() 29 | 30 | self.parser = reqparse.RequestParser(bundle_errors=True) 31 | 32 | self.parser.add_argument('query_term', required=True, type=str) 33 | self.parser.add_argument('page_index', required=False, default=0, type=int) 34 | self.parser.add_argument('index', required=False, default=DASHBOARD_INDEX, type=str) 35 | 36 | super(SearchDashboardAPI, self).__init__() 37 | 38 | @swag_from('swagger_doc/dashboard/search_dashboard.yml') 39 | def get(self) -> Iterable[Any]: 40 | """ 41 | Fetch dashboard search results based on query_term. 42 | 43 | :return: list of dashboard results. List can be empty if query 44 | doesn't match any dashboards 45 | """ 46 | args = self.parser.parse_args(strict=True) 47 | try: 48 | results = self.proxy.fetch_dashboard_search_results( 49 | query_term=args.get('query_term'), 50 | page_index=args['page_index'], 51 | index=args['index'] 52 | ) 53 | 54 | return SearchDashboardResultSchema().dump(results).data, HTTPStatus.OK 55 | 56 | except NotFoundException: 57 | return {'message': 'query_term does not exist'}, HTTPStatus.NOT_FOUND 58 | 59 | except Exception: 60 | 61 | err_msg = 'Exception encountered while processing search request' 62 | LOGGING.exception(err_msg) 63 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 64 | 65 | 66 | class SearchDashboardFilterAPI(BaseFilterAPI): 67 | """ 68 | Search Filter for Dashboard 69 | """ 70 | 71 | def __init__(self) -> None: 72 | super().__init__(schema=SearchDashboardResultSchema, 73 | index=DASHBOARD_INDEX) 74 | 75 | @swag_from('swagger_doc/dashboard/search_dashboard_filter.yml') 76 | def post(self) -> Iterable[Any]: 77 | try: 78 | return super().post() 79 | except RuntimeError: 80 | err_msg = 'Exception encountered while processing search request' 81 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 82 | -------------------------------------------------------------------------------- /search_service/proxy/statsd_utilities.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | from threading import Lock 6 | from typing import Any, Dict, Callable  # noqa: F401 7 | 8 | from flask import current_app 9 | from statsd import StatsClient 10 | 11 | from search_service import config 12 | 13 | LOGGER = logging.getLogger(__name__) 14 | 15 | _STATSD_POOL = {}  # type: Dict[str, StatsClient] 16 | _STATSD_POOL_LOCK = Lock() 17 | 18 | 19 | def timer_with_counter(f: Callable) -> Any: 20 | """ 21 | A function decorator that adds a statsd timer and statsd counters for success and failure. 22 | The statsd prefix comes from the function's module, and the metric name comes from the function name itself. 23 | Note that config.STATS needs to be True to emit metrics 24 | 25 | e.g. decorating ElasticsearchProxy.fetch_table_search_results will emit: 26 | - search_service.proxy.elasticsearch.fetch_table_search_results.success.count 27 | - search_service.proxy.elasticsearch.fetch_table_search_results.fail.count 28 | - search_service.proxy.elasticsearch.fetch_table_search_results.timer 29 | 30 | More information on statsd: https://statsd.readthedocs.io/en/v3.2.1/index.html 31 | For a statsd daemon that does not follow the default settings, refer to the doc above to configure it through environment variables 32 | 33 | :param f: 34 | """ 35 | def wrapper(*args: Any, **kwargs: Any) -> Any: 36 | statsd_client = _get_statsd_client(prefix=f.__module__) 37 | if not statsd_client: 38 | return f(*args, **kwargs) 39 | 40 | with statsd_client.timer(f.__name__): 41 | if LOGGER.isEnabledFor(logging.DEBUG): 42 | LOGGER.debug('Calling function {} and emitting statsd metrics'.format(f.__name__)) 43 | try: 44 | result = f(*args, **kwargs) 45 | statsd_client.incr('{}.success'.format(f.__name__)) 46 | return result 47 | except Exception: 48 | statsd_client.incr('{}.fail'.format(f.__name__)) 49 | raise 50 | 51 | return wrapper 52 | 53 | 54 | def _get_statsd_client(*, prefix: str) -> StatsClient: 55 | """ 56 | Object pool method that reuses an already created StatsClient based on the prefix 57 | :param prefix: 58 | """ 59 | if not current_app.config[config.STATS_FEATURE_KEY]: 60 | # return if stats feature is not enabled 61 | return None 62 | else: 63 | if prefix not in _STATSD_POOL: 64 | with _STATSD_POOL_LOCK: 65 | if prefix not in _STATSD_POOL: 66 | LOGGER.info('Instantiate StatsClient with prefix {}'.format(prefix)) 67 | statsd_client = StatsClient(prefix=prefix) 68 | _STATSD_POOL[prefix] = statsd_client 69 | return statsd_client 70 | 71 | if LOGGER.isEnabledFor(logging.DEBUG): 72 | LOGGER.debug('Reuse StatsClient with prefix {}'.format(prefix)) 73 | return _STATSD_POOL[prefix] 74 |
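For illustration, a function opts into these metrics simply by applying the decorator. A minimal sketch follows; the function below is hypothetical, not part of this codebase:

```python
from search_service.proxy.statsd_utilities import timer_with_counter


@timer_with_counter
def fetch_results(query_term: str) -> list:
    # With STATS enabled, this call emits <module>.fetch_results.timer
    # plus a .success or .fail counter, depending on the outcome.
    return []
```

Note that `_get_statsd_client` reads the `STATS` flag from `current_app.config`, so the decorated function must run inside a Flask application context for any metrics to be emitted.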
-------------------------------------------------------------------------------- /tests/unit/test_swagger.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from typing import Any, Dict 7 | 8 | from search_service import create_app 9 | 10 | 11 | class TestSwagger(unittest.TestCase): 12 | 13 | def setUp(self) -> None: 14 | config_module_class = 'search_service.config.LocalConfig' 15 | self.app = create_app(config_module_class=config_module_class) 16 | self.app_context = self.app.app_context() 17 | self.app_context.push() 18 | 19 | def tearDown(self) -> None: 20 | self.app_context.pop() 21 | 22 | def test_should_get_swagger_docs(self) -> None: 23 | response = self.app.test_client().get('/apidocs/') 24 | self.assertEqual(response.status_code, 200) 25 | 26 | def test_should_get_swagger_json(self) -> None: 27 | response = self.app.test_client().get('/apispec_1.json') 28 | 29 | self.assertEqual(response.status_code, 200) 30 | 31 | def test_should_have_a_component_from_each_reference(self) -> None: 32 | response = self.app.test_client().get('/apispec_1.json') 33 | 34 | for reference in list(TestSwagger.find('$ref', response.json)): 35 | path_to_component = reference[2:].split('/') 36 | 37 | json_response_to_reduce = response.json 38 | for key in path_to_component: 39 | try: 40 | json_response_to_reduce = json_response_to_reduce[key] 41 | except KeyError: 42 | self.fail(f'The following $ref does not have a valid component to reference.
$ref: {reference}') 43 | 44 | # This is a requirement from Flasgger, not Swagger 45 | def test_should_have_type_for_each_query_parameter(self) -> None: 46 | response = self.app.test_client().get('/apispec_1.json') 47 | 48 | for request_params in list(TestSwagger.find('parameters', response.json)): 49 | for param in request_params: 50 | if param['in'] == 'query' and 'type' not in param.keys(): 51 | self.fail(f'The following query parameter is missing a type: {param}') 52 | 53 | def test_should_have_all_endpoints_in_swagger(self) -> None: 54 | paths_excluded_from_swagger = ['/apidocs/index.html', '/apispec_1.json', '/apidocs/', 55 | '/flasgger_static/{path:filename}', '/static/{path:filename}'] 56 | 57 | response = self.app.test_client().get('/apispec_1.json') 58 | 59 | paths_in_swagger = response.json.get('paths').keys() 60 | for endpoint in [rule.rule for rule in self.app.url_map.iter_rules()]: 61 | endpoint = endpoint.replace('<', '{').replace('>', '}') 62 | if endpoint not in paths_excluded_from_swagger and endpoint not in paths_in_swagger: 63 | self.fail(f'The following endpoint is not in swagger: {endpoint}') 64 | 65 | @staticmethod 66 | def find(key: str, json_response: Dict[str, Any]) -> Any: 67 | for json_key, json_value in json_response.items(): 68 | if json_key == key: 69 | yield json_value 70 | elif isinstance(json_value, dict): 71 | for result in TestSwagger.find(key, json_value): 72 | yield result 73 | -------------------------------------------------------------------------------- /tests/unit/api/table/test_search_table_filter.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from http import HTTPStatus 7 | from mock import patch, MagicMock 8 | 9 | from search_service import create_app 10 | 11 | 12 | class SearchTableFilterTest(unittest.TestCase): 13 | def setUp(self) -> None: 14 | self.app = create_app(config_module_class='search_service.config.LocalConfig') 15 | self.app_context = self.app.app_context() 16 | self.app_context.push() 17 | self.mock_index = 'table_search_index' 18 | self.mock_term = 'test' 19 | self.mock_page_index = 0 20 | self.mock_search_request = { 21 | 'type': 'AND', 22 | 'filters': { 23 | 'database': ['db1', 'db2'] 24 | } 25 | } 26 | self.url = '/search_table' 27 | 28 | def tearDown(self) -> None: 29 | self.app_context.pop() 30 | 31 | @patch('search_service.api.table.reqparse.RequestParser') 32 | @patch('search_service.api.base.get_proxy_client') 33 | def test_post(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 34 | mock_proxy = get_proxy() 35 | RequestParser().parse_args.return_value = dict(index=self.mock_index, 36 | page_index=self.mock_page_index, 37 | query_term=self.mock_term, 38 | search_request=self.mock_search_request) 39 | 40 | self.app.test_client().post(self.url) 41 | mock_proxy.fetch_search_results_with_filter.assert_called_with(index=self.mock_index, 42 | page_index=self.mock_page_index, 43 | query_term=self.mock_term, 44 | search_request=self.mock_search_request) 45 | 46 | @patch('search_service.api.table.reqparse.RequestParser') 47 | @patch('search_service.api.base.get_proxy_client') 48 | def test_post_return_400_if_no_search_request(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 49 | RequestParser().parse_args.return_value = dict(index=self.mock_index, 50 | query_term=self.mock_term) 51 | 52 | response = self.app.test_client().post(self.url) 53 |
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) 54 | 55 | @patch('search_service.api.table.reqparse.RequestParser') 56 | @patch('search_service.api.base.get_proxy_client') 57 | def test_post_return_400_if_bad_query_term(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 58 | RequestParser().parse_args.return_value = dict(index=self.mock_index, 59 | page_index=self.mock_page_index, 60 | query_term='column:bad_syntax', 61 | search_request=self.mock_search_request) 62 | 63 | response = self.app.test_client().post(self.url) 64 | self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) 65 | -------------------------------------------------------------------------------- /tests/unit/api/dashboard/test_search_dashboard_filter.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from http import HTTPStatus 7 | from mock import patch, MagicMock 8 | 9 | from search_service import create_app 10 | 11 | 12 | class SearchDashboardFilterTest(unittest.TestCase): 13 | def setUp(self) -> None: 14 | self.app = create_app(config_module_class='search_service.config.LocalConfig') 15 | self.app_context = self.app.app_context() 16 | self.app_context.push() 17 | self.mock_index = 'dashboard_search_index' 18 | self.mock_term = 'test' 19 | self.mock_page_index = 0 20 | self.mock_search_request = { 21 | 'type': 'AND', 22 | 'filters': { 23 | 'product': ['mode'] 24 | } 25 | } 26 | self.url = '/search_dashboard_filter' 27 | 28 | def tearDown(self) -> None: 29 | self.app_context.pop() 30 | 31 | @patch('search_service.api.dashboard.reqparse.RequestParser') 32 | @patch('search_service.api.base.get_proxy_client') 33 | def test_post(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 34 | mock_proxy = get_proxy() 35 | RequestParser().parse_args.return_value = dict(index=self.mock_index, 36 | page_index=self.mock_page_index, 37 | query_term=self.mock_term, 38 | search_request=self.mock_search_request) 39 | 40 | self.app.test_client().post(self.url) 41 | mock_proxy.fetch_search_results_with_filter.assert_called_with(index=self.mock_index, 42 | page_index=self.mock_page_index, 43 | query_term=self.mock_term, 44 | search_request=self.mock_search_request) 45 | 46 | @patch('search_service.api.dashboard.reqparse.RequestParser') 47 | @patch('search_service.api.base.get_proxy_client') 48 | def test_post_return_400_if_no_search_request(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 49 | RequestParser().parse_args.return_value = dict(index=self.mock_index, 50 | query_term=self.mock_term) 51 | 52 | response = self.app.test_client().post(self.url) 53 | self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) 54 | 55 | @patch('search_service.api.dashboard.reqparse.RequestParser') 56 | @patch('search_service.api.base.get_proxy_client') 57 | def test_post_return_400_if_bad_query_term(self, get_proxy: MagicMock, RequestParser: MagicMock) -> None: 58 | RequestParser().parse_args.return_value = dict(index=self.mock_index, 59 | page_index=self.mock_page_index, 60 | query_term='name:bad_syntax', 61 | search_request=self.mock_search_request) 62 | 63 | response = self.app.test_client().post(self.url) 64 | self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) 65 | -------------------------------------------------------------------------------- /tests/unit/api/table/test_search_table_api.py:
-------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from http import HTTPStatus 5 | from unittest import TestCase 6 | 7 | from mock import patch, Mock 8 | 9 | from search_service import create_app 10 | from search_service.models.table import SearchTableResult 11 | from tests.unit.api.table.fixtures import mock_json_response, mock_proxy_results, default_json_response, \ 12 | mock_default_proxy_results 13 | 14 | 15 | class TestSearchTableAPI(TestCase): 16 | 17 | def setUp(self) -> None: 18 | self.app = create_app(config_module_class='search_service.config.Config') 19 | self.app_context = self.app.app_context() 20 | self.app_context.push() 21 | 22 | self.mock_client = patch('search_service.api.table.get_proxy_client') 23 | self.mock_proxy = self.mock_client.start().return_value = Mock() 24 | 25 | def tearDown(self) -> None: 26 | self.app_context.pop() 27 | self.mock_client.stop() 28 | 29 | def test_should_get_result_for_search(self) -> None: 30 | result = mock_proxy_results() 31 | self.mock_proxy.fetch_table_search_results.return_value = SearchTableResult(total_results=1, results=[result]) 32 | 33 | response = self.app.test_client().get('/search?query_term=searchterm') 34 | 35 | expected_response = { 36 | "total_results": 1, 37 | "results": [mock_json_response()] 38 | } 39 | 40 | self.assertEqual(response.json, expected_response) 41 | self.assertEqual(response.status_code, HTTPStatus.OK) 42 | self.mock_proxy.fetch_table_search_results.assert_called_with(query_term='searchterm', page_index=0, 43 | index='table_search_index') 44 | 45 | def test_should_give_empty_result_when_there_are_no_results_from_proxy(self) -> None: 46 | self.mock_proxy.fetch_table_search_results.return_value = \ 47 | SearchTableResult(total_results=0, results=[]) 48 | 49 | response = self.app.test_client().get('/search?query_term=searchterm') 50 | 51 | expected_response = { 52 | "total_results": 0, 53 | "results": [] 54 | } 55 | self.assertEqual(response.json, expected_response) 56 | 57 | def test_should_get_default_response_values_when_values_not_in_proxy_response(self) -> None: 58 | self.mock_proxy.fetch_table_search_results.return_value = \ 59 | SearchTableResult(total_results=1, 60 | results=[mock_default_proxy_results()]) 61 | 62 | response = self.app.test_client().get('/search?query_term=searchterm') 63 | 64 | expected_response = { 65 | "total_results": 1, 66 | "results": [default_json_response()] 67 | } 68 | 69 | self.assertEqual(response.json, expected_response) 70 | 71 | def test_should_fail_without_query_term(self) -> None: 72 | response = self.app.test_client().get('/search') 73 | 74 | self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) 75 | 76 | def test_should_fail_when_proxy_fails(self) -> None: 77 | self.mock_proxy.fetch_table_search_results.side_effect = RuntimeError('search failed') 78 | 79 | response = self.app.test_client().get('/search?query_term=searchterm') 80 | 81 | self.assertEqual(response.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) 82 | -------------------------------------------------------------------------------- /tests/unit/proxy/test_statsd_utilities.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from mock import patch, MagicMock 5 | from flask import current_app 6 | from statsd import StatsClient 7 | import unittest 8 | 9 | from search_service import create_app 10 | from search_service.proxy import statsd_utilities 11 | from search_service.proxy.statsd_utilities import _get_statsd_client 12 | from search_service.proxy.elasticsearch import ElasticsearchProxy 13 | 14 | 15 | class TestStatsdUtilities(unittest.TestCase): 16 | def setUp(self) -> None: 17 | self.app = create_app(config_module_class='search_service.config.LocalConfig') 18 | self.app_context = self.app.app_context() 19 | self.app_context.push() 20 | 21 | def test_no_statsd_client(self) -> None: 22 | with patch.object(StatsClient, '__init__'): 23 | statsd_client = _get_statsd_client(prefix='foo') 24 | self.assertIsNone(statsd_client) 25 | 26 | def test_get_statsd_client(self) -> None: 27 | with patch.object(current_app, 'config') as mock_config, \ 28 | patch.object(StatsClient, '__init__', return_value=None): 29 | mock_config.return_value.single.return_value = True 30 | 31 | statsd_client1 = _get_statsd_client(prefix='test') 32 | self.assertIsNotNone(statsd_client1) 33 | 34 | def test_same_statsd_client_for_same_prefix(self) -> None: 35 | with patch.object(current_app, 'config') as mock_config, \ 36 | patch.object(StatsClient, '__init__', return_value=None) as mock_statsd_init: 37 | mock_config.return_value.single.return_value = True 38 | 39 | statsd_client1 = _get_statsd_client(prefix='test_same') 40 | self.assertIsNotNone(statsd_client1) 41 | statsd_client2 = _get_statsd_client(prefix='test_same') 42 | self.assertIsNotNone(statsd_client2) 43 | self.assertEqual(statsd_client1, statsd_client2) 44 | 45 | self.assertEqual(mock_statsd_init.call_count, 1) 46 | 47 | def test_different_statsd_client_for_different_prefix(self) -> None: 48 | with patch.object(current_app, 'config') as mock_config, \ 49 | patch.object(StatsClient, '__init__', return_value=None) as mock_statsd_init: 50 | mock_config.return_value.single.return_value = True 51 | 52 | statsd_client1 = _get_statsd_client(prefix='test_diff') 53 | self.assertIsNotNone(statsd_client1) 54 | 55 | statsd_client2 = _get_statsd_client(prefix='test_diff2') 56 | self.assertIsNotNone(statsd_client2) 57 | 58 | self.assertNotEqual(statsd_client1, statsd_client2) 59 | self.assertEqual(mock_statsd_init.call_count, 2) 60 | 61 | @patch('elasticsearch_dsl.Search.execute') 62 | def test_with_elasticsearch_proxy(self, 63 | mock_search: MagicMock) -> None: 64 | 65 | mock_elasticsearch_client = MagicMock() 66 | es_proxy = ElasticsearchProxy(client=mock_elasticsearch_client) 67 | 68 | with patch.object(statsd_utilities, '_get_statsd_client') as mock_statsd_client: 69 | mock_success_incr = MagicMock() 70 | mock_statsd_client.return_value.incr = mock_success_incr 71 | 72 | es_proxy.fetch_table_search_results(query_term='DOES_NOT_MATTER') 73 | 74 | self.assertEqual(mock_success_incr.call_count, 1) 75 | -------------------------------------------------------------------------------- /search_service/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import ast 5 | import importlib 6 | import os 7 | import logging 8 | import logging.config 9 | import sys 10 | 11 | from flask import Flask, Blueprint 12 | from flask_restful import Api 13 | from flask_cors import CORS 14 | from typing import Dict, Any  # noqa: F401 15 | from flasgger import Swagger 16 | 17 | from search_service.api.dashboard import SearchDashboardAPI, SearchDashboardFilterAPI 18 | from search_service.api.table import SearchTableAPI, SearchTableFilterAPI 19 | from search_service.api.user import SearchUserAPI 20 | from search_service.api.document import DocumentUserAPI, DocumentTableAPI, DocumentTablesAPI, DocumentUsersAPI 21 | from search_service.api.healthcheck import healthcheck 22 | 23 | # For a customized Flask app, use the arguments below to override. 24 | FLASK_APP_MODULE_NAME = os.getenv('FLASK_APP_MODULE_NAME') 25 | FLASK_APP_CLASS_NAME = os.getenv('FLASK_APP_CLASS_NAME') 26 | FLASK_APP_KWARGS_DICT_STR = os.getenv('FLASK_APP_KWARGS_DICT') 27 | ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) 28 | 29 | # Environment variable to enable CORS 30 | CORS_ENABLED = os.environ.get('CORS_ENABLED', False) 31 | 32 | 33 | def create_app(*, config_module_class: str) -> Flask: 34 | """ 35 | Creates the app in a function so that Flask and its extensions can be 36 | initialized with a specific config. It also defines the routing of the APIs 37 | so that it can be seen in one place, separate from the implementation. 38 | 39 | The config is fetched via a module.class name, where the module.class name 40 | can be passed through an environment variable. 41 | This makes the config resolvable through the runtime PYTHONPATH so that 42 | the Config class can be easily injected. 43 | More on: http://flask.pocoo.org/docs/1.0/config/ 44 | 45 | :param config_module_class: name of the config 46 | :return: Flask 47 | """ 48 | if FLASK_APP_MODULE_NAME and FLASK_APP_CLASS_NAME: 49 | print(f'Using requested Flask module {FLASK_APP_MODULE_NAME} ' 50 | f'and class {FLASK_APP_CLASS_NAME}', file=sys.stderr) 51 | class_obj = getattr( 52 | importlib.import_module(FLASK_APP_MODULE_NAME), 53 | FLASK_APP_CLASS_NAME 54 | ) 55 | 56 | flask_kwargs_dict = {}  # type: Dict[str, Any] 57 | if FLASK_APP_KWARGS_DICT_STR: 58 | print(f'Using kwargs {FLASK_APP_KWARGS_DICT_STR} to instantiate Flask', 59 | file=sys.stderr) 60 | flask_kwargs_dict = ast.literal_eval(FLASK_APP_KWARGS_DICT_STR) 61 | 62 | app = class_obj(__name__, **flask_kwargs_dict) 63 | 64 | else: 65 | app = Flask(__name__) 66 | 67 | if CORS_ENABLED: 68 | CORS(app) 69 | config_module_class = \ 70 | os.getenv('SEARCH_SVC_CONFIG_MODULE_CLASS') or config_module_class 71 | app.config.from_object(config_module_class) 72 | 73 | if app.config.get('LOG_CONFIG_FILE'): 74 | logging.config.fileConfig(app.config.get('LOG_CONFIG_FILE'), disable_existing_loggers=False) 75 | else: 76 | logging.basicConfig(format=app.config.get('LOG_FORMAT'), datefmt=app.config.get('LOG_DATE_FORMAT')) 77 | logging.getLogger().setLevel(app.config.get('LOG_LEVEL')) 78 | 79 | logging.info('Creating app with config name {}' 80 | .format(config_module_class)) 81 | 82 | 83 | api_bp = Blueprint('api', __name__) 84 | api_bp.add_url_rule('/healthcheck', 'healthcheck', healthcheck) 85 | api = Api(api_bp) 86 | # Table Search API 87 | 88 | api.add_resource(SearchTableFilterAPI, '/search_table') 89 | # TODO: Rename endpoint to be more generic and accept a resource type so that logic can be re-used 90 |
api.add_resource(SearchTableAPI, '/search') 91 | 92 | # User Search API 93 | api.add_resource(SearchUserAPI, '/search_user') 94 | 95 | # Dashboard Search API 96 | api.add_resource(SearchDashboardAPI, '/search_dashboard') 97 | api.add_resource(SearchDashboardFilterAPI, '/search_dashboard_filter') 98 | 99 | # DocumentAPI 100 | # TODO: needs updating to handle dashboard/user and other entity use cases. 101 | api.add_resource(DocumentTablesAPI, '/document_table') 102 | api.add_resource(DocumentTableAPI, '/document_table/<document_id>') 103 | 104 | api.add_resource(DocumentUsersAPI, '/document_user') 105 | api.add_resource(DocumentUserAPI, '/document_user/<document_id>') 106 | 107 | app.register_blueprint(api_bp) 108 | 109 | if app.config.get('SWAGGER_ENABLED'): 110 | Swagger(app, template_file=os.path.join(ROOT_DIR, app.config.get('SWAGGER_TEMPLATE_PATH')), parse=True) 111 | return app 112 | -------------------------------------------------------------------------------- /search_service/api/swagger_doc/template.yml: -------------------------------------------------------------------------------- 1 | openapi: '3.0.2' 2 | info: 3 | title: 'Search Service' 4 | description: 'Used to communicate with Elasticsearch or Apache Atlas to get search results. Used by the frontend service' 5 | version: '1.1.12' 6 | 7 | components: 8 | schemas: 9 | ErrorResponse: 10 | type: object 11 | properties: 12 | message: 13 | type: string 14 | description: 'A simple description of what went wrong' 15 | example: 'An Exception encountered while processing your request' 16 | EmptyResponse: 17 | type: object 18 | properties: {} 19 | SearchTableResults: 20 | type: object 21 | properties: 22 | total_results: 23 | type: integer 24 | description: 'number of results' 25 | example: 10 26 | results: 27 | type: array 28 | items: 29 | $ref: '#/components/schemas/TableFields' 30 | SearchDashboardResults: 31 | type: object 32 | properties: 33 | total_results: 34 | type: integer 35 | description: 'number of results' 36 | example: 10 37 | results: 38 | type: array 39 | items: 40 | $ref: '#/components/schemas/DashboardFields' 41 | SearchUserResults: 42 | type: object 43 | properties: 44 | total_results: 45 | type: integer 46 | description: 'number of results' 47 | example: 10 48 | results: 49 | type: array 50 | items: 51 | $ref: '#/components/schemas/UserFields' 52 | TableFields: 53 | type: object 54 | properties: 55 | name: 56 | type: string 57 | description: 'name of table' 58 | example: 'table_name' 59 | key: 60 | type: string 61 | description: 'key format: {cluster}://{schema}.{database}/{table_name}' 62 | example: 'cluster://schema.db/table_name' 63 | description: 64 | type: string 65 | description: 'table description' 66 | example: 'this table holds revenue data' 67 | cluster: 68 | type: string 69 | description: 'table cluster' 70 | example: 'cluster' 71 | database: 72 | type: string 73 | description: 'table database' 74 | example: 'db' 75 | schema: 76 | type: string 77 | description: 'table schema' 78 | example: 'schema' 79 | column_names: 80 | type: array 81 | items: 82 | type: string 83 | description: 'list of column names' 84 | example: ['col1', 'col2'] 85 | tags: 86 | type: array 87 | items: 88 | type: string 89 | description: 'list of table tags' 90 | example: ['tag2', 'tag1'] 91 | last_updated_timestamp: 92 | type: integer 93 | description: 'table last updated time' 94 | example: 1568814420 95 | DashboardFields: 96 | type: object 97 | properties: 98 | uri: 99 | type: string 100 | description: 'dashboard uri' 101 | example: 'mode:product/name'
102 | cluster: 103 | type: string 104 | description: 'dashboard cluster' 105 | example: 'gold' 106 | group_name: 107 | type: string 108 | description: 'name of dashboard group' 109 | example: 'Mode dashboard group' 110 | group_url: 111 | type: string 112 | description: 'url of the dashboard group' 113 | example: 'Mode dashboard group://' 114 | product: 115 | type: string 116 | description: 'product of the dashboard group' 117 | example: 'mode' 118 | url: 119 | type: string 120 | description: 'url of the dashboard' 121 | example: 'mode ://report' 122 | description: 123 | type: string 124 | description: 'dashboard description' 125 | example: 'this dashboard has info about that metric' 126 | last_successful_run_timestamp: 127 | type: integer 128 | description: 'dashboard last successful run time' 129 | example: 1568814420 130 | UserFields: 131 | type: object 132 | properties: 133 | name: 134 | type: string 135 | description: 'user name' 136 | example: 'Harry Potter' 137 | first_name: 138 | type: string 139 | description: 'user first name' 140 | example: 'Harry' 141 | last_name: 142 | type: string 143 | description: 'user last name' 144 | example: 'Potter' 145 | team_name: 146 | type: string 147 | description: 'name of team user is on' 148 | example: 'Gryffindor' 149 | email: 150 | type: string 151 | description: 'users email address' 152 | example: 'harry.potter@hogwarts.edu' 153 | manager_email: 154 | type: string 155 | description: "email address for the user's manager" 156 | example: 'minerva.mcgonagall@hogwarts.edu' 157 | github_username: 158 | type: string 159 | description: "user's github username" 160 | example: 'wizard_coder' 161 | is_active: 162 | type: boolean 163 | description: 'indicates if the user is still part of the platform' 164 | example: True 165 | employee_type: 166 | type: string 167 | description: 'the kind of user' 168 | example: 'student' 169 | -------------------------------------------------------------------------------- /search_service/api/document.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | 6 | from http import HTTPStatus 7 | from typing import Tuple, Any 8 | 9 | from flasgger import swag_from 10 | from flask_restful import Resource, reqparse 11 | from search_service.proxy import get_proxy_client 12 | from search_service.proxy.base import BaseProxy 13 | from search_service.models.table import TableSchema 14 | from search_service.models.user import UserSchema 15 | from search_service.api.table import TABLE_INDEX 16 | from search_service.api.user import USER_INDEX 17 | 18 | LOGGER = logging.getLogger(__name__) 19 | 20 | 21 | class BaseDocumentAPI(Resource): 22 | def __init__(self, schema: Any, proxy: BaseProxy) -> None: 23 | self.schema = schema 24 | self.proxy = proxy 25 | self.parser = reqparse.RequestParser(bundle_errors=True) 26 | super(BaseDocumentAPI, self).__init__() 27 | 28 | def delete(self, *, document_id: str) -> Tuple[Any, int]: 29 | """ 30 | Uses the Elasticsearch bulk API to delete existing documents by id 31 | 32 | :param document_id: document id for document to be deleted 33 | :return: 34 | """ 35 | args = self.parser.parse_args() 36 | 37 | try: 38 | self.proxy.delete_document(data=[document_id], index=args.get('index')) 39 | return {}, HTTPStatus.OK 40 | except RuntimeError as e: 41 | err_msg = 'Exception encountered while deleting document ' 42 | LOGGER.error(err_msg + str(e)) 43 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 44 | 45 | 46 | class BaseDocumentsAPI(Resource): 47 | def __init__(self, schema: Any, proxy: BaseProxy) -> None: 48 | self.schema = schema 49 | self.proxy = proxy 50 | self.parser = reqparse.RequestParser(bundle_errors=True) 51 | super(BaseDocumentsAPI, self).__init__() 52 | 53 | def post(self) -> Tuple[Any, int]: 54 | """ 55 | Uses the Elasticsearch bulk API to load data from JSON. Uses Elasticsearch 56 | index actions to create or update documents by id 57 | 58 | :param data: list of data objects to be indexed in Elasticsearch 59 | :return: name of new index 60 | """ 61 | self.parser.add_argument('data', required=True) 62 | args = self.parser.parse_args() 63 | 64 | try: 65 | data = self.schema(many=True, strict=False).loads(args.get('data')).data 66 | results = self.proxy.create_document(data=data, index=args.get('index')) 67 | return results, HTTPStatus.OK 68 | except RuntimeError as e: 69 | err_msg = 'Exception encountered while updating documents ' 70 | LOGGER.error(err_msg + str(e)) 71 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 72 | 73 | def put(self) -> Tuple[Any, int]: 74 | """ 75 | Uses the Elasticsearch bulk API to update existing documents by id. 
Will 76 | ignore ids it doesn't recognize (ids are defined in models) 77 | 78 | :param data: list of data objects to be indexed in Elasticsearch 79 | :return: name of index 80 | """ 81 | self.parser.add_argument('data', required=True) 82 | args = self.parser.parse_args() 83 | 84 | try: 85 | data = self.schema(many=True, strict=False).loads(args.get('data')).data 86 | results = self.proxy.update_document(data=data, index=args.get('index')) 87 | return results, HTTPStatus.OK 88 | except RuntimeError as e: 89 | err_msg = 'Exception encountered while updating documents ' 90 | LOGGER.error(err_msg + str(e)) 91 | return {'message': err_msg}, HTTPStatus.INTERNAL_SERVER_ERROR 92 | 93 | 94 | class DocumentTableAPI(BaseDocumentAPI): 95 | 96 | def __init__(self) -> None: 97 | super().__init__(schema=TableSchema, proxy=get_proxy_client()) 98 | self.parser.add_argument('index', required=False, default=TABLE_INDEX, type=str) 99 | 100 | @swag_from('swagger_doc/document/table_delete.yml') 101 | def delete(self, *, document_id: str) -> Tuple[Any, int]: 102 | return super().delete(document_id=document_id) 103 | 104 | 105 | class DocumentUserAPI(BaseDocumentAPI): 106 | 107 | def __init__(self) -> None: 108 | super().__init__(schema=UserSchema, proxy=get_proxy_client()) 109 | self.parser.add_argument('index', required=False, default=USER_INDEX, type=str) 110 | 111 | @swag_from('swagger_doc/document/user_delete.yml') 112 | def delete(self, *, document_id: str) -> Tuple[Any, int]: 113 | return super().delete(document_id=document_id) 114 | 115 | 116 | class DocumentTablesAPI(BaseDocumentsAPI): 117 | 118 | def __init__(self) -> None: 119 | super().__init__(schema=TableSchema, proxy=get_proxy_client()) 120 | self.parser.add_argument('index', required=False, default=TABLE_INDEX, type=str) 121 | 122 | @swag_from('swagger_doc/document/table_post.yml') 123 | def post(self) -> Tuple[Any, int]: 124 | return super().post() 125 | 126 | @swag_from('swagger_doc/document/table_put.yml') 127 | def put(self) -> Tuple[Any, int]: 128 | return super().put() 129 | 130 | 131 | class DocumentUsersAPI(BaseDocumentsAPI): 132 | 133 | def __init__(self) -> None: 134 | super().__init__(schema=UserSchema, proxy=get_proxy_client()) 135 | self.parser.add_argument('index', required=False, default=USER_INDEX, type=str) 136 | 137 | @swag_from('swagger_doc/document/user_post.yml') 138 | def post(self) -> Tuple[Any, int]: 139 | return super().post() 140 | 141 | @swag_from('swagger_doc/document/user_put.yml') 142 | def put(self) -> Tuple[Any, int]: 143 | return super().put() 144 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Amundsen Search Service 2 | [![PyPI version](https://badge.fury.io/py/amundsen-search.svg)](https://badge.fury.io/py/amundsen-search) 3 | [![Coverage Status](https://img.shields.io/codecov/c/github/amundsen-io/amundsensearchlibrary/master.svg)](https://codecov.io/gh/amundsen-io/amundsensearchlibrary?branch=master) 4 | [![License](https://img.shields.io/:license-Apache%202-blue.svg)](LICENSE) 5 | [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://img.shields.io/badge/PRs-welcome-brightgreen.svg) 6 | [![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://join.slack.com/t/amundsenworkspace/shared_invite/enQtNTk2ODQ1NDU1NDI0LTc3MzQyZmM0ZGFjNzg5MzY1MzJlZTg4YjQ4YTU0ZmMxYWU2MmVlMzhhY2MzMTc1MDg0MzRjNTA4MzRkMGE0Nzk) 7 | 
8 | Amundsen Search service serves a Restful API and is responsible for searching metadata. The service leverages [Elasticsearch](https://www.elastic.co/products/elasticsearch "Elasticsearch") for most of its search capabilities. 9 | 10 | For information about Amundsen and our other services, visit the [main repository](https://github.com/amundsen-io/amundsen#amundsen) `README.md`. Please also see our instructions for a [quick start](https://github.com/amundsen-io/amundsen/blob/master/docs/installation.md#bootstrap-a-default-version-of-amundsen-using-docker) setup of Amundsen with dummy data, and an [overview of the architecture](https://github.com/amundsen-io/amundsen/blob/master/docs/architecture.md#architecture). 11 | 12 | ## Requirements 13 | 14 | - Python >= 3.6 15 | - elasticsearch 6.x (currently it doesn't support 7.x) 16 | 17 | ## Doc 18 | - https://www.amundsen.io/amundsen 19 | 20 | 21 | ## Instructions to start the Search service from distribution 22 | 23 | ```bash 24 | $ venv_path=[path_for_virtual_environment] 25 | $ python3 -m venv $venv_path 26 | $ source $venv_path/bin/activate 27 | $ pip3 install amundsen-search 28 | $ python3 search_service/search_wsgi.py 29 | 30 | # In a different terminal, verify the service is up by running 31 | $ curl -v http://localhost:5001/healthcheck 32 | ``` 33 | 34 | 35 | ## Instructions to start the Search service from source 36 | 37 | ```bash 38 | $ git clone https://github.com/amundsen-io/amundsensearchlibrary.git 39 | $ cd amundsensearchlibrary 40 | $ venv_path=[path_for_virtual_environment] 41 | $ python3 -m venv $venv_path 42 | $ source $venv_path/bin/activate 43 | $ pip3 install -r requirements.txt 44 | $ python3 setup.py install 45 | $ python3 search_service/search_wsgi.py 46 | 47 | # In a different terminal, verify the service is up by running 48 | $ curl -v http://localhost:5001/healthcheck 49 | ``` 50 | 51 | ## Instructions to start the service from Docker 52 | 53 | ```bash 54 | $ docker pull amundsendev/amundsen-search:latest 55 | $ docker run -p 5001:5001 amundsendev/amundsen-search 56 | # - alternative, for production environment with Gunicorn (see its homepage link below) 57 | $ ## docker run -p 5001:5001 amundsendev/amundsen-search gunicorn --bind 0.0.0.0:5001 search_service.search_wsgi 58 | 59 | # In a different terminal, verify the service is up by running 60 | $ curl -v http://localhost:5001/healthcheck 61 | ``` 62 | 63 | 64 | ## Production environment 65 | By default, Flask comes with a Werkzeug webserver, which is used for development. For production environments a production-grade web server such as [Gunicorn](https://gunicorn.org/ "Gunicorn") should be used. 66 | 67 | ```bash 68 | $ pip3 install gunicorn 69 | $ gunicorn search_service.search_wsgi 70 | 71 | # In a different terminal, verify the service is up by running 72 | $ curl -v http://localhost:8000/healthcheck 73 | ``` 74 | For more information see the [Gunicorn configuration documentation](https://docs.gunicorn.org/en/latest/run.html "documentation"). 75 | 76 | ### Configuration outside local environment 77 | By default, the Search service uses [LocalConfig](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/config.py "LocalConfig") that looks for Elasticsearch running on localhost. 78 | In order to use a different endpoint, you need to create a [Config](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/config.py "Config") suitable for your use case, along the lines of the sketch below.
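A minimal sketch of such a config follows; the class name, endpoint URL, and chosen overrides are illustrative assumptions (it inherits LocalConfig so the remaining proxy settings carry over):

```python
# Sketch of a custom production config; values shown are assumptions.
import os

from search_service.config import LocalConfig


class ProdConfig(LocalConfig):
    PROXY_ENDPOINT = os.environ.get('PROXY_ENDPOINT', 'http://es.prod.example.com:9200')
    STATS = True              # emit statsd metrics in production
    SWAGGER_ENABLED = False   # don't expose the API docs publicly
```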
Once a config class has been created, it can be referenced by an [environment variable](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/search_wsgi.py "environment variable"): `SEARCH_SVC_CONFIG_MODULE_CLASS` 79 | 80 | For example, in order to have a different config for production, you can inherit from the Config class, create a production config, and pass that config class in via the environment variable. Say the class name is ProdConfig and it lives in the search_service.config module; then you can set it as below: 81 | 82 | `SEARCH_SVC_CONFIG_MODULE_CLASS=search_service.config.ProdConfig` 83 | 84 | This way the Search service will use the production config in the production environment. For more information on how the configuration is loaded and used, here's a reference from the Flask [doc](http://flask.pocoo.org/docs/1.0/config/#development-production "doc"). 85 | 86 | # Developer guide 87 | ## Code style 88 | - PEP 8: Amundsen Search service follows [PEP8 - Style Guide for Python Code](https://www.python.org/dev/peps/pep-0008/ "PEP8 - Style Guide for Python Code"). 89 | - Typing hints: Amundsen Search service also utilizes [Typing hints](https://docs.python.org/3/library/typing.html "Typing hint") for better readability. 90 | 91 | ## API documentation 92 | We have Swagger documentation set up with OpenAPI 3.0.2. This documentation is generated via [Flasgger](https://github.com/flasgger/flasgger). 93 | When adding or updating an API please make sure to update the documentation. To see the documentation, run the application locally and go to `localhost:5001/apidocs/`. 94 | Currently the documentation only works with the local configuration. 95 | 96 | ## Code structure 97 | Amundsen Search service consists of three packages: API, Models, and Proxy. 98 | 99 | ### [API package](https://github.com/amundsen-io/amundsensearchlibrary/tree/master/search_service/api "API package") 100 | A package that contains [Flask Restful resources](https://flask-restful.readthedocs.io/en/latest/api.html#flask_restful.Resource "Flask Restful resources") that serve Restful API requests. 101 | The [routing of the API](https://flask-restful.readthedocs.io/en/latest/quickstart.html#resourceful-routing "routing of API") is registered [here](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/__init__.py "here"). 102 | 103 | ### [Proxy package](https://github.com/amundsen-io/amundsensearchlibrary/tree/master/search_service/proxy "Proxy package") 104 | The Proxy package contains proxy modules that talk to the dependencies of the Search service. There are currently two modules in the Proxy package, [Elasticsearch](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/proxy/elasticsearch.py "Elasticsearch") and [Statsd](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/proxy/statsd_utilities.py "Statsd"). 105 | 106 | ##### [Elasticsearch proxy module](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/proxy/elasticsearch.py "Elasticsearch proxy module") 107 | The [Elasticsearch](https://www.elastic.co/products/elasticsearch "Elasticsearch") proxy module serves various use cases of searching metadata from Elasticsearch. It uses [Query DSL](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html "Query DSL") to build a query for the use case, executes the search, and transforms the response into a [model](https://github.com/amundsen-io/amundsensearchlibrary/tree/master/search_service/models "model"). A rough sketch of that flow is shown below.
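This is a sketch built with the `elasticsearch_dsl` library, not the proxy's actual query; the query term, index, and searched fields are assumptions for illustration:

```python
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search

client = Elasticsearch('http://localhost:9200')

# Build a Query DSL query, apply paging via slicing, then execute it.
search = Search(using=client, index='table_search_index') \
    .query('multi_match', query='revenue',
           fields=['name', 'description', 'column_names'])
response = search[0:10].execute()
for hit in response:
    print(hit.name)
```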
108 | 109 | ##### [Atlas proxy module](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/proxy/atlas.py "Atlas proxy module") 110 | The [Apache Atlas](https://atlas.apache.org/ "Apache Atlas") proxy module uses Atlas to serve search requests. At the moment the Basic Search REST API is used via the [Python Client](https://atlasclient.readthedocs.io/ "Atlas Client"). 111 | 112 | 113 | ##### [Statsd utilities module](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/proxy/statsd_utilities.py "Statsd utilities module") 114 | The [Statsd](https://github.com/etsy/statsd/wiki "Statsd") utilities module provides methods and functions for publishing metrics to statsd. By default, statsd integration is disabled and you can turn it on from the [Search service configuration](https://github.com/amundsen-io/amundsensearchlibrary/blob/master/search_service/config.py#L7 "Search service configuration"). 115 | For specific configuration related to statsd, you can configure it through [environment variables.](https://statsd.readthedocs.io/en/latest/configure.html#from-the-environment "environment variables.") 116 | 117 | ### [Models package](https://github.com/amundsen-io/amundsensearchlibrary/tree/master/search_service/models "Models package") 118 | The Models package contains modules, each of which holds a number of Python classes. These Python classes are used as schemas and data holders. All data exchange within the Amundsen Search service uses classes from Models to ensure validity and to improve readability and maintainability. 119 | 120 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types.
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "{}" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2018 Lyft, Inc. 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /search_service/proxy/atlas.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0
3 | 
4 | import logging
5 | from re import sub
6 | from typing import Any, List, Dict, Tuple, Optional
7 | 
8 | from atlasclient.client import Atlas
9 | from atlasclient.exceptions import BadRequest
10 | from atlasclient.models import Entity, EntityCollection
11 | # default search page size
12 | from atlasclient.utils import parse_table_qualified_name
13 | 
14 | from search_service.models.dashboard import SearchDashboardResult
15 | from search_service.models.table import SearchTableResult
16 | from search_service.models.user import SearchUserResult
17 | from search_service.models.table import Table
18 | from search_service.models.tag import Tag
19 | from search_service.proxy import BaseProxy
20 | from search_service.proxy.statsd_utilities import timer_with_counter
21 | 
22 | LOGGER = logging.getLogger(__name__)
23 | 
24 | 
25 | class AtlasProxy(BaseProxy):
26 |     """
27 |     AtlasSearch connection handler
28 |     """
29 |     ATLAS_TABLE_ENTITY = 'Table'
30 |     ATLAS_QN_ATTRIBUTE = 'qualifiedName'
31 | 
32 |     def __init__(self, *,
33 |                  host: str = None,
34 |                  user: str = '',
35 |                  password: str = '',
36 |                  client: Atlas = None,
37 |                  page_size: int = 10) -> None:
38 |         self.atlas = client or Atlas(host, username=user, password=password)
39 |         self.page_size = page_size
40 | 
41 |     @staticmethod
42 |     def _extract_entities(collections: EntityCollection) -> List[Entity]:
43 |         """
44 |         Helper method for flattening all collections from {collections}
45 | 
46 |         :return: list of all entities
47 |         """
48 |         entities: List[Entity] = []
49 | 
50 |         for collection in collections:
51 |             entities.extend(collection.entities)
52 |         return entities
53 | 
54 |     def _prepare_tables(self, response: EntityCollection, enhance_metadata: bool = False) -> List[Table]:
55 |         """
56 |         Based on an Atlas {response} with table entities, we render Table objects.
57 | 
58 |         :param response: Collection of Atlas Entities
59 |         :param enhance_metadata: Should Atlas be queried to acquire complete entity definitions (search might not
60 |         return all available attributes)
61 |         :return: List of Table objects
62 |         """
63 | 
64 |         result = list()
65 | 
66 |         # If the condition is satisfied, we query Atlas again to collect all available information regarding each
67 |         # table, along with relationship information. This is helpful when using Atlas DSL, as returned entities
68 |         # contain a minimal set of attributes.
69 |         if enhance_metadata:
70 |             ids = list()
71 | 
72 |             for hit in response:
73 |                 ids.append(hit.guid)
74 | 
75 |             entities = self._extract_entities(self.atlas.entity_bulk(guid=ids, ignoreRelationships=False))
76 |         else:
77 |             entities = response
78 | 
79 |         for entity in entities:
80 |             entity_attrs = entity.attributes
81 | 
82 |             qn = parse_table_qualified_name(qualified_name=entity_attrs.get(self.ATLAS_QN_ATTRIBUTE))
83 | 
84 |             entity_name = qn.get('table_name') or entity_attrs.get('name')
85 |             db_name = qn.get('db_name', '')
86 |             db_cluster = qn.get('cluster_name', '')
87 | 
88 |             tags: List[Tag] = []
89 | 
90 |             for classification in entity.classificationNames or list():
91 |                 tags.append(Tag(tag_name=classification))
92 | 
93 |             badges: List[Tag] = tags
94 | 
95 |             table = Table(name=entity_name,
96 |                           key=f"{entity.typeName}://{db_cluster}.{db_name}/{entity_name}",
97 |                           description=entity_attrs.get('description'),
98 |                           cluster=db_cluster,
99 |                           database=entity.typeName,
100 |                           schema=db_name,
101 |                           tags=tags,
102 |                           badges=badges,
103 |                           column_names=[],
104 |                           last_updated_timestamp=entity_attrs.get('updateTime'))
105 | 
106 |             result.append(table)
107 | 
108 |         return result
109 | 
110 |     def _atlas_basic_search(self, query_params: Dict) -> Tuple[List[Table], int]:
111 |         """
112 |         Conduct search using Atlas Basic Search API.
113 | 
114 |         :param query_params: A dictionary of query parameters to be passed to the Basic Search POST method of Atlas
115 |         :return: List of Table objects and approximate count of entities matching in Atlas
116 |         """
117 | 
118 |         try:
119 |             # Fetch the table entities based on query terms
120 |             search_results = self.atlas.search_basic.create(data=query_params)
121 |         except BadRequest as ex:
122 |             LOGGER.error(f"Fetching Tables Failed : {str(ex)}")
123 |             return [], 0
124 | 
125 |         if not len(search_results.entities):
126 |             return [], 0
127 | 
128 |         # noinspection PyProtectedMember
129 |         results_count = search_results._data.get("approximateCount")
130 | 
131 |         results = self._prepare_tables(search_results.entities, enhance_metadata=False)
132 | 
133 |         return results, results_count
134 | 
135 |     def _prepare_basic_search_query(self, limit: int, page_index: int, query_term: Optional[str] = None,
136 |                                     filters: Optional[List[Tuple[str, str, str]]] = None,
137 |                                     operator: Optional[str] = None,
138 |                                     classification: Optional[str] = None,
139 |                                     entity_type: str = None) -> Dict[str, Any]:
140 |         """
141 |         Render a query for Atlas Basic Search API.
142 | 
143 |         :param query_term: Search Query Term
144 |         :param limit: Maximum number of results to return
145 |         :param page_index: Index of the search results page (used with the page size to calculate the offset)
146 |         :param filters: Optional list of tuples (field, condition, value) that will translate into entityFilters for
147 |         narrowing down search results
148 |         :param operator: Logical condition ('AND'/'OR') used to join multiple entity filters; defaults to 'AND'
149 |         :param entity_type: Which kind of entity this query will look for
150 |         :return: Dictionary object prepared for Atlas client basic_search
151 |         """
152 |         if not entity_type:
153 |             entity_type = self.ATLAS_TABLE_ENTITY
154 | 
155 |         query: Dict[str, Any] = {'typeName': entity_type,
156 |                                  'excludeDeletedEntities': True,
157 |                                  'includeSubTypes': True,
158 |                                  'limit': limit,
159 |                                  'offset': page_index * self.page_size,
160 |                                  'sortBy': 'popularityScore',
161 |                                  'sortOrder': 'DESCENDING'}
162 | 
163 |         if query_term:
164 |             query_term = f'*{query_term}*'
165 |             query_term = sub('\\*+', '*', query_term)
166 | 
167 |             query['query'] = query_term
168 | 
169 |         # filters and query_term shouldn't be mixed
170 |         if filters and not query_term:
171 |             condition = operator or 'AND'
172 |             criterion: List[Dict[str, str]] = list()
173 | 
174 |             for _query_filter in filters:
175 |                 attribute_name, operator, attribute_value = _query_filter
176 | 
177 |                 # filters perform much better when the wildcard is a dot, not a star
178 |                 attribute_value = sub('\\*', '.', attribute_value)
179 |                 query_filter = {'attributeName': attribute_name,
180 |                                 'operator': operator.upper(),
181 |                                 'attributeValue': attribute_value}
182 | 
183 |                 criterion.append(query_filter)
184 | 
185 |             if len(criterion) > 1:
186 |                 query['entityFilters'] = {'condition': condition, 'criterion': criterion}
187 |             elif len(criterion) == 1:
188 |                 query['entityFilters'] = criterion[0]
189 |         elif classification:
190 |             query['classification'] = classification
191 | 
192 |         return query
193 | 
194 |     @timer_with_counter
195 |     def fetch_table_search_results(self, *,
196 |                                    query_term: str,
197 |                                    page_index: int = 0,
198 |                                    index: str = '') -> SearchTableResult:
199 |         """
200 |         Conduct a 'Basic Search' in Amundsen UI.
201 | 
202 |         Atlas Basic Search API is used for that operation. We search on the `qualifiedName` field as
203 |         (following Atlas documentation) any 'Referencable' entity 'can be searched for using a unique attribute called
204 |         qualifiedName'. It provides the best performance, simplicity, and sorting by popularityScore.
205 | 
206 |         :param query_term: Search Query Term
207 |         :param page_index: Index of search page user is currently on (for pagination)
208 |         :param index: Search Index (different resource corresponding to different index)
209 |         :return: SearchTableResult Object
210 |         """
211 |         if not query_term:
212 |             # return empty result for blank query term
213 |             return SearchTableResult(total_results=0, results=[])
214 | 
215 |         query_params = self._prepare_basic_search_query(self.page_size, page_index, query_term=query_term)
216 | 
217 |         tables, approx_count = self._atlas_basic_search(query_params)
218 | 
219 |         return SearchTableResult(total_results=approx_count, results=tables)
220 | 
221 |     def fetch_search_results_with_filter(self, *,
222 |                                          query_term: str,
223 |                                          search_request: dict,
224 |                                          page_index: int = 0,
225 |                                          index: str = '') -> SearchTableResult:
226 |         """
227 |         Conduct an 'Advanced Search' to narrow down search results with the use of filters.
228 | 
229 |         Using Atlas Basic Search with filters to retrieve precise results and sort them by popularity score.
230 | 
231 | 
232 |         :param query_term: A Search Query Term
233 |         :param search_request: Values from Filters
234 |         :param page_index: Index of search page user is currently on (for pagination)
235 |         :param index: Search Index (different resource corresponding to different index)
236 |         :return: SearchTableResult Object
237 |         """
238 |         _filters = search_request.get('filters', dict())
239 | 
240 |         db_filter_value = _filters.get('database')
241 |         table_filter_value = _filters.get('table')
242 |         cluster_filter_value = _filters.get('cluster')
243 |         tags_filter_value = _filters.get('tag')
244 | 
245 |         filters = list()
246 | 
247 |         # qualifiedName follows pattern ${db}.${table}@${cluster}
248 |         if db_filter_value:
249 |             filters.append((self.ATLAS_QN_ATTRIBUTE, 'STARTSWITH', db_filter_value[0] + '.'))
250 | 
251 |         if cluster_filter_value:
252 |             filters.append((self.ATLAS_QN_ATTRIBUTE, 'ENDSWITH', '@' + cluster_filter_value[0]))
253 | 
254 |         if table_filter_value:
255 |             filters.append(('name', 'CONTAINS', table_filter_value[0]))
256 | 
257 |         # Currently Atlas doesn't allow mixing search by filters and classifications
258 |         if filters:
259 |             query_params = self._prepare_basic_search_query(self.page_size, page_index,
260 |                                                             filters=filters)
261 |         elif tags_filter_value:
262 |             query_params = self._prepare_basic_search_query(self.page_size, page_index,
263 |                                                             classification=tags_filter_value[0])
264 |         else:
265 |             # No supported filter was provided; return early instead of referencing an undefined query
266 |             return SearchTableResult(total_results=0, results=[])
267 | 
268 |         tables, approx_count = self._atlas_basic_search(query_params)
269 | 
270 |         return SearchTableResult(total_results=approx_count, results=tables)
271 | 
272 |     def fetch_user_search_results(self, *,
273 |                                   query_term: str,
274 |                                   page_index: int = 0,
275 |                                   index: str = '') -> SearchUserResult:
276 |         pass
277 | 
278 |     def update_document(self, *, data: List[Dict[str, Any]], index: str = '') -> str:
279 |         raise NotImplementedError()
280 | 
281 |     def create_document(self, *, data: List[Dict[str, Any]], index: str = '') -> str:
282 |         raise NotImplementedError()
283 | 
284 |     def delete_document(self, *, data: List[str], index: str = '') -> str:
285 |         raise NotImplementedError()
286 | 
287 |     def fetch_dashboard_search_results(self, *,
288 |                                        query_term: str,
289 |                                        page_index: int = 0,
290 |                                        index: str = '') -> SearchDashboardResult:
291 |         pass
292 | 
--------------------------------------------------------------------------------
/tests/unit/proxy/test_atlas.py:
--------------------------------------------------------------------------------
1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from mock import MagicMock, patch 7 | from typing import Any, Callable, Dict, List, Tuple 8 | 9 | from search_service import create_app, config 10 | from search_service.models.table import Table, SearchTableResult 11 | from search_service.models.tag import Tag 12 | from search_service.proxy import get_proxy_client 13 | 14 | 15 | class TestAtlasProxy(unittest.TestCase): 16 | maxDiff = None 17 | 18 | def to_class(self, entity: Dict[str, Any]) -> Any: 19 | class ObjectView(object): 20 | def __init__(self, dictionary: Dict[str, Any]): 21 | self.__dict__ = dictionary 22 | 23 | return ObjectView(entity) 24 | 25 | def setUp(self) -> None: 26 | self.app = create_app(config_module_class='search_service.config.LocalConfig') 27 | self.app_context = self.app.app_context() 28 | self.app_context.push() 29 | self.qn = False 30 | with self.app_context: 31 | from search_service.proxy.atlas import AtlasProxy 32 | self.proxy = AtlasProxy(host='DOES_NOT_MATTER:0000') 33 | self.proxy.atlas = MagicMock() 34 | self.qn = 'name' == "qualifiedName" 35 | self.entity_type = 'TEST_ENTITY' 36 | self.cluster = 'TEST_CLUSTER' 37 | self.db = 'TEST_DB' 38 | self.name = 'TEST_TABLE' 39 | self.table_uri = f'{self.entity_type}://{self.cluster}.{self.db}/{self.name}' 40 | 41 | self.classification_entity = { 42 | 'classifications': [ 43 | {'typeName': 'PII_DATA', 'name': 'PII_DATA'}, 44 | ] 45 | } 46 | self.test_column = { 47 | 'guid': 'DOESNT_MATTER', 48 | 'typeName': 'COLUMN', 49 | 'attributes': { 50 | 'qualifiedName': f"{self.db}.Table1.column@{self.cluster}", 51 | 'type': 'Managed', 52 | 'description': 'column description', 53 | 'position': 1 54 | } 55 | } 56 | 57 | self.db_entity = { 58 | 'guid': '-100', 59 | 'typeName': self.entity_type, 60 | 'attributes': { 61 | 'qualifiedName': self.db + "@" + self.cluster, 62 | 'name': self.db, 63 | 'clusterName': self.cluster 64 | } 65 | } 66 | 67 | self.entity1_name = 'Table1' 68 | self.entity1_description = 'Dummy Description' 69 | self.entity1 = { 70 | 'guid': '1', 71 | 'typeName': self.entity_type, 72 | 'classificationNames': [ 73 | 'PII_DATA' 74 | ], 75 | 'relationshipAttributes': { 76 | 'db': self.db_entity 77 | }, 78 | 'attributes': { 79 | 'updateTime': 123, 80 | 'name': self.entity1_name, 81 | 'qualifiedName': f"{self.db}.Table1@{self.cluster}", 82 | 'classifications': [ 83 | {'typeName': 'PII_DATA'} 84 | ], 85 | 'description': self.entity1_description, 86 | 'owner': 'dummy@email.com', 87 | 'columns': [self.test_column], 88 | 'db': self.db_entity 89 | }, 90 | 'classifications': self.classification_entity['classifications'] 91 | } 92 | 93 | self.entity2_name = 'Table2' 94 | self.entity2 = { 95 | 'guid': '2', 96 | 'typeName': self.entity_type, 97 | 'classificationNames': [], 98 | 'attributes': { 99 | 'updateTime': 234, 100 | 'qualifiedName': 'Table2', 101 | 'name': self.entity2_name, 102 | 'db': None, 103 | 'description': 'Dummy Description', 104 | 'owner': 'dummy@email.com', 105 | }, 106 | 'classifications': self.classification_entity['classifications'] 107 | } 108 | 109 | self.entities = { 110 | 'entities': [ 111 | self.entity1, 112 | self.entity2, 113 | ], 114 | } 115 | 116 | def _qualified(self, kind: str, name: str, table: str = None) -> str: 117 | if not self.qn: 118 | return name 119 | if kind == "db": 120 | return f"{name}@{self.cluster}" 121 | if kind == "column" and table: 122 | return f"{self.db}.{table}.{name}@{self.cluster}" 123 | if kind == "table": 124 | return 
f"{self.db}.{name}@{self.cluster}" 125 | return name 126 | 127 | @staticmethod 128 | def recursive_mock(start: Any) -> Any: 129 | """ 130 | The atlas client allows retrieval of data via __getattr__. 131 | That is why we build this method to recursively mock dictionary's to add 132 | the __getattr__ and to convert them into MagicMock. 133 | :param start: dictionary, list, or other 134 | :return: MagicMock, list with mocked items, or other 135 | """ 136 | if isinstance(start, dict): 137 | dict_mock = MagicMock() 138 | dict_mock.__getitem__.side_effect = start.__getitem__ 139 | dict_mock.__iter__.side_effect = start.__iter__ 140 | dict_mock.__contains__.side_effect = start.__contains__ 141 | dict_mock.get.side_effect = start.get 142 | for key, value in start.items(): 143 | value_mock = TestAtlasProxy.recursive_mock(value) 144 | dict_mock.__setattr__(key, value_mock) 145 | start[key] = value_mock 146 | return dict_mock 147 | elif isinstance(start, (list,)): 148 | return list(map(TestAtlasProxy.recursive_mock, start)) 149 | else: 150 | return start 151 | 152 | @staticmethod 153 | def dsl_inject(checks: List[Tuple[Callable[[str], bool], dict]]) -> Callable: 154 | """ 155 | helper method for returning results based on sql queries 156 | :param checks: 157 | :return: 158 | """ 159 | 160 | def search_dsl(query: str) -> Dict[str, Any]: 161 | for check, data in checks: 162 | if check(query): 163 | response = MagicMock() 164 | d = TestAtlasProxy.recursive_mock(data) 165 | d.__iter__.return_value = [d] 166 | d._data = { 167 | 'queryType': "DSL", 168 | 'queryText': query, 169 | **data 170 | } 171 | response.__iter__.return_value = [d] 172 | 173 | return response 174 | raise Exception(f"query not supported: {query}") 175 | 176 | return search_dsl 177 | 178 | @staticmethod 179 | def bulk_inject(entities: List[Dict[str, Any]]) -> Callable: 180 | """ 181 | provide an entity_bulk method for atlas 182 | :param entities: 183 | :return: 184 | """ 185 | 186 | # noinspection PyPep8Naming 187 | def guid_filter(guid: List, ignoreRelationships: bool = False) -> Any: 188 | return TestAtlasProxy.recursive_mock([{ 189 | 'entities': list(filter(lambda x: x['guid'] in guid, entities)) 190 | }]) 191 | 192 | return guid_filter 193 | 194 | def test_setup_client(self) -> None: 195 | with self.app_context: 196 | from search_service.proxy.atlas import AtlasProxy 197 | client = AtlasProxy( 198 | host="http://localhost:21000", 199 | user="admin", 200 | password="admin", 201 | page_size=1337 202 | ) 203 | self.assertEqual(client.atlas.base_url, "http://localhost:21000") 204 | self.assertEqual(client.atlas.client.request_params['headers']['Authorization'], 'Basic YWRtaW46YWRtaW4=') 205 | self.assertEqual(client.page_size, 1337) 206 | 207 | @patch('search_service.proxy._proxy_client', None) 208 | def test_setup_config(self) -> None: 209 | # Gather all the configuration to create a Proxy Client 210 | self.app.config[config.PROXY_ENDPOINT] = "http://localhost:21000" 211 | self.app.config[config.PROXY_USER] = "admin" 212 | self.app.config[config.PROXY_PASSWORD] = "admin" 213 | self.app.config[config.PROXY_CLIENT] = config.PROXY_CLIENTS['ATLAS'] 214 | self.app.config[config.SEARCH_PAGE_SIZE_KEY] = 1337 215 | 216 | client = get_proxy_client() 217 | self.assertEqual(client.atlas.base_url, "http://localhost:21000") # type: ignore 218 | self.assertEqual(client.atlas.client.request_params['headers']['Authorization'], # type: ignore 219 | 'Basic YWRtaW46YWRtaW4=') 220 | self.assertEqual(client.page_size, 1337) # type: ignore 221 | 222 | def 
test_search_normal(self) -> None: 223 | expected = SearchTableResult(total_results=2, 224 | results=[Table(name=self.entity1_name, 225 | key=f"{self.entity_type}://" 226 | f"{self.cluster}.{self.db}/" 227 | f"{self.entity1_name}", 228 | description=self.entity1_description, 229 | cluster=self.cluster, 230 | database=self.entity_type, 231 | schema=self.db, 232 | column_names=[], 233 | tags=[Tag(tag_name='PII_DATA')], 234 | badges=[Tag(tag_name='PII_DATA')], 235 | last_updated_timestamp=123)]) 236 | entity1 = self.to_class(self.entity1) 237 | entity_collection = MagicMock() 238 | entity_collection.entities = [entity1] 239 | entity_collection._data = {'approximateCount': 1} 240 | 241 | result = MagicMock(return_value=entity_collection) 242 | 243 | with patch.object(self.proxy.atlas.search_basic, 'create', result): 244 | resp = self.proxy.fetch_table_search_results(query_term="Table") 245 | self.assertEqual(resp.total_results, 1) 246 | self.assertIsInstance(resp.results[0], Table, "Search result received is not of 'Table' type!") 247 | self.assertDictEqual(vars(resp.results[0]), vars(expected.results[0]), 248 | "Search Result doesn't match with expected result!") 249 | 250 | def test_search_empty(self) -> None: 251 | expected = SearchTableResult(total_results=0, 252 | results=[]) 253 | self.proxy.atlas.search_dsl = self.dsl_inject([ 254 | (lambda dsl: "select count()" in dsl, 255 | {"attributes": {"name": ["count()"], "values": [[0]]}}), 256 | (lambda dsl: any(x in dsl for x in ["select table", "from Table"]), 257 | {'entities': []}) 258 | ]) 259 | self.proxy.atlas.entity_bulk = self.bulk_inject([ 260 | self.entity1, 261 | self.entity2, 262 | self.db_entity 263 | ]) 264 | resp = self.proxy.fetch_table_search_results(query_term="Table1") 265 | self.assertTrue(resp.total_results == 0, "there should no search results") 266 | self.assertIsInstance(resp, SearchTableResult, "Search result received is not of 'SearchResult' type!") 267 | self.assertDictEqual(vars(resp), vars(expected), 268 | "Search Result doesn't match with expected result!") 269 | 270 | def test_unknown_field(self) -> None: 271 | expected = SearchTableResult(total_results=0, 272 | results=[]) 273 | self.proxy.atlas.search_dsl = self.dsl_inject([ 274 | (lambda dsl: "select count()" in dsl, 275 | {"attributes": {"name": ["count()"], "values": [[0]]}}), 276 | (lambda dsl: any(x in dsl for x in ["select table", "from Table"]), 277 | {'entities': []}) 278 | ]) 279 | self.proxy.atlas.entity_bulk = self.bulk_inject([ 280 | self.entity1, 281 | self.entity2, 282 | self.db_entity 283 | ]) 284 | resp = self.proxy.fetch_table_search_results(query_term="unknown:Table1") 285 | self.assertTrue(resp.total_results == 0, "there should no search results") 286 | self.assertIsInstance(resp, SearchTableResult, "Search result received is not of 'SearchResult' type!") 287 | self.assertDictEqual(vars(resp), vars(expected), 288 | "Search Result doesn't match with expected result!") 289 | -------------------------------------------------------------------------------- /search_service/proxy/elasticsearch.py: -------------------------------------------------------------------------------- 1 | # Copyright Contributors to the Amundsen project. 
2 | # SPDX-License-Identifier: Apache-2.0
3 | 
4 | import logging
5 | import uuid
6 | import itertools
7 | from typing import Any, List, Dict, Union
8 | 
9 | from elasticsearch import Elasticsearch
10 | from elasticsearch_dsl import Search, query
11 | from elasticsearch.exceptions import NotFoundError
12 | from flask import current_app
13 | from amundsen_common.models.index_map import USER_INDEX_MAP, TABLE_INDEX_MAP
14 | 
15 | from search_service import config
16 | from search_service.api.dashboard import DASHBOARD_INDEX
17 | from search_service.api.user import USER_INDEX
18 | from search_service.api.table import TABLE_INDEX
19 | from search_service.models.search_result import SearchResult
20 | from search_service.models.table import Table, SearchTableResult
21 | from search_service.models.user import SearchUserResult
22 | from search_service.models.user import User
23 | from search_service.models.dashboard import Dashboard, SearchDashboardResult
24 | from search_service.models.tag import Tag
25 | from search_service.proxy.base import BaseProxy
26 | from search_service.proxy.statsd_utilities import timer_with_counter
27 | 
28 | # Default Elasticsearch index to use, if none specified
29 | DEFAULT_ES_INDEX = 'table_search_index'
30 | 
31 | LOGGING = logging.getLogger(__name__)
32 | 
33 | # mapping to translate request for table resources
34 | TABLE_MAPPING = {
35 |     'badges': 'badges',
36 |     'tag': 'tags',
37 |     'schema': 'schema.raw',
38 |     'table': 'name.raw',
39 |     'column': 'column_names.raw',
40 |     'database': 'database.raw',
41 |     'cluster': 'cluster.raw'
42 | }
43 | 
44 | # Maps payload to a class
45 | TAG_MAPPING = {
46 |     'badges': Tag,
47 |     'tags': Tag
48 | }
49 | 
50 | # mapping to translate request for dashboard resources
51 | DASHBOARD_MAPPING = {
52 |     'group_name': 'group_name.raw',
53 |     'name': 'name.raw',
54 |     'product': 'product',
55 |     'tag': 'tags',
56 | }
57 | 
58 | 
59 | class ElasticsearchProxy(BaseProxy):
60 |     """
61 |     ElasticSearch connection handler
62 |     """
63 | 
64 |     def __init__(self, *,
65 |                  host: str = None,
66 |                  user: str = '',
67 |                  password: str = '',
68 |                  client: Elasticsearch = None,
69 |                  page_size: int = 10
70 |                  ) -> None:
71 |         """
72 |         Constructs Elasticsearch client for interactions with the cluster.
73 |         Allows caller to pass a fully constructed Elasticsearch client, {client},
74 |         or constructs one from the parameters provided.
75 | 
76 |         :param host: Elasticsearch host we should connect to
77 |         :param user: user name to use for authentication
78 |         :param password: user password to use for authentication
79 |         :param client: Elasticsearch client to use, if provided
80 |         :param page_size: Number of search results to return per request
81 |         """
82 |         if client:
83 |             self.elasticsearch = client
84 |         else:
85 |             http_auth = (user, password) if user else None
86 |             self.elasticsearch = Elasticsearch(host, http_auth=http_auth)
87 | 
88 |         self.page_size = page_size
89 | 
90 |     def _get_search_result(self, page_index: int,
91 |                            client: Search,
92 |                            model: Any,
93 |                            search_result_model: Any = SearchResult) -> Any:
94 |         """
95 |         Common helper function to get the search result.
96 | 
97 |         :param page_index:
98 |         :param client:
99 |         :param model: The model to map results into (Table, User, etc.)
100 |         :return:
101 |         """
102 |         if model is None:
103 |             raise Exception('ES Doc model must be provided!')
104 | 
105 |         results = []
106 |         # Use {page_index} to calculate index of results to fetch from
107 |         if page_index != -1:
108 |             start_from = page_index * self.page_size
109 |             end_at = start_from + self.page_size
110 |             client = client[start_from:end_at]
111 |         else:
112 |             # if page index is -1, return everything
113 |             client = client[0:client.count()]
114 | 
115 |         response = client.execute()
116 | 
117 |         for hit in response:
118 |             try:
119 |                 # ES hit format: {'_d_': {'key': xxx, ...}}
120 |                 es_payload = hit.__dict__.get('_d_', {})
121 |                 if not es_payload:
122 |                     raise Exception('The ES doc does not contain the required field')
123 |                 result = {}
124 |                 for attr, val in es_payload.items():
125 |                     if attr in model.get_attrs():
126 |                         result[attr] = self._get_instance(attr=attr, val=val)
127 | 
128 |                 results.append(model(**result))
129 |             except Exception:
130 |                 LOGGING.exception("The record doesn't contain the specified field.")
131 | 
132 |         return search_result_model(total_results=response.hits.total,
133 |                                    results=results)
134 | 
135 |     def _get_instance(self, attr: str, val: Any) -> Any:
136 |         if attr in TAG_MAPPING:
137 |             # maps a given badge or tag to a tag class
138 |             return [TAG_MAPPING[attr](tag_name=property_val) for property_val in val]  # type: ignore
139 |         else:
140 |             return val
141 | 
142 |     def _search_helper(self, page_index: int,
143 |                        client: Search,
144 |                        query_name: dict,
145 |                        model: Any,
146 |                        search_result_model: Any = SearchResult) -> Any:
147 |         """
148 |         Constructs Elasticsearch Query DSL to:
149 |         1. Use function score to customize scoring of search result. It currently uses the "total_usage" field to score.
150 |         `Link https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html`_
151 |         2. Use a multi match query to search the term in multiple fields.
152 |         `Link https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-multi-match-query.html`_
153 | 
154 |         :param page_index:
155 |         :param client:
156 |         :param query_name: name of query to query the ES
157 |         :return:
158 |         """
159 | 
160 |         if query_name:
161 |             q = query.Q(query_name)
162 |             client = client.query(q)
163 | 
164 |         return self._get_search_result(page_index=page_index,
165 |                                        client=client,
166 |                                        model=model,
167 |                                        search_result_model=search_result_model)
168 | 
169 |     @timer_with_counter
170 |     def fetch_table_search_results(self, *,
171 |                                    query_term: str,
172 |                                    page_index: int = 0,
173 |                                    index: str = '') -> SearchTableResult:
174 |         """
175 |         Query Elasticsearch and return results as a list of Table objects
176 | 
177 |         :param query_term: search query term
178 |         :param page_index: index of search page user is currently on
179 |         :param index: current index for search. Provide different index for different resource.
180 |         :return: SearchResult Object
181 |         """
182 |         current_index = index if index else \
183 |             current_app.config.get(config.ELASTICSEARCH_INDEX_KEY, DEFAULT_ES_INDEX)
184 |         if not query_term:
185 |             # return empty result for blank query term
186 |             return SearchTableResult(total_results=0, results=[])
187 | 
188 |         s = Search(using=self.elasticsearch, index=current_index)
189 |         query_name = {
190 |             "function_score": {
191 |                 "query": {
192 |                     "multi_match": {
193 |                         "query": query_term,
194 |                         "fields": ["display_name^1000",
195 |                                    "name.raw^75",
196 |                                    "name^5",
197 |                                    "schema^3",
198 |                                    "description^3",
199 |                                    "column_names^2",
200 |                                    "column_descriptions",
201 |                                    "tags",
202 |                                    "badges",
203 |                                    "programmatic_descriptions"],
204 |                     }
205 |                 },
206 |                 "field_value_factor": {
207 |                     "field": "total_usage",
208 |                     "modifier": "log2p"
209 |                 }
210 |             }
211 |         }
212 | 
213 |         return self._search_helper(page_index=page_index,
214 |                                    client=s,
215 |                                    query_name=query_name,
216 |                                    model=Table,
217 |                                    search_result_model=SearchTableResult)
218 | 
219 |     @staticmethod
220 |     def get_model_by_index(index: str) -> Any:
221 |         if index == TABLE_INDEX:
222 |             return Table
223 |         elif index == USER_INDEX:
224 |             return User
225 |         elif index == DASHBOARD_INDEX:
226 |             return Dashboard
227 | 
228 |         raise Exception('Unable to map given index to a valid model')
229 | 
230 |     @staticmethod
231 |     def parse_filters(filter_list: Dict,
232 |                       index: str) -> str:
233 |         query_list = []  # type: List[str]
234 |         if index == TABLE_INDEX:
235 |             mapping = TABLE_MAPPING
236 |         elif index == DASHBOARD_INDEX:
237 |             mapping = DASHBOARD_MAPPING
238 |         else:
239 |             raise Exception(f'index {index} does not exist or does not support search filters')
240 |         for category, item_list in filter_list.items():
241 |             mapped_category = mapping.get(category)
242 |             if mapped_category is None:
243 |                 LOGGING.warning(f'Unsupported filter category: {category} passed in list of filters')
244 |             elif item_list == '' or item_list == ['']:
245 |                 LOGGING.warning(f'The filter value cannot be empty. In this case the filter {category} is ignored')
246 |             else:
247 |                 query_list.append(mapped_category + ':' + '(' + ' OR '.join(item_list) + ')')
248 | 
249 |         if len(query_list) == 0:
250 |             return ''
251 | 
252 |         return ' AND '.join(query_list)
253 | 
254 |     @staticmethod
255 |     def validate_filter_values(search_request: dict) -> Any:
256 |         if 'filters' in search_request:
257 |             filter_values_list = search_request['filters'].values()
258 |             # Ensure all values are arrays
259 |             filter_values_list = list(
260 |                 map(lambda x: x if type(x) == list else [x], filter_values_list))
261 |             # Flatten the array of arrays
262 |             filter_values_list = list(itertools.chain.from_iterable(filter_values_list))
263 |             # Check if / or : exist in any of the values
264 |             if any(("/" in str(item) or ":" in str(item)) for item in (filter_values_list)):
265 |                 return False
266 |         return True
267 | 
268 |     @staticmethod
269 |     def parse_query_term(query_term: str,
270 |                          index: str) -> str:
271 |         # TODO: Might be some issue with using wildcard & underscore
272 |         # https://discuss.elastic.co/t/wildcard-search-with-underscore-is-giving-no-result/114010/8
273 |         if index == TABLE_INDEX:
274 |             query_term = f'(name:(*{query_term}*) OR name:({query_term}) ' \
275 |                          f'OR schema:(*{query_term}*) OR schema:({query_term}) ' \
276 |                          f'OR description:(*{query_term}*) OR description:({query_term}) ' \
277 |                          f'OR column_names:(*{query_term}*) OR column_names:({query_term}) ' \
278 |                          f'OR column_descriptions:(*{query_term}*) OR column_descriptions:({query_term}))'
279 |         elif index == DASHBOARD_INDEX:
280 |             query_term = f'(name:(*{query_term}*) OR name:({query_term}) ' \
281 |                          f'OR group_name:(*{query_term}*) OR group_name:({query_term}) ' \
282 |                          f'OR query_names:(*{query_term}*) OR query_names:({query_term}) ' \
283 |                          f'OR description:(*{query_term}*) OR description:({query_term}) ' \
284 |                          f'OR tags:(*{query_term}*) OR tags:({query_term}) ' \
285 |                          f'OR badges:(*{query_term}*) OR badges:({query_term}) ' \
286 |                          f'OR product:(*{query_term}*) OR product:({query_term}))'
287 |         else:
288 |             raise Exception(f'index {index} does not exist or does not support search filters')
289 |         return query_term
290 | 
291 |     @classmethod
292 |     def convert_query_json_to_query_dsl(cls, *,
293 |                                         search_request: dict,
294 |                                         query_term: str,
295 |                                         index: str) -> str:
296 |         """
297 |         Convert the generic query json to query DSL
298 |         e.g.
299 |         ```
300 |         {
301 |             'type': 'AND',
302 |             'filters': {
303 |                 'database': ['hive', 'bigquery'],
304 |                 'schema': ['test-schema1', 'test-schema2'],
305 |                 'table': ['*amundsen*'],
306 |                 'column': ['*ds*'],
307 |                 'tag': ['test-tag']
308 |             }
309 |         }
310 | 
311 |         This generic JSON will convert into DSL depending on the backend engines.
312 | 
313 |         E.g. in Elasticsearch, it will become
314 |         'database':('hive' OR 'bigquery') AND
315 |         'schema':('test-schema1' OR 'test-schema2') AND
316 |         'table':('*amundsen*') AND
317 |         'column':('*ds*') AND
318 |         'tag':('test-tag')
319 |         ```
320 | 
321 |         :param search_request:
322 |         :param query_term:
323 |         :param index: table_index, dashboard_index
324 |         :return: The search engine query DSL
325 |         """
326 |         filter_list = search_request.get('filters')
327 |         add_query = ''
328 |         query_dsl = ''
329 |         if filter_list:
330 |             valid_filters = cls.validate_filter_values(search_request)
331 |             if valid_filters is False:
332 |                 raise Exception(
333 |                     'The search filters contain invalid characters and thus cannot be handled by ES')
334 |             query_dsl = cls.parse_filters(filter_list,
335 |                                           index)
336 | 
337 |         if query_term:
338 |             add_query = cls.parse_query_term(query_term,
339 |                                              index)
340 | 
341 |         if not query_dsl and not add_query:
342 |             raise Exception('Unable to convert parameters to valid query dsl')
343 | 
344 |         result = ''
345 |         if query_dsl and add_query:
346 |             result = query_dsl + ' AND ' + add_query
347 |         elif add_query and not query_dsl:
348 |             result = add_query
349 |         elif query_dsl and not add_query:
350 |             result = query_dsl
351 | 
352 |         return result
353 | 
354 |     @timer_with_counter
355 |     def fetch_search_results_with_filter(self, *,
356 |                                          query_term: str,
357 |                                          search_request: dict,
358 |                                          page_index: int = 0,
359 |                                          index: str = '') -> Union[SearchDashboardResult,
360 |                                                                    SearchTableResult]:
361 |         """
362 |         Query Elasticsearch and return results as a list of Table or Dashboard objects
363 |         :param search_request: A json representation of search request
364 |         :param page_index: index of search page user is currently on
365 |         :param index: current index for search. Provide different index for different resource.
366 |         :return: SearchResult Object
367 |         """
368 |         current_index = index if index else \
369 |             current_app.config.get(config.ELASTICSEARCH_INDEX_KEY, DEFAULT_ES_INDEX)  # type: str
370 |         if current_index == DASHBOARD_INDEX:
371 |             search_model = SearchDashboardResult  # type: Any
372 |         elif current_index == TABLE_INDEX:
373 |             search_model = SearchTableResult
374 |         else:
375 |             raise RuntimeError(f'the {index} index does not have search filter support')
376 |         if not search_request:
377 |             # return empty result for blank query term
378 |             return search_model(total_results=0, results=[])
379 | 
380 |         try:
381 |             query_string = self.convert_query_json_to_query_dsl(search_request=search_request,
382 |                                                                 query_term=query_term,
383 |                                                                 index=current_index)  # type: str
384 |         except Exception as e:
385 |             LOGGING.exception(e)
386 |             # return nothing if any exception is thrown under the hood
387 |             return search_model(total_results=0, results=[])
388 | 
389 |         s = Search(using=self.elasticsearch, index=current_index)
390 | 
391 |         query_name = {
392 |             "function_score": {
393 |                 "query": {
394 |                     "query_string": {
395 |                         "query": query_string
396 |                     }
397 |                 },
398 |                 "field_value_factor": {
399 |                     "field": "total_usage",
400 |                     "modifier": "log2p"
401 |                 }
402 |             }
403 |         }
404 | 
405 |         model = self.get_model_by_index(current_index)
406 |         return self._search_helper(page_index=page_index,
407 |                                    client=s,
408 |                                    query_name=query_name,
409 |                                    model=model,
410 |                                    search_result_model=search_model)
411 | 
412 |     @timer_with_counter
413 |     def fetch_user_search_results(self, *,
414 |                                   query_term: str,
415 |                                   page_index: int = 0,
416 |                                   index: str = '') -> SearchUserResult:
417 |         if not index:
418 |             raise Exception("Index can't be empty for user search")
419 |         if not query_term:
420 |             # return empty result for blank query term
421 |             return SearchUserResult(total_results=0, results=[])
422 | 
423 |         s = Search(using=self.elasticsearch, index=index)
424 | 
425 |         # Don't use any weight (total_follow, total_own, total_use)
426 |         query_name = {
427 |             "function_score": {
428 |                 "query": {
429 |                     "multi_match": {
430 |                         "query": query_term,
431 |                         "fields": ["full_name.raw^30",
432 |                                    "full_name^5",
433 |                                    "first_name.raw^5",
434 |                                    "last_name.raw^5",
435 |                                    "first_name^3",
436 |                                    "last_name^3",
437 |                                    "email^3"],
438 |                         "operator": "and"
439 |                     }
440 |                 }
441 |             }
442 |         }
443 | 
444 |         return self._search_helper(page_index=page_index,
445 |                                    client=s,
446 |                                    query_name=query_name,
447 |                                    model=User,
448 |                                    search_result_model=SearchUserResult)
449 | 
450 |     @timer_with_counter
451 |     def fetch_dashboard_search_results(self, *,
452 |                                        query_term: str,
453 |                                        page_index: int = 0,
454 |                                        index: str = '') -> SearchDashboardResult:
455 |         """
456 |         Fetch dashboard search result with fuzzy search
457 | 
458 |         :param query_term:
459 |         :param page_index:
460 |         :param index:
461 |         :return:
462 |         """
463 |         current_index = index if index else \
464 |             current_app.config.get(config.ELASTICSEARCH_INDEX_KEY, DEFAULT_ES_INDEX)
465 | 
466 |         if not query_term:
467 |             # return empty result for blank query term
468 |             return SearchDashboardResult(total_results=0, results=[])
469 |         s = Search(using=self.elasticsearch, index=current_index)
470 | 
471 |         query_name = {
472 |             "function_score": {
473 |                 "query": {
474 |                     "multi_match": {
475 |                         "query": query_term,
476 |                         "fields": ["name.raw^75",
477 |                                    "name^7",
478 |                                    "group_name.raw^15",
479 |                                    "group_name^7",
480 |                                    "description^3",
481 |                                    "query_names^3"]
482 |                     }
483 |                 },
484 |                 "field_value_factor": {
485 |                     "field": "total_usage",
486 |                     "modifier": "log2p"
487 |                 }
488 |             }
489 |         }
490 | 
491 |         return self._search_helper(page_index=page_index,
492 |                                    client=s,
493 |                                    query_name=query_name,
494 |                                    model=Dashboard,
495 |                                    search_result_model=SearchDashboardResult)
496 | 
497 |     # The following methods are related to the document API used to update documents
498 |     @timer_with_counter
499 |     def create_document(self, *, data: List[Table], index: str) -> str:
500 |         """
501 |         Creates new index in elasticsearch, then routes traffic to the new index
502 |         instead of the old one
503 |         :return: str
504 |         """
505 | 
506 |         if not index:
507 |             raise Exception("Index can't be empty for creating document")
508 |         if not data:
509 |             LOGGING.warning('Received no data to upload to Elasticsearch')
510 |             return ''
511 | 
512 |         return self._create_document_helper(data=data, index=index)
513 | 
514 |     @timer_with_counter
515 |     def update_document(self, *, data: List[Table], index: str) -> str:
516 |         """
517 |         Updates the existing index in elasticsearch
518 |         :return: str
519 |         """
520 |         if not index:
521 |             raise Exception("Index can't be empty for updating document")
522 |         if not data:
523 |             LOGGING.warning('Received no data to upload to Elasticsearch')
524 |             return ''
525 | 
526 |         return self._update_document_helper(data=data, index=index)
527 | 
528 |     @timer_with_counter
529 |     def delete_document(self, *, data: List[str], index: str) -> str:
530 |         if not index:
531 |             raise Exception("Index can't be empty for deleting document")
532 |         if not data:
533 |             LOGGING.warning('Received no data to upload to Elasticsearch')
534 |             return ''
535 | 
536 |         return self._delete_document_helper(data=data, index=index)
537 | 
538 |     def _create_document_helper(self, data: List[Table], index: str) -> str:
539 |         # fetch indices that use our chosen alias (should only ever return one in a list)
540 |         indices = self._fetch_old_index(index)
541 | 
542 |         for i in indices:
543 |             # build a list of elasticsearch actions for bulk upload
544 |             actions = self._build_index_actions(data=data, index_key=i)
545 | 
546 |             # bulk create or update data
547 |             self._bulk_helper(actions)
548 | 
549 |         return index
550 | 
551 |     def _update_document_helper(self, data: List[Table], index: str) -> str:
552 |         # fetch indices that use our chosen alias (should only ever return one in a list)
553 |         indices = self._fetch_old_index(index)
554 | 
555 |         for i in indices:
556 |             # build a list of elasticsearch actions for bulk update
557 |             actions = self._build_update_actions(data=data, index_key=i)
558 | 
559 |             # bulk update existing documents in index
560 |             self._bulk_helper(actions)
561 | 
562 |         return index
563 | 
564 |     def _delete_document_helper(self, data: List[str], index: str) -> str:
565 |         # fetch indices that use our chosen alias
566 |         indices = self._fetch_old_index(index)
567 | 
568 |         # set the document type
569 |         type = User.get_type() if index == USER_INDEX else Table.get_type()
570 | 
571 |         for i in indices:
572 |             # build a list of elasticsearch actions for bulk deletion
573 |             actions = self._build_delete_actions(data=data, index_key=i, type=type)
574 | 
575 |             # bulk delete documents in index
576 |             self._bulk_helper(actions)
577 | 
578 |         return index
579 | 
580 |     def _build_index_actions(self, data: List[Table], index_key: str) -> List[Dict[str, Any]]:
581 |         actions = list()
582 |         for item in data:
583 |             index_action = {'index': {'_index': index_key, '_type': item.get_type(), '_id': item.get_id()}}
584 |             actions.append(index_action)
585 |             actions.append(item.__dict__)
586 |         return actions
587 | 
588 |     def _build_update_actions(self, data: List[Table], index_key: str) -> List[Dict[str, Any]]:
589 |         actions = list()
590 | 
591 |         for item in data:
592 |             actions.append({'update': {'_index': index_key, '_type': item.get_type(), '_id': item.get_id()}})
593 |             actions.append({'doc': item.__dict__})
594 |         return actions
595 | 
596 |     def _build_delete_actions(self, data: List[str], index_key: str, type: str) -> List[Dict[str, Any]]:
597 |         return [{'delete': {'_index': index_key, '_id': id, '_type': type}} for id in data]
598 | 
599 |     def _bulk_helper(self, actions: List[Dict[str, Any]]) -> None:
600 |         result = self.elasticsearch.bulk(actions)
601 | 
602 |         if result['errors']:
603 |             # ES's error messages are nested within elasticsearch objects and can
604 |             # fail silently if you aren't careful
605 |             LOGGING.error('Error during Elasticsearch bulk actions')
606 |             LOGGING.debug(result['items'])
607 |             return
608 | 
609 |     def _fetch_old_index(self, alias: str) -> List[str]:
610 |         """
611 |         Retrieve all indices that are currently tied to alias
612 |         (Can most often expect only one index to be returned in this list)
613 |         :return: list of elasticsearch indices
614 |         """
615 |         try:
616 |             indices = self.elasticsearch.indices.get_alias(alias).keys()
617 |             return indices
618 |         except NotFoundError:
619 |             LOGGING.warning('Received index not found error from Elasticsearch', exc_info=True)
620 | 
621 |             # create a new index if there isn't already one that is usable
622 |             new_index = self._create_index_helper(alias=alias)
623 |             return [new_index]
624 | 
625 |     def _create_index_helper(self, alias: str) -> str:
626 |         def _get_mapping(alias: str) -> str:
627 |             if alias == USER_INDEX:
628 |                 return USER_INDEX_MAP
629 |             elif alias == TABLE_INDEX:
630 |                 return TABLE_INDEX_MAP
631 |             return ''
632 |         index_key = str(uuid.uuid4())
633 |         mapping: str = _get_mapping(alias=alias)
634 |         self.elasticsearch.indices.create(index=index_key, body=mapping)
635 | 
636 |         # alias our new index
637 |         index_actions = {'actions': [{'add': {'index': index_key, 'alias': alias}}]}
638 |         self.elasticsearch.indices.update_aliases(index_actions)
639 |         return index_key
--------------------------------------------------------------------------------
/tests/unit/proxy/test_elasticsearch.py:
--------------------------------------------------------------------------------
1 | # Copyright Contributors to the Amundsen project.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | from unittest.mock import patch, MagicMock 7 | from typing import Any, Iterable, List # noqa: F401 8 | 9 | from search_service import create_app 10 | from search_service.api.user import USER_INDEX 11 | from search_service.api.table import TABLE_INDEX 12 | from search_service.proxy import get_proxy_client 13 | from search_service.proxy.elasticsearch import ElasticsearchProxy 14 | from search_service.models.search_result import SearchResult 15 | from search_service.models.dashboard import Dashboard 16 | from search_service.models.table import Table 17 | from search_service.models.tag import Tag 18 | from search_service.models.user import User 19 | 20 | 21 | class MockSearchResult: 22 | def __init__(self, *, 23 | name: str, 24 | key: str, 25 | description: str, 26 | cluster: str, 27 | database: str, 28 | schema: str, 29 | column_names: Iterable[str], 30 | tags: Iterable[Tag], 31 | badges: Iterable[Tag], 32 | last_updated_timestamp: int, 33 | programmatic_descriptions: List[str] = []) -> None: 34 | self.name = name 35 | self.key = key 36 | self.description = description 37 | self.cluster = cluster 38 | self.database = database 39 | self.schema = schema 40 | self.column_names = column_names 41 | self.tags = tags 42 | self.badges = badges 43 | self.last_updated_timestamp = last_updated_timestamp 44 | self.programmatic_descriptions = programmatic_descriptions 45 | 46 | 47 | class MockUserSearchResult: 48 | def __init__(self, *, 49 | first_name: str, 50 | last_name: str, 51 | full_name: str, 52 | team_name: str, 53 | email: str, 54 | manager_email: str, 55 | github_username: str, 56 | is_active: bool, 57 | employee_type: str, 58 | role_name: str, 59 | new_attr: str) -> None: 60 | self.full_name = full_name 61 | self.first_name = first_name 62 | self.last_name = last_name 63 | self.team_name = team_name 64 | self.email = email 65 | self.manager_email = manager_email 66 | self.github_username = github_username 67 | self.is_active = is_active 68 | self.employee_type = employee_type 69 | self.new_attr = new_attr 70 | self.role_name = role_name 71 | 72 | 73 | class Response: 74 | def __init__(self, 75 | result: Any): 76 | self._d_ = result 77 | 78 | 79 | class TestElasticsearchProxy(unittest.TestCase): 80 | 81 | def setUp(self) -> None: 82 | self.app = create_app(config_module_class='search_service.config.LocalConfig') 83 | self.app_context = self.app.app_context() 84 | self.app_context.push() 85 | 86 | mock_elasticsearch_client = MagicMock() 87 | self.es_proxy = ElasticsearchProxy(client=mock_elasticsearch_client) 88 | self.mock_badge = Tag(tag_name='name') 89 | self.mock_tag = Tag(tag_name='match') 90 | self.mock_empty_badge = [] # type: List[Tag] 91 | self.mock_empty_tag = [] # type: List[Tag] 92 | self.mock_result1 = MockSearchResult(name='test_table', 93 | key='test_key', 94 | description='test_description', 95 | cluster='gold', 96 | database='test_db', 97 | schema='test_schema', 98 | column_names=['test_col1', 'test_col2'], 99 | tags=self.mock_empty_tag, 100 | badges=self.mock_empty_badge, 101 | last_updated_timestamp=1527283287, 102 | programmatic_descriptions=[]) 103 | 104 | self.mock_result2 = MockSearchResult(name='test_table2', 105 | key='test_key2', 106 | description='test_description2', 107 | cluster='gold', 108 | database='test_db2', 109 | schema='test_schema2', 110 | column_names=['test_col1', 'test_col2'], 111 | tags=self.mock_empty_tag, 112 | badges=self.mock_empty_badge, 113 | last_updated_timestamp=1527283287) 
114 | 115 | self.mock_result3 = Table(name='test_table3', 116 | key='test_key3', 117 | description='test_description3', 118 | cluster='gold', 119 | database='test_db3', 120 | schema='test_schema3', 121 | column_names=['test_col1', 'test_col2'], 122 | tags=[self.mock_tag], 123 | badges=[self.mock_badge], 124 | last_updated_timestamp=1527283287) 125 | 126 | self.mock_result4 = MockUserSearchResult(full_name='First Last', 127 | first_name='First', 128 | last_name='Last', 129 | team_name='Test team', 130 | email='test@email.com', 131 | github_username='ghub', 132 | manager_email='manager@email.com', 133 | is_active=True, 134 | employee_type='FTE', 135 | role_name='swe', 136 | new_attr='aaa') 137 | 138 | self.mock_dashboard_result = Dashboard(uri='dashboard_uri', 139 | cluster='gold', 140 | group_name='mode_dashboard_group', 141 | group_url='mode_dashboard_group_url', 142 | product='mode', 143 | name='mode_dashboard', 144 | url='mode_dashboard_url', 145 | description='test_dashboard', 146 | last_successful_run_timestamp=1000) 147 | 148 | def test_setup_client(self) -> None: 149 | self.es_proxy = ElasticsearchProxy( 150 | host="http://0.0.0.0:9200", 151 | user="elastic", 152 | password="elastic" 153 | ) 154 | a = self.es_proxy.elasticsearch 155 | for client in [a, a.cat, a.cluster, a.indices, a.ingest, a.nodes, a.snapshot, a.tasks]: 156 | self.assertEqual(client.transport.hosts[0]['host'], "0.0.0.0") 157 | self.assertEqual(client.transport.hosts[0]['port'], 9200) 158 | 159 | @patch('search_service.proxy.elasticsearch.Elasticsearch', autospec=True) 160 | def test_setup_client_with_username_and_password(self, elasticsearch_mock: MagicMock) -> None: 161 | self.es_proxy = ElasticsearchProxy( 162 | host='http://unit-test-host', 163 | user='unit-test-user', 164 | password='unit-test-pass' 165 | ) 166 | 167 | elasticsearch_mock.assert_called_once() 168 | elasticsearch_mock.assert_called_once_with( 169 | 'http://unit-test-host', 170 | http_auth=('unit-test-user', 'unit-test-pass') 171 | ) 172 | 173 | @patch('search_service.proxy.elasticsearch.Elasticsearch', autospec=True) 174 | def test_setup_client_without_username(self, elasticsearch_mock: MagicMock) -> None: 175 | self.es_proxy = ElasticsearchProxy( 176 | host='http://unit-test-host', 177 | user='' 178 | ) 179 | 180 | elasticsearch_mock.assert_called_once() 181 | elasticsearch_mock.assert_called_once_with('http://unit-test-host', http_auth=None) 182 | 183 | @patch('search_service.proxy._proxy_client', None) 184 | def test_setup_config(self) -> None: 185 | es: Any = get_proxy_client() 186 | a = es.elasticsearch 187 | for client in [a, a.cat, a.cluster, a.indices, a.ingest, a.nodes, a.snapshot, a.tasks]: 188 | self.assertEqual(client.transport.hosts[0]['host'], "0.0.0.0") 189 | self.assertEqual(client.transport.hosts[0]['port'], 9200) 190 | 191 | @patch('elasticsearch_dsl.Search.execute') 192 | def test_search_with_empty_query_string(self, mock_search: MagicMock) -> None: 193 | 194 | expected = SearchResult(total_results=0, results=[]) 195 | result = self.es_proxy.fetch_table_search_results(query_term='') 196 | 197 | # check the output was empty list 198 | self.assertDictEqual(vars(result), vars(expected), 199 | "Received non-empty search results!") 200 | 201 | # ensure elasticsearch_dsl Search endpoint was not called 202 | # assert_not_called doesn't work. 

    @patch('elasticsearch_dsl.Search.execute')
    def test_search_with_empty_result(self,
                                      mock_search: MagicMock) -> None:

        mock_results = MagicMock()
        mock_results.hits.total = 0
        mock_search.return_value = mock_results

        expected = SearchResult(total_results=0, results=[])
        result = self.es_proxy.fetch_table_search_results(query_term='test_query_term')
        self.assertDictEqual(vars(result), vars(expected),
                             "Received non-empty search results!")

    @patch('elasticsearch_dsl.Search.execute')
    def test_search_with_one_table_result(self,
                                          mock_search: MagicMock) -> None:

        mock_results = MagicMock()
        mock_results.hits.total = 1
        mock_results.__iter__.return_value = [Response(result=vars(self.mock_result1))]
        mock_search.return_value = mock_results

        expected = SearchResult(total_results=1,
                                results=[Table(name='test_table',
                                               key='test_key',
                                               description='test_description',
                                               cluster='gold',
                                               database='test_db',
                                               schema='test_schema',
                                               column_names=['test_col1', 'test_col2'],
                                               tags=[],
                                               badges=self.mock_empty_badge,
                                               last_updated_timestamp=1527283287)])

        resp = self.es_proxy.fetch_table_search_results(query_term='test_query_term')

        self.assertEqual(resp.total_results, expected.total_results,
                         "search result is not of length 1")
        self.assertIsInstance(resp.results[0],
                              Table,
                              "Search result received is not of 'Table' type!")
        self.assertDictEqual(vars(resp.results[0]), vars(expected.results[0]),
                             "Search result doesn't match the expected result!")

    @patch('elasticsearch_dsl.Search.execute')
    def test_search_with_multiple_result(self,
                                         mock_search: MagicMock) -> None:

        mock_results = MagicMock()
        mock_results.hits.total = 2
        mock_results.__iter__.return_value = [Response(result=vars(self.mock_result1)),
                                              Response(result=vars(self.mock_result2))]
        mock_search.return_value = mock_results

        expected = SearchResult(total_results=2,
                                results=[Table(name='test_table',
                                               key='test_key',
                                               description='test_description',
                                               cluster='gold',
                                               database='test_db',
                                               schema='test_schema',
                                               column_names=['test_col1', 'test_col2'],
                                               tags=[],
                                               badges=self.mock_empty_badge,
                                               last_updated_timestamp=1527283287),
                                         Table(name='test_table2',
                                               key='test_key2',
                                               description='test_description2',
                                               cluster='gold',
                                               database='test_db2',
                                               schema='test_schema2',
                                               column_names=['test_col1', 'test_col2'],
                                               tags=[],
                                               badges=self.mock_empty_badge,
                                               last_updated_timestamp=1527283287)])

        resp = self.es_proxy.fetch_table_search_results(query_term='test_query_term')

        self.assertEqual(resp.total_results, expected.total_results,
                         "search result is not of length 2")
        for i in range(2):
            self.assertIsInstance(resp.results[i],
                                  Table,
                                  "Search result received is not of 'Table' type!")
            self.assertDictEqual(vars(resp.results[i]),
                                 vars(expected.results[i]),
                                 "Search result doesn't match the expected result!")
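
    # The assertions above compare `vars(...)` dicts rather than the model
    # objects themselves, presumably because the result models do not define
    # `__eq__`; the same pattern recurs throughout the tests below.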

    @patch('elasticsearch_dsl.Search.execute')
    def test_search_table_filter(self, mock_search: MagicMock) -> None:
        mock_results = MagicMock()
        mock_results.hits.total = 1
        mock_results.__iter__.return_value = [Response(result=vars(self.mock_result1))]
        mock_search.return_value = mock_results

        expected = SearchResult(total_results=1,
                                results=[Table(name='test_table',
                                               key='test_key',
                                               description='test_description',
                                               cluster='gold',
                                               database='test_db',
                                               schema='test_schema',
                                               column_names=['test_col1', 'test_col2'],
                                               tags=self.mock_empty_tag,
                                               badges=self.mock_empty_badge,
                                               last_updated_timestamp=1527283287)])
        search_request = {
            'type': 'AND',
            'filters': {
                'database': ['hive', 'bigquery'],
                'schema': ['test-schema1', 'test-schema2'],
                'table': ['*amundsen*'],
                'column': ['*ds*'],
                'tag': ['test-tag'],
            }
        }
        resp = self.es_proxy.fetch_search_results_with_filter(search_request=search_request, query_term='test')

        self.assertEqual(resp.total_results, expected.total_results)
        self.assertIsInstance(resp.results[0], Table)
        self.assertDictEqual(vars(resp.results[0]), vars(expected.results[0]))

    def test_search_table_filter_return_no_results_if_no_search_request(self) -> None:
        resp = self.es_proxy.fetch_search_results_with_filter(search_request=None, query_term='test')

        self.assertEqual(resp.total_results, 0)
        self.assertEqual(resp.results, [])

    def test_search_table_filter_return_no_results_if_dsl_conversion_error(self) -> None:
        search_request = {
            'type': 'AND',
            'filters': {}
        }
        with patch.object(self.es_proxy, 'convert_query_json_to_query_dsl') as mock:
            mock.side_effect = MagicMock(side_effect=Exception('Test'))
            resp = self.es_proxy.fetch_search_results_with_filter(search_request=search_request,
                                                                  query_term='test')

            self.assertEqual(resp.total_results, 0)
            self.assertEqual(resp.results, [])

    def test_get_model_by_index_table(self) -> None:
        self.assertEqual(self.es_proxy.get_model_by_index(TABLE_INDEX), Table)

    def test_get_model_by_index_user(self) -> None:
        self.assertEqual(self.es_proxy.get_model_by_index(USER_INDEX), User)

    def test_get_model_by_index_raise_exception(self) -> None:
        self.assertRaises(Exception, self.es_proxy.get_model_by_index, 'some_fake_index')

    def test_parse_filters_return_results(self) -> None:
        filter_list = {
            'database': ['hive', 'bigquery'],
            'schema': ['test-schema1', 'test-schema2'],
            'table': ['*amundsen*'],
            'column': ['*ds*'],
            'tag': ['test-tag'],
        }
        expected_result = "database.raw:(hive OR bigquery) " \
                          "AND schema.raw:(test-schema1 OR test-schema2) " \
                          "AND name.raw:(*amundsen*) " \
                          "AND column_names.raw:(*ds*) " \
                          "AND tags:(test-tag)"
        self.assertEqual(self.es_proxy.parse_filters(filter_list,
                                                     index=TABLE_INDEX), expected_result)

    def test_parse_filters_return_no_results(self) -> None:
        filter_list = {
            'unsupported_category': ['fake']
        }
        self.assertEqual(self.es_proxy.parse_filters(filter_list,
                                                     index=TABLE_INDEX), '')

    def test_validate_wrong_filters_values(self) -> None:
        search_request = {
            "type": "AND",
            "filters": {
                "schema": ["test_schema:test_schema"],
                "table": ["test/table"]
            },
            "query_term": "",
            "page_index": 0
        }
        self.assertEqual(self.es_proxy.validate_filter_values(search_request), False)
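
    # As the pair of validation tests around this comment suggests,
    # validate_filter_values appears to reject filter values containing
    # reserved characters such as ':' or '/', while plain identifiers pass.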

    def test_validate_accepted_filters_values(self) -> None:
        search_request = {
            "type": "AND",
            "filters": {
                "schema": ["test_schema"],
                "table": ["test_table"]
            },
            "query_term": "a",
            "page_index": 0
        }
        self.assertEqual(self.es_proxy.validate_filter_values(search_request), True)

    def test_parse_query_term(self) -> None:
        term = 'test'
        expected_result = "(name:(*test*) OR name:(test) OR schema:(*test*) OR " \
                          "schema:(test) OR description:(*test*) OR description:(test) OR " \
                          "column_names:(*test*) OR column_names:(test) OR " \
                          "column_descriptions:(*test*) OR column_descriptions:(test))"
        self.assertEqual(self.es_proxy.parse_query_term(term,
                                                        index=TABLE_INDEX), expected_result)

    def test_convert_query_json_to_query_dsl_term_and_filters(self) -> None:
        term = 'test'
        test_filters = {
            'database': ['hive', 'bigquery'],
            'schema': ['test-schema1', 'test-schema2'],
            'table': ['*amundsen*'],
            'column': ['*ds*'],
            'tag': ['test-tag'],
        }
        search_request = {
            'type': 'AND',
            'filters': test_filters
        }

        expected_result = self.es_proxy.parse_filters(test_filters, index=TABLE_INDEX) + " AND " + \
            self.es_proxy.parse_query_term(term, index=TABLE_INDEX)
        ret_result = self.es_proxy.convert_query_json_to_query_dsl(search_request=search_request,
                                                                   query_term=term,
                                                                   index=TABLE_INDEX)
        self.assertEqual(ret_result, expected_result)

    def test_convert_query_json_to_query_dsl_no_term(self) -> None:
        term = ''
        test_filters = {
            'database': ['hive', 'bigquery'],
        }
        search_request = {
            'type': 'AND',
            'filters': test_filters
        }
        expected_result = self.es_proxy.parse_filters(test_filters,
                                                      index=TABLE_INDEX)
        ret_result = self.es_proxy.convert_query_json_to_query_dsl(search_request=search_request,
                                                                   query_term=term,
                                                                   index=TABLE_INDEX)
        self.assertEqual(ret_result, expected_result)

    def test_convert_query_json_to_query_dsl_no_filters(self) -> None:
        term = 'test'
        search_request = {
            'type': 'AND',
            'filters': {}
        }
        expected_result = self.es_proxy.parse_query_term(term,
                                                         index=TABLE_INDEX)
        ret_result = self.es_proxy.convert_query_json_to_query_dsl(search_request=search_request,
                                                                   query_term=term,
                                                                   index=TABLE_INDEX)
        self.assertEqual(ret_result, expected_result)

    def test_convert_query_json_to_query_dsl_raise_exception_no_term_or_filters(self) -> None:
        term = ''
        search_request = {
            'type': 'AND',
            'filters': {}
        }
        self.assertRaises(Exception, self.es_proxy.convert_query_json_to_query_dsl, search_request, term)
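
    # For reference, a combined term-and-filters request like the one above
    # converts to a single Lucene-style query string of the form
    #   "database.raw:(hive OR bigquery) AND ... AND (name:(*test*) OR ...)"
    # i.e. the output of parse_filters(...) joined to parse_query_term(...)
    # with " AND ", as the expected_result construction above shows.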

    @patch('elasticsearch_dsl.Search.execute')
    def test_search_with_one_user_result(self,
                                         mock_search: MagicMock) -> None:

        mock_results = MagicMock()
        mock_results.hits.total = 1
        mock_results.__iter__.return_value = [Response(result=vars(self.mock_result4))]
        mock_search.return_value = mock_results

        expected = SearchResult(total_results=1,
                                results=[User(full_name='First Last',
                                              first_name='First',
                                              last_name='Last',
                                              team_name='Test team',
                                              email='test@email.com',
                                              github_username='ghub',
                                              manager_email='manager@email.com',
                                              is_active=True,
                                              role_name='swe',
                                              employee_type='FTE')])

        resp = self.es_proxy.fetch_user_search_results(query_term='test_query_term',
                                                       index='user_search_index')

        self.assertEqual(resp.total_results, expected.total_results,
                         "search result is not of length 1")
        self.assertIsInstance(resp.results[0],
                              User,
                              "Search result received is not of 'User' type!")
        self.assertDictEqual(vars(resp.results[0]), vars(expected.results[0]),
                             "Search result doesn't match the expected result!")

    def test_create_document_with_no_data(self) -> None:
        expected = ''
        result = self.es_proxy.create_document(data=None, index='table_search_index')
        self.assertEqual(expected, result)

    @patch('uuid.uuid4')
    def test_create_document(self, mock_uuid: MagicMock) -> None:
        mock_elasticsearch = self.es_proxy.elasticsearch
        new_index_name = 'tester_index_name'
        mock_uuid.return_value = new_index_name
        mock_elasticsearch.indices.get_alias.return_value = {new_index_name: {}}
        start_data = [
            Table(cluster='blue', column_names=['1', '2'], database='snowflake',
                  schema='test_schema', description='A table for something',
                  key='snowflake://blue.test_schema/bank_accounts',
                  last_updated_timestamp=0, name='bank_accounts', tags=[], badges=self.mock_empty_badge,
                  column_descriptions=['desc'], schema_description='schema description 1'),
            Table(cluster='blue', column_names=['5', '6'], database='snowflake',
                  schema='test_schema', description='A table for lots of things!',
                  key='snowflake://blue.test_schema/bitcoin_wallets',
                  last_updated_timestamp=0, name='bitcoin_wallets', tags=[], badges=self.mock_empty_badge,
                  schema_description='schema description 2', programmatic_descriptions=["test"])
        ]
        expected_data = [
            {
                'index': {
                    '_index': new_index_name,
                    '_type': 'table',
                    '_id': 'snowflake://blue.test_schema/bank_accounts'
                }
            },
            {
                'cluster': 'blue',
                'column_names': ['1', '2'],
                'column_descriptions': ['desc'],
                'database': 'snowflake',
                'schema': 'test_schema',
                'description': 'A table for something',
                'display_name': None,
                'key': 'snowflake://blue.test_schema/bank_accounts',
                'last_updated_timestamp': 0,
                'name': 'bank_accounts',
                'tags': [],
                'badges': [],
                'total_usage': 0,
                'programmatic_descriptions': [],
                'schema_description': 'schema description 1',
            },
            {
                'index': {
                    '_index': new_index_name,
                    '_type': 'table',
                    '_id': 'snowflake://blue.test_schema/bitcoin_wallets'
                }
            },
            {
                'cluster': 'blue',
                'column_names': ['5', '6'],
                'column_descriptions': [],
                'database': 'snowflake',
                'schema': 'test_schema',
                'description': 'A table for lots of things!',
                'display_name': None,
                'key': 'snowflake://blue.test_schema/bitcoin_wallets',
                'last_updated_timestamp': 0,
                'name': 'bitcoin_wallets',
                'tags': [],
                'badges': [],
                'total_usage': 0,
                'schema_description': 'schema description 2',
                'programmatic_descriptions': ["test"]
            }
        ]
        mock_elasticsearch.bulk.return_value = {'errors': False}

        expected_alias = 'table_search_index'
        result = self.es_proxy.create_document(data=start_data, index=expected_alias)
        self.assertEqual(expected_alias, result)
        mock_elasticsearch.bulk.assert_called_with(expected_data)
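
    # The bulk payload asserted above follows the Elasticsearch bulk API
    # convention: each document is preceded by an action line ('index' here;
    # 'update' and 'delete' in the tests below) naming the target index,
    # document type, and document id.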

    def test_update_document_with_no_data(self) -> None:
        expected = ''
        result = self.es_proxy.update_document(data=None, index='table_search_index')
        self.assertEqual(expected, result)

    @patch('uuid.uuid4')
    def test_update_document(self, mock_uuid: MagicMock) -> None:
        mock_elasticsearch = self.es_proxy.elasticsearch
        new_index_name = 'tester_index_name'
        mock_elasticsearch.indices.get_alias.return_value = {new_index_name: {}}
        mock_uuid.return_value = new_index_name
        table_key = 'snowflake://blue.test_schema/bitcoin_wallets'
        expected_alias = 'table_search_index'
        data = [
            Table(cluster='blue', column_names=['5', '6'], database='snowflake',
                  schema='test_schema', description='A table for lots of things!',
                  key=table_key, last_updated_timestamp=0, name='bitcoin_wallets',
                  tags=[], column_descriptions=['hello'], badges=self.mock_empty_badge,
                  schema_description='schema description 1')
        ]
        expected_data = [
            {
                'update': {
                    '_index': new_index_name,
                    '_type': 'table',
                    '_id': table_key
                }
            },
            {
                'doc': {
                    'cluster': 'blue',
                    'column_names': ['5', '6'],
                    'column_descriptions': ['hello'],
                    'database': 'snowflake',
                    'schema': 'test_schema',
                    'description': 'A table for lots of things!',
                    'display_name': None,
                    'key': table_key,
                    'last_updated_timestamp': 0,
                    'name': 'bitcoin_wallets',
                    'tags': [],
                    'badges': [],
                    'total_usage': 0,
                    'programmatic_descriptions': [],
                    'schema_description': 'schema description 1',
                }
            }
        ]
        result = self.es_proxy.update_document(data=data, index=expected_alias)
        self.assertEqual(expected_alias, result)
        mock_elasticsearch.bulk.assert_called_with(expected_data)

    @patch('uuid.uuid4')
    def test_delete_table_document(self, mock_uuid: MagicMock) -> None:
        mock_elasticsearch = self.es_proxy.elasticsearch
        new_index_name = 'tester_index_name'
        mock_uuid.return_value = new_index_name
        mock_elasticsearch.indices.get_alias.return_value = {new_index_name: {}}
        expected_alias = 'table_search_index'
        data = ['id1', 'id2']

        expected_data = [
            {'delete': {'_index': new_index_name, '_id': 'id1', '_type': 'table'}},
            {'delete': {'_index': new_index_name, '_id': 'id2', '_type': 'table'}}
        ]
        result = self.es_proxy.delete_document(data=data, index=expected_alias)

        self.assertEqual(expected_alias, result)
        mock_elasticsearch.bulk.assert_called_with(expected_data)

    @patch('uuid.uuid4')
    def test_delete_user_document(self, mock_uuid: MagicMock) -> None:
        mock_elasticsearch = self.es_proxy.elasticsearch
        new_index_name = 'tester_index_name'
        mock_uuid.return_value = new_index_name
        mock_elasticsearch.indices.get_alias.return_value = {new_index_name: {}}
        expected_alias = 'user_search_index'
        data = ['id1', 'id2']

        expected_data = [
            {'delete': {'_index': new_index_name, '_id': 'id1', '_type': 'user'}},
            {'delete': {'_index': new_index_name, '_id': 'id2', '_type': 'user'}}
        ]
        result = self.es_proxy.delete_document(data=data, index=expected_alias)

        self.assertEqual(expected_alias, result)
        mock_elasticsearch.bulk.assert_called_with(expected_data)
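
    # In the write-path tests above, uuid.uuid4 is patched and
    # indices.get_alias is mocked so that the public alias (e.g.
    # 'table_search_index') resolves to a known index name. Judging by the
    # expected payloads, the proxy appears to address bulk actions to the
    # uuid-named index behind the alias rather than to the alias itself.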

    def test_get_instance_string(self) -> None:
        result = self.es_proxy._get_instance('column', 'value')
        self.assertEqual('value', result)

    def test_get_instance_tag(self) -> None:
        result = self.es_proxy._get_instance('tags', ['value'])
        tags = [Tag(tag_name='value')]
        self.assertEqual(tags, result)

    def test_get_instance_badge(self) -> None:
        result = self.es_proxy._get_instance('badges', ['badge1'])
        badges = [Tag(tag_name='badge1')]
        self.assertEqual(badges, result)

    @patch('search_service.proxy.elasticsearch.ElasticsearchProxy._search_helper')
    def test_fetch_dashboard_search_results(self,
                                            mock_search: MagicMock) -> None:

        # self.mock_dashboard_result is already constructed in setUp
        mock_search.return_value = SearchResult(total_results=1,
                                                results=[self.mock_dashboard_result])

        expected = SearchResult(total_results=1,
                                results=[Dashboard(uri='dashboard_uri',
                                                   cluster='gold',
                                                   group_name='mode_dashboard_group',
                                                   group_url='mode_dashboard_group_url',
                                                   product='mode',
                                                   name='mode_dashboard',
                                                   url='mode_dashboard_url',
                                                   description='test_dashboard',
                                                   last_successful_run_timestamp=1000)])

        resp = self.es_proxy.fetch_dashboard_search_results(query_term='test_query_term',
                                                            page_index=0,
                                                            index='dashboard_search_index')
        self.assertEqual(resp.total_results, expected.total_results)

        self.assertDictEqual(vars(resp.results[0]),
                             vars(expected.results[0]),
                             "Search result doesn't match the expected result!")
--------------------------------------------------------------------------------