├── lambda
├── search_layer
│ └── python
│ │ ├── PIL
│ │ ├── py.typed
│ │ ├── _imaging.pyi
│ │ ├── _webp.pyi
│ │ ├── _imagingft.pyi
│ │ ├── _imagingmath.pyi
│ │ ├── _imagingmorph.pyi
│ │ ├── _version.py
│ │ ├── report.py
│ │ ├── __main__.py
│ │ ├── _webp.cpython-39-x86_64-linux-gnu.so
│ │ ├── _imaging.cpython-39-x86_64-linux-gnu.so
│ │ ├── _imagingft.cpython-39-x86_64-linux-gnu.so
│ │ ├── _imagingtk.cpython-39-x86_64-linux-gnu.so
│ │ ├── _imagingcms.cpython-39-x86_64-linux-gnu.so
│ │ ├── _imagingmath.cpython-39-x86_64-linux-gnu.so
│ │ ├── _imagingmorph.cpython-39-x86_64-linux-gnu.so
│ │ ├── ImagePath.py
│ │ ├── _tkinter_finder.py
│ │ ├── _typing.py
│ │ ├── _util.py
│ │ ├── PaletteFile.py
│ │ ├── GimpPaletteFile.py
│ │ ├── PcdImagePlugin.py
│ │ ├── GribStubImagePlugin.py
│ │ ├── Hdf5StubImagePlugin.py
│ │ ├── BufrStubImagePlugin.py
│ │ ├── TarIO.py
│ │ ├── PixarImagePlugin.py
│ │ ├── CurImagePlugin.py
│ │ ├── McIdasImagePlugin.py
│ │ ├── _deprecate.py
│ │ ├── DcxImagePlugin.py
│ │ ├── MpegImagePlugin.py
│ │ ├── __init__.py
│ │ ├── XVThumbImagePlugin.py
│ │ ├── ImageSequence.py
│ │ ├── XbmImagePlugin.py
│ │ ├── _binary.py
│ │ ├── ImageMode.py
│ │ ├── MicImagePlugin.py
│ │ └── ImtImagePlugin.py
│ │ ├── certifi
│ │ ├── py.typed
│ │ ├── __init__.py
│ │ └── __main__.py
│ │ ├── idna
│ │ ├── py.typed
│ │ ├── package_data.py
│ │ ├── compat.py
│ │ ├── __init__.py
│ │ └── intranges.py
│ │ ├── opensearchpy
│ │ ├── py.typed
│ │ ├── plugins
│ │ │ └── __init__.py
│ │ ├── _async
│ │ │ ├── helpers
│ │ │ │ ├── __init__.py
│ │ │ │ └── test.py
│ │ │ ├── plugins
│ │ │ │ └── __init__.py
│ │ │ ├── __init__.py
│ │ │ ├── client
│ │ │ │ ├── remote.py
│ │ │ │ ├── utils.py
│ │ │ │ ├── client.py
│ │ │ │ ├── remote_store.py
│ │ │ │ ├── plugins.py
│ │ │ │ └── features.py
│ │ │ ├── compat.py
│ │ │ └── _extra_imports.py
│ │ ├── _version.py
│ │ ├── client
│ │ │ ├── remote.py
│ │ │ ├── client.py
│ │ │ ├── remote_store.py
│ │ │ ├── plugins.py
│ │ │ └── features.py
│ │ ├── connection
│ │ │ ├── __init__.py
│ │ │ └── pooling.py
│ │ ├── helpers
│ │ │ ├── errors.py
│ │ │ ├── __init__.py
│ │ │ ├── response
│ │ │ │ └── hit.py
│ │ │ └── asyncsigner.py
│ │ └── compat.py
│ │ ├── charset_normalizer
│ │ ├── py.typed
│ │ ├── __main__.py
│ │ ├── version.py
│ │ ├── cli
│ │ │ └── __init__.py
│ │ ├── md.cpython-38-x86_64-linux-gnu.so
│ │ ├── md__mypyc.cpython-38-x86_64-linux-gnu.so
│ │ ├── __init__.py
│ │ └── legacy.py
│ │ ├── urllib3
│ │ ├── contrib
│ │ │ ├── __init__.py
│ │ │ ├── _securetransport
│ │ │ │ └── __init__.py
│ │ │ └── _appengine_environ.py
│ │ ├── packages
│ │ │ ├── __init__.py
│ │ │ └── backports
│ │ │ │ ├── __init__.py
│ │ │ │ └── makefile.py
│ │ ├── _version.py
│ │ ├── util
│ │ │ ├── queue.py
│ │ │ ├── __init__.py
│ │ │ └── proxy.py
│ │ └── filepost.py
│ │ ├── idna-3.6.dist-info
│ │ ├── INSTALLER
│ │ ├── WHEEL
│ │ ├── RECORD
│ │ └── LICENSE.md
│ │ ├── pillow-10.3.0.dist-info
│ │ ├── REQUESTED
│ │ ├── INSTALLER
│ │ ├── zip-safe
│ │ ├── top_level.txt
│ │ └── WHEEL
│ │ ├── opensearch_py-2.5.0.dist-info
│ │ ├── REQUESTED
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── WHEEL
│ │ ├── AUTHORS
│ │ └── NOTICE.txt
│ │ ├── six-1.16.0.dist-info
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── WHEEL
│ │ ├── RECORD
│ │ ├── LICENSE
│ │ └── METADATA
│ │ ├── certifi-2024.2.2.dist-info
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── WHEEL
│ │ ├── RECORD
│ │ ├── LICENSE
│ │ └── METADATA
│ │ ├── requests-2.31.0.dist-info
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── WHEEL
│ │ └── RECORD
│ │ ├── urllib3-1.26.18.dist-info
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── WHEEL
│ │ └── LICENSE.txt
│ │ ├── python_dateutil-2.9.0.post0.dist-info
│ │ ├── zip-safe
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── WHEEL
│ │ ├── LICENSE
│ │ └── RECORD
│ │ ├── charset_normalizer-3.3.2.dist-info
│ │ ├── INSTALLER
│ │ ├── top_level.txt
│ │ ├── entry_points.txt
│ │ ├── WHEEL
│ │ ├── LICENSE
│ │ └── RECORD
│ │ ├── dateutil
│ │ ├── tzwin.py
│ │ ├── zoneinfo
│ │ │ ├── dateutil-zoneinfo.tar.gz
│ │ │ └── rebuild.py
│ │ ├── _version.py
│ │ ├── tz
│ │ │ ├── __init__.py
│ │ │ └── _factories.py
│ │ ├── __init__.py
│ │ ├── _common.py
│ │ ├── parser
│ │ │ └── __init__.py
│ │ ├── utils.py
│ │ └── easter.py
│ │ ├── pillow.libs
│ │ ├── libXau-00ec42fe.so.6.0.0
│ │ ├── libjpeg-46d5cb53.so.62.4.0
│ │ ├── liblzma-d1e41b3a.so.5.4.5
│ │ ├── libtiff-f683b479.so.6.0.2
│ │ ├── libwebp-8a0843dd.so.7.1.8
│ │ ├── libxcb-ac5351d8.so.1.1.0
│ │ ├── liblcms2-8d000061.so.2.0.16
│ │ ├── libopenjp2-d64ae697.so.2.5.2
│ │ ├── libpng16-8b7e0db9.so.16.43.0
│ │ ├── libbrotlidec-6c4e80e7.so.1.1.0
│ │ ├── libfreetype-e3eca772.so.6.20.1
│ │ ├── libsharpyuv-652b6057.so.0.0.1
│ │ ├── libwebpdemux-f9b98349.so.2.0.14
│ │ ├── libwebpmux-b067bc14.so.3.0.13
│ │ ├── libbrotlicommon-c43ca8d5.so.1.1.0
│ │ └── libharfbuzz-59c4b977.so.0.60840.0
│ │ ├── bin
│ │ └── normalizer
│ │ ├── requests
│ │ ├── __version__.py
│ │ ├── certs.py
│ │ ├── hooks.py
│ │ ├── packages.py
│ │ ├── _internal_utils.py
│ │ └── compat.py
│ │ └── model.py
├── personalize_ranking
│ └── lambda_function.py
└── bedrock_invoke
│ └── lambda_function.py
├── deployment
├── package.json
├── requirements.txt
├── package-lock.json
├── cdk.json
├── app.py
├── Resource_deployment_README.md
└── lib
│ └── ss_search_notebook.py
├── assets
├── ec2-1.png
├── ec2-2.png
├── ec2-3.png
├── ec2-4.png
├── ec2-5.png
├── ec2-6.png
├── ec2-7.png
├── ec2-8.png
├── data-load.png
├── text-search.png
├── architecture.png
├── data-load-ui.png
├── image-search.png
├── text-search-ui.png
├── image-search-ui.png
├── search_pipeline.png
├── text_search_demo.png
└── image_search_demo.png
├── model
├── clip_classification_deploy
│ ├── clip
│ │ ├── requirements.txt
│ │ └── serving.properties
│ └── test-image-clip.jpeg
└── clip_image_embedding_deploy
│ ├── clip
│ ├── requirements.txt
│ └── serving.properties
│ └── test-image-clip.jpeg
├── .gitignore
├── CODE_OF_CONDUCT.md
├── web_ui
├── aws_search_demo.py
└── UI_deployment_README.md
├── LICENSE
└── data_load_offline
└── embeddings.py
/lambda/search_layer/python/PIL/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/py.typed: -------------------------------------------------------------------------------- 1 | --------------------------------------------------------------------------------
/lambda/search_layer/python/urllib3/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/packages/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna-3.6.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow-10.3.0.dist-info/REQUESTED: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearch_py-2.5.0.dist-info/REQUESTED: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow-10.3.0.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow-10.3.0.dist-info/zip-safe: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/six-1.16.0.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/packages/backports/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi-2024.2.2.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow-10.3.0.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | PIL 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests-2.31.0.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/six-1.16.0.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | six 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3-1.26.18.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/contrib/_securetransport/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/lambda/search_layer/python/idna/package_data.py: -------------------------------------------------------------------------------- 1 | __version__ = '3.6' 2 | 3 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearch_py-2.5.0.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/python_dateutil-2.9.0.post0.dist-info/zip-safe: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3-1.26.18.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | urllib3 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi-2024.2.2.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | certifi 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer-3.3.2.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/python_dateutil-2.9.0.post0.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests-2.31.0.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | requests 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearch_py-2.5.0.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | opensearchpy 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/python_dateutil-2.9.0.post0.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | dateutil 2 | -------------------------------------------------------------------------------- /deployment/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "aws-cdk": "^2.75.0" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer-3.3.2.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | charset_normalizer 2 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/tzwin.py: -------------------------------------------------------------------------------- 1 | # tzwin has moved to dateutil.tz.win 2 | from .tz.win import * 3 | -------------------------------------------------------------------------------- /assets/ec2-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-1.png 
-------------------------------------------------------------------------------- /assets/ec2-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-2.png -------------------------------------------------------------------------------- /assets/ec2-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-3.png -------------------------------------------------------------------------------- /assets/ec2-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-4.png -------------------------------------------------------------------------------- /assets/ec2-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-5.png -------------------------------------------------------------------------------- /assets/ec2-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-6.png -------------------------------------------------------------------------------- /assets/ec2-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-7.png -------------------------------------------------------------------------------- /assets/ec2-8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/ec2-8.png -------------------------------------------------------------------------------- /deployment/requirements.txt: -------------------------------------------------------------------------------- 1 | aws-cdk-lib 2 | constructs>=10.0.0,<11.0.0 3 | sagemaker 4 | requests==2.28.2 5 | pandas -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imaging.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def __getattr__(name: str) -> Any: ... 4 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_webp.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def __getattr__(name: str) -> Any: ... 
4 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/_version.py: -------------------------------------------------------------------------------- 1 | # This file is protected via CODEOWNERS 2 | __version__ = "1.26.18" 3 | -------------------------------------------------------------------------------- /assets/data-load.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/data-load.png -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingft.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def __getattr__(name: str) -> Any: ... 4 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingmath.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def __getattr__(name: str) -> Any: ... 4 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingmorph.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def __getattr__(name: str) -> Any: ... 4 | -------------------------------------------------------------------------------- /assets/text-search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/text-search.png -------------------------------------------------------------------------------- /model/clip_classification_deploy/clip/requirements.txt: -------------------------------------------------------------------------------- 1 | Pillow 2 | requests 3 | safetensors 4 | tqdm 5 | transformers>=4.27.1 6 | -------------------------------------------------------------------------------- /assets/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/architecture.png -------------------------------------------------------------------------------- /assets/data-load-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/data-load-ui.png -------------------------------------------------------------------------------- /assets/image-search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/image-search.png -------------------------------------------------------------------------------- /assets/text-search-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/text-search-ui.png -------------------------------------------------------------------------------- /model/clip_image_embedding_deploy/clip/requirements.txt: -------------------------------------------------------------------------------- 1 | Pillow 2 | requests 3 | safetensors 4 | tqdm 5 | transformers>=4.27.1 6 | 
-------------------------------------------------------------------------------- /assets/image-search-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/image-search-ui.png -------------------------------------------------------------------------------- /assets/search_pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/search_pipeline.png -------------------------------------------------------------------------------- /assets/text_search_demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/text_search_demo.png -------------------------------------------------------------------------------- /assets/image_search_demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/assets/image_search_demo.png -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_version.py: -------------------------------------------------------------------------------- 1 | # Master version for Pillow 2 | from __future__ import annotations 3 | 4 | __version__ = "10.3.0" 5 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/__main__.py: -------------------------------------------------------------------------------- 1 | from .cli import cli_detect 2 | 3 | if __name__ == "__main__": 4 | cli_detect() 5 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna-3.6.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: flit 3.9.0 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer-3.3.2.dist-info/entry_points.txt: -------------------------------------------------------------------------------- 1 | [console_scripts] 2 | normalizer = charset_normalizer.cli:cli_detect 3 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi/__init__.py: -------------------------------------------------------------------------------- 1 | from .core import contents, where 2 | 3 | __all__ = ["contents", "where"] 4 | __version__ = "2024.02.02" 5 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/report.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from .features import pilinfo 4 | 5 | pilinfo(supported_formats=False) 6 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Expose version 3 | """ 4 | 5 | __version__ = "3.3.2" 6 | VERSION = __version__.split(".") 7 | -------------------------------------------------------------------------------- 
/lambda/search_layer/python/certifi-2024.2.2.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.42.0) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests-2.31.0.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.40.0) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .~*.py 2 | /lambda/search_layer/__pycache__/ 3 | *.DS_Store 4 | deployment/cdk.out 5 | deployment/node_module 6 | deployment/lib/__pycache__ 7 | *.pyc 8 | .idea/ 9 | -------------------------------------------------------------------------------- /model/clip_classification_deploy/clip/serving.properties: -------------------------------------------------------------------------------- 1 | engine = Python 2 | option.tensor_parallel_degree = 1 3 | option.model_id = s3://sagemaker-us-east-1-631450739534/model_clip/ -------------------------------------------------------------------------------- /model/clip_image_embedding_deploy/clip/serving.properties: -------------------------------------------------------------------------------- 1 | engine = Python 2 | option.tensor_parallel_degree = 1 3 | option.model_id = s3://sagemaker-us-east-1-631450739534/model_clip/ -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/cli/__init__.py: -------------------------------------------------------------------------------- 1 | from .__main__ import cli_detect, query_yes_no 2 | 3 | __all__ = ( 4 | "cli_detect", 5 | "query_yes_no", 6 | ) 7 | -------------------------------------------------------------------------------- /lambda/search_layer/python/six-1.16.0.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.36.2) 3 | Root-Is-Purelib: true 4 | Tag: py2-none-any 5 | Tag: py3-none-any 6 | 7 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3-1.26.18.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.41.2) 3 | Root-Is-Purelib: true 4 | Tag: py2-none-any 5 | Tag: py3-none-any 6 | 7 | -------------------------------------------------------------------------------- /model/clip_classification_deploy/test-image-clip.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/model/clip_classification_deploy/test-image-clip.jpeg -------------------------------------------------------------------------------- /model/clip_image_embedding_deploy/test-image-clip.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/model/clip_image_embedding_deploy/test-image-clip.jpeg -------------------------------------------------------------------------------- 
/lambda/search_layer/python/opensearch_py-2.5.0.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.43.0) 3 | Root-Is-Purelib: true 4 | Tag: py2-none-any 5 | Tag: py3-none-any 6 | 7 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | 5 | from .features import pilinfo 6 | 7 | pilinfo(supported_formats="--report" not in sys.argv) 8 | -------------------------------------------------------------------------------- /lambda/search_layer/python/python_dateutil-2.9.0.post0.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.42.0) 3 | Root-Is-Purelib: true 4 | Tag: py2-none-any 5 | Tag: py3-none-any 6 | 7 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libXau-00ec42fe.so.6.0.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libXau-00ec42fe.so.6.0.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libjpeg-46d5cb53.so.62.4.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libjpeg-46d5cb53.so.62.4.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/liblzma-d1e41b3a.so.5.4.5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/liblzma-d1e41b3a.so.5.4.5 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libtiff-f683b479.so.6.0.2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libtiff-f683b479.so.6.0.2 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libwebp-8a0843dd.so.7.1.8: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libwebp-8a0843dd.so.7.1.8 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libxcb-ac5351d8.so.1.1.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libxcb-ac5351d8.so.1.1.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_webp.cpython-39-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_webp.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearch_py-2.5.0.dist-info/AUTHORS: -------------------------------------------------------------------------------- 1 | For a list of all our amazing authors please see the contributors page: 2 | https://github.com/opensearch-project/opensearch-py/graphs/contributors 3 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/liblcms2-8d000061.so.2.0.16: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/liblcms2-8d000061.so.2.0.16 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libopenjp2-d64ae697.so.2.5.2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libopenjp2-d64ae697.so.2.5.2 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libpng16-8b7e0db9.so.16.43.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libpng16-8b7e0db9.so.16.43.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imaging.cpython-39-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_imaging.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libbrotlidec-6c4e80e7.so.1.1.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libbrotlidec-6c4e80e7.so.1.1.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libfreetype-e3eca772.so.6.20.1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libfreetype-e3eca772.so.6.20.1 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libsharpyuv-652b6057.so.0.0.1: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libsharpyuv-652b6057.so.0.0.1 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libwebpdemux-f9b98349.so.2.0.14: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libwebpdemux-f9b98349.so.2.0.14 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libwebpmux-b067bc14.so.3.0.13: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libwebpmux-b067bc14.so.3.0.13 -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingft.cpython-39-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_imagingft.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingtk.cpython-39-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_imagingtk.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow-10.3.0.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.43.0) 3 | Root-Is-Purelib: false 4 | Tag: cp39-cp39-manylinux_2_17_x86_64 5 | Tag: cp39-cp39-manylinux2014_x86_64 6 | 7 | -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libbrotlicommon-c43ca8d5.so.1.1.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libbrotlicommon-c43ca8d5.so.1.1.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/pillow.libs/libharfbuzz-59c4b977.so.0.60840.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/pillow.libs/libharfbuzz-59c4b977.so.0.60840.0 -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingcms.cpython-39-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_imagingcms.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingmath.cpython-39-x86_64-linux-gnu.so: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_imagingmath.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_imagingmorph.cpython-39-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/PIL/_imagingmorph.cpython-39-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/_version.py: -------------------------------------------------------------------------------- 1 | # file generated by setuptools_scm 2 | # don't change, don't track in version control 3 | __version__ = version = '2.9.0.post0' 4 | __version_tuple__ = version_tuple = (2, 9, 0) 5 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer-3.3.2.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.41.2) 3 | Root-Is-Purelib: false 4 | Tag: cp38-cp38-manylinux_2_17_x86_64 5 | Tag: cp38-cp38-manylinux2014_x86_64 6 | 7 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/md.cpython-38-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/charset_normalizer/md.cpython-38-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/intelligent-product-search-with-llm/HEAD/lambda/search_layer/python/charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /lambda/search_layer/python/bin/normalizer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # -*- coding: utf-8 -*- 3 | import re 4 | import sys 5 | from charset_normalizer.cli import cli_detect 6 | if __name__ == '__main__': 7 | sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) 8 | sys.exit(cli_detect()) 9 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 
5 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi/__main__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from certifi import contents, where 4 | 5 | parser = argparse.ArgumentParser() 6 | parser.add_argument("-c", "--contents", action="store_true") 7 | args = parser.parse_args() 8 | 9 | if args.contents: 10 | print(contents()) 11 | else: 12 | print(where()) 13 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearch_py-2.5.0.dist-info/NOTICE.txt: -------------------------------------------------------------------------------- 1 | OpenSearch (https://opensearch.org/) 2 | Copyright OpenSearch Contributors 3 | 4 | This product includes software developed by 5 | Elasticsearch (http://www.elastic.co). 6 | 7 | This product includes software developed by The Apache Software 8 | Foundation (http://www.apache.org/). 9 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 
9 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna/compat.py: -------------------------------------------------------------------------------- 1 | from .core import * 2 | from .codec import * 3 | from typing import Any, Union 4 | 5 | def ToASCII(label: str) -> bytes: 6 | return encode(label) 7 | 8 | def ToUnicode(label: Union[bytes, bytearray]) -> str: 9 | return decode(label) 10 | 11 | def nameprep(s: Any) -> None: 12 | raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') 13 | 14 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/ImagePath.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library 3 | # $Id$ 4 | # 5 | # path interface 6 | # 7 | # History: 8 | # 1996-11-04 fl Created 9 | # 2002-04-14 fl Added documentation stub class 10 | # 11 | # Copyright (c) Secret Labs AB 1997. 12 | # Copyright (c) Fredrik Lundh 1996. 13 | # 14 | # See the README file for information on usage and redistribution. 15 | # 16 | from __future__ import annotations 17 | 18 | from . import Image 19 | 20 | Path = Image.core.path 21 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/tz/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .tz import * 3 | from .tz import __doc__ 4 | 5 | __all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", 6 | "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", 7 | "enfold", "datetime_ambiguous", "datetime_exists", 8 | "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] 9 | 10 | 11 | class DeprecatedTzFormatWarning(Warning): 12 | """Warning raised when time zones are parsed from deprecated formats.""" 13 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests/__version__.py: -------------------------------------------------------------------------------- 1 | # .-. .-. .-. . . .-. .-. .-. .-. 2 | # |( |- |.| | | |- `-. | `-. 3 | # ' ' `-' `-`.`-' `-' `-' ' `-' 4 | 5 | __title__ = "requests" 6 | __description__ = "Python HTTP for Humans." 7 | __url__ = "https://requests.readthedocs.io" 8 | __version__ = "2.31.0" 9 | __build__ = 0x023100 10 | __author__ = "Kenneth Reitz" 11 | __author_email__ = "me@kennethreitz.org" 12 | __license__ = "Apache 2.0" 13 | __copyright__ = "Copyright Kenneth Reitz" 14 | __cake__ = "\u2728 \U0001f370 \u2728" 15 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests/certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | requests.certs 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module returns the preferred default CA certificate bundle. There is 8 | only one — the one from the certifi package. 9 | 10 | If you are packaging Requests, e.g., for a Linux distribution or a managed 11 | environment, you can change the definition of where() to return a separately 12 | packaged CA bundle. 
13 | """ 14 | from certifi import where 15 | 16 | if __name__ == "__main__": 17 | print(where()) 18 | -------------------------------------------------------------------------------- /lambda/search_layer/python/six-1.16.0.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | __pycache__/six.cpython-38.pyc,, 2 | six-1.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 3 | six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066 4 | six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795 5 | six-1.16.0.dist-info/RECORD,, 6 | six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 7 | six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 8 | six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 9 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/util/queue.py: -------------------------------------------------------------------------------- 1 | import collections 2 | 3 | from ..packages import six 4 | from ..packages.six.moves import queue 5 | 6 | if six.PY2: 7 | # Queue is imported for side effects on MS Windows. See issue #229. 8 | import Queue as _unused_module_Queue # noqa: F401 9 | 10 | 11 | class LifoQueue(queue.Queue): 12 | def _init(self, _): 13 | self.queue = collections.deque() 14 | 15 | def _qsize(self, len=len): 16 | return len(self.queue) 17 | 18 | def _put(self, item): 19 | self.queue.append(item) 20 | 21 | def _get(self): 22 | return self.queue.pop() 23 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_tkinter_finder.py: -------------------------------------------------------------------------------- 1 | """ Find compiled module linking to Tcl / Tk libraries 2 | """ 3 | 4 | from __future__ import annotations 5 | 6 | import sys 7 | import tkinter 8 | 9 | tk = getattr(tkinter, "_tkinter") 10 | 11 | try: 12 | if hasattr(sys, "pypy_find_executable"): 13 | TKINTER_LIB = tk.tklib_cffi.__file__ 14 | else: 15 | TKINTER_LIB = tk.__file__ 16 | except AttributeError: 17 | # _tkinter may be compiled directly into Python, in which case __file__ is 18 | # not available. load_tkinter_funcs will check the binary first in any case. 19 | TKINTER_LIB = None 20 | 21 | tk_version = str(tkinter.TkVersion) 22 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | 4 | try: 5 | from ._version import version as __version__ 6 | except ImportError: 7 | __version__ = 'unknown' 8 | 9 | __all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', 10 | 'utils', 'zoneinfo'] 11 | 12 | def __getattr__(name): 13 | import importlib 14 | 15 | if name in __all__: 16 | return importlib.import_module("." + name, __name__) 17 | raise AttributeError( 18 | "module {!r} has not attribute {!r}".format(__name__, name) 19 | ) 20 | 21 | 22 | def __dir__(): 23 | # __dir__ should include all the lazy-importable modules as well. 
24 | return [x for x in globals() if x not in sys.modules] + __all__ 25 | -------------------------------------------------------------------------------- /web_ui/aws_search_demo.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | 3 | st.set_page_config( 4 | page_title = 'aws search solution' 5 | ) 6 | 7 | st.write('# AWS Product Search Solution') 8 | 9 | st.markdown( 10 | """ 11 | AWS Product Search Solution is a solution for improving e-commerce product search results. Its main features include: 12 | ### product search 13 | - Text search: Search by matching the query text entered by the user with the product description text 14 | - Vector search: Search by calculating the vector similarity between the query text vector input by the user and the product description text vector. 15 | - Hybrid search: supports both text search and vector search 16 | 17 | ### Image search 18 | - Vector search: Search by calculating the vector similarity between the user input image vector and the product image vector 19 | 20 | 21 | """ 22 | ) 23 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests/hooks.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.hooks 3 | ~~~~~~~~~~~~~~ 4 | 5 | This module provides the capabilities for the Requests hooks system. 6 | 7 | Available hooks: 8 | 9 | ``response``: 10 | The response generated from a Request. 11 | """ 12 | HOOKS = ["response"] 13 | 14 | 15 | def default_hooks(): 16 | return {event: [] for event in HOOKS} 17 | 18 | 19 | # TODO: response is the only one 20 | 21 | 22 | def dispatch_hook(key, hooks, hook_data, **kwargs): 23 | """Dispatches a hook dictionary on a given piece of data.""" 24 | hooks = hooks or {} 25 | hooks = hooks.get(key) 26 | if hooks: 27 | if hasattr(hooks, "__call__"): 28 | hooks = [hooks] 29 | for hook in hooks: 30 | _hook_data = hook(hook_data, **kwargs) 31 | if _hook_data is not None: 32 | hook_data = _hook_data 33 | return hook_data 34 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_typing.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import sys 5 | from typing import Protocol, Sequence, TypeVar, Union 6 | 7 | if sys.version_info >= (3, 10): 8 | from typing import TypeGuard 9 | else: 10 | try: 11 | from typing_extensions import TypeGuard 12 | except ImportError: 13 | from typing import Any 14 | 15 | class TypeGuard: # type: ignore[no-redef] 16 | def __class_getitem__(cls, item: Any) -> type[bool]: 17 | return bool 18 | 19 | 20 | Coords = Union[Sequence[float], Sequence[Sequence[float]]] 21 | 22 | 23 | _T_co = TypeVar("_T_co", covariant=True) 24 | 25 | 26 | class SupportsRead(Protocol[_T_co]): 27 | def read(self, __length: int = ...) -> _T_co: ... 
28 | 29 | 30 | StrOrBytesPath = Union[str, bytes, "os.PathLike[str]", "os.PathLike[bytes]"] 31 | 32 | 33 | __all__ = ["TypeGuard", "StrOrBytesPath", "SupportsRead"] 34 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_util.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from typing import Any, NoReturn 5 | 6 | from ._typing import StrOrBytesPath, TypeGuard 7 | 8 | 9 | def is_path(f: Any) -> TypeGuard[StrOrBytesPath]: 10 | return isinstance(f, (bytes, str, os.PathLike)) 11 | 12 | 13 | def is_directory(f: Any) -> TypeGuard[StrOrBytesPath]: 14 | """Checks if an object is a string, and that it points to a directory.""" 15 | return is_path(f) and os.path.isdir(f) 16 | 17 | 18 | class DeferredError: 19 | def __init__(self, ex: BaseException): 20 | self.ex = ex 21 | 22 | def __getattr__(self, elt: str) -> NoReturn: 23 | raise self.ex 24 | 25 | @staticmethod 26 | def new(ex: BaseException) -> Any: 27 | """ 28 | Creates an object that raises the wrapped exception ``ex`` when used, 29 | and casts it to :py:obj:`~typing.Any` type. 30 | """ 31 | return DeferredError(ex) 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna/__init__.py: -------------------------------------------------------------------------------- 1 | from .package_data import __version__ 2 | from .core import ( 3 | IDNABidiError, 4 | IDNAError, 5 | InvalidCodepoint, 6 | InvalidCodepointContext, 7 | alabel, 8 | check_bidi, 9 | check_hyphen_ok, 10 | check_initial_combiner, 11 | check_label, 12 | check_nfc, 13 | decode, 14 | encode, 15 | ulabel, 16 | uts46_remap, 17 | valid_contextj, 18 | valid_contexto, 19 | valid_label_length, 20 | valid_string_length, 21 | ) 22 | from .intranges import intranges_contain 23 | 24 | __all__ = [ 25 | "IDNABidiError", 26 | "IDNAError", 27 | "InvalidCodepoint", 28 | "InvalidCodepointContext", 29 | "alabel", 30 | "check_bidi", 31 | "check_hyphen_ok", 32 | "check_initial_combiner", 33 | "check_label", 34 | "check_nfc", 35 | "decode", 36 | "encode", 37 | "intranges_contain", 38 | "ulabel", 39 | "uts46_remap", 40 | "valid_contextj", 41 | "valid_contexto", 42 | "valid_label_length", 43 | "valid_string_length", 44 | ] 45 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi-2024.2.2.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | certifi-2024.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 3 | certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170 4 | certifi-2024.2.2.dist-info/RECORD,, 5 | certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 6 | certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 7 | certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94 8 | certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 9 | certifi/__pycache__/__init__.cpython-38.pyc,, 10 | certifi/__pycache__/__main__.cpython-38.pyc,, 11 | certifi/__pycache__/core.cpython-38.pyc,, 12 | certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541 13 | certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 14 | certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 15 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests/packages.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | try: 4 | import chardet 5 | except ImportError: 6 | import warnings 7 | 8 | import charset_normalizer as chardet 9 | 10 | warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") 11 | 12 | # This code exists for backwards compatibility reasons. 13 | # I don't like it either. Just look the other way. 
:) 14 | 15 | for package in ("urllib3", "idna"): 16 | locals()[package] = __import__(package) 17 | # This traversal is apparently necessary such that the identities are 18 | # preserved (requests.packages.urllib3.* is urllib3.*) 19 | for mod in list(sys.modules): 20 | if mod == package or mod.startswith(f"{package}."): 21 | sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] 22 | 23 | target = chardet.__name__ 24 | for mod in list(sys.modules): 25 | if mod == target or mod.startswith(f"{target}."): 26 | target = target.replace(target, "chardet") 27 | sys.modules[f"requests.packages.{target}"] = sys.modules[mod] 28 | # Kinda cool, though, right? 29 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi-2024.2.2.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | This package contains a modified version of ca-bundle.crt: 2 | 3 | ca-bundle.crt -- Bundle of CA Root Certificates 4 | 5 | This is a bundle of X.509 certificates of public Certificate Authorities 6 | (CA). These were automatically extracted from Mozilla's root certificates 7 | file (certdata.txt). This file can be found in the mozilla source tree: 8 | https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt 9 | It contains the certificates in PEM format and therefore 10 | can be directly used with curl / libcurl / php_curl, or with 11 | an Apache+mod_ssl webserver for SSL client authentication. 12 | Just configure this file as the SSLCACertificateFile.# 13 | 14 | ***** BEGIN LICENSE BLOCK ***** 15 | This Source Code Form is subject to the terms of the Mozilla Public License, 16 | v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain 17 | one at http://mozilla.org/MPL/2.0/. 18 | 19 | ***** END LICENSE BLOCK ***** 20 | @(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ 21 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/_common.py: -------------------------------------------------------------------------------- 1 | """ 2 | Common code used in multiple modules. 3 | """ 4 | 5 | 6 | class weekday(object): 7 | __slots__ = ["weekday", "n"] 8 | 9 | def __init__(self, weekday, n=None): 10 | self.weekday = weekday 11 | self.n = n 12 | 13 | def __call__(self, n): 14 | if n == self.n: 15 | return self 16 | else: 17 | return self.__class__(self.weekday, n) 18 | 19 | def __eq__(self, other): 20 | try: 21 | if self.weekday != other.weekday or self.n != other.n: 22 | return False 23 | except AttributeError: 24 | return False 25 | return True 26 | 27 | def __hash__(self): 28 | return hash(( 29 | self.weekday, 30 | self.n, 31 | )) 32 | 33 | def __ne__(self, other): 34 | return not (self == other) 35 | 36 | def __repr__(self): 37 | s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] 38 | if not self.n: 39 | return s 40 | else: 41 | return "%s(%+d)" % (s, self.n) 42 | 43 | # vim:ts=4:sw=4:et 44 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/contrib/_appengine_environ.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides means to detect the App Engine environment. 
3 | """ 4 | 5 | import os 6 | 7 | 8 | def is_appengine(): 9 | return is_local_appengine() or is_prod_appengine() 10 | 11 | 12 | def is_appengine_sandbox(): 13 | """Reports if the app is running in the first generation sandbox. 14 | 15 | The second generation runtimes are technically still in a sandbox, but it 16 | is much less restrictive, so generally you shouldn't need to check for it. 17 | see https://cloud.google.com/appengine/docs/standard/runtimes 18 | """ 19 | return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" 20 | 21 | 22 | def is_local_appengine(): 23 | return "APPENGINE_RUNTIME" in os.environ and os.environ.get( 24 | "SERVER_SOFTWARE", "" 25 | ).startswith("Development/") 26 | 27 | 28 | def is_prod_appengine(): 29 | return "APPENGINE_RUNTIME" in os.environ and os.environ.get( 30 | "SERVER_SOFTWARE", "" 31 | ).startswith("Google App Engine/") 32 | 33 | 34 | def is_prod_appengine_mvms(): 35 | """Deprecated.""" 36 | return False 37 | -------------------------------------------------------------------------------- /lambda/search_layer/python/six-1.16.0.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010-2020 Benjamin Peterson 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so, 8 | subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 15 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 16 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 18 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 19 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer-3.3.2.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 TAHRI Ahmed R. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | -------------------------------------------------------------------------------- /deployment/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "deployment", 3 | "lockfileVersion": 3, 4 | "requires": true, 5 | "packages": { 6 | "": { 7 | "dependencies": { 8 | "aws-cdk": "^2.75.0" 9 | } 10 | }, 11 | "node_modules/aws-cdk": { 12 | "version": "2.75.0", 13 | "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.75.0.tgz", 14 | "integrity": "sha512-BkyWNpYZz66Ewoi7rBPYZnW+0BAKbWYawhQ1v7KQWmGB0cFlQmvIfoOFklF5EOyAKOltUVRQF6KJf1/AIedkmg==", 15 | "bin": { 16 | "cdk": "bin/cdk" 17 | }, 18 | "engines": { 19 | "node": ">= 14.15.0" 20 | }, 21 | "optionalDependencies": { 22 | "fsevents": "2.3.2" 23 | } 24 | }, 25 | "node_modules/fsevents": { 26 | "version": "2.3.2", 27 | "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", 28 | "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", 29 | "hasInstallScript": true, 30 | "optional": true, 31 | "os": [ 32 | "darwin" 33 | ], 34 | "engines": { 35 | "node": "^8.16.0 || ^10.6.0 || >=11.0.0" 36 | } 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_version.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. 
See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | __versionstr__: str = "2.5.0" 28 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3-1.26.18.dist-info/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /deployment/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "requirements*.txt", 11 | "source.bat", 12 | "**/__init__.py", 13 | "python/__pycache__", 14 | "tests" 15 | ] 16 | }, 17 | "context": { 18 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, 19 | "@aws-cdk/core:stackRelativeExports": true, 20 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, 21 | "@aws-cdk/aws-lambda:recognizeVersionProps": true, 22 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 23 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, 24 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 25 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 26 | "@aws-cdk/core:checkSecretUsage": true, 27 | "@aws-cdk/aws-iam:minimizePolicies": true, 28 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 29 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false, 30 | "@aws-cdk/core:target-partitions": [ 31 | "aws", 32 | "aws-cn" 33 | ], 34 | "personalize": false, 35 | "text_search": true, 36 | "image_search": true, 37 | "bedrock_invoke": false 38 | } 39 | } 40 | 
-------------------------------------------------------------------------------- /data_load_offline/embeddings.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import base64 4 | import requests 5 | 6 | smr_client = boto3.client("sagemaker-runtime") 7 | 8 | def get_embedding_sagemaker(endpoint_name: str, inputs: str, language: str = 'chinese'): 9 | """Call a SageMaker text embedding endpoint and return the embedding vector for the given text.""" 10 | is_query = False 11 | instruction = "为这个句子生成表示以用于检索相关文章:" 12 | if language == 'english': 13 | instruction = "Represent this sentence for searching relevant passages:" 14 | inputs = {"inputs": inputs, "is_query": is_query, "instruction": instruction} 15 | response = run_inference(endpoint_name, inputs) 16 | return response[0] 17 | 18 | def encode_image(url): 19 | """Download the image at the given URL and return it as a base64-encoded string.""" 20 | img_str = base64.b64encode(requests.get(url).content) 21 | base64_string = img_str.decode("latin1") 22 | return base64_string 23 | 24 | def get_image_embedding_sagemaker(endpoint_name: str, url: str): 25 | """Call a SageMaker CLIP image embedding endpoint and return the embedding for the image at the given URL.""" 26 | image_embedding = '' 27 | if len(endpoint_name) > 0 and len(url) > 0: 28 | base64_string = encode_image(url) 29 | inputs = {"image": base64_string} 30 | output = run_inference(endpoint_name, inputs) 31 | image_embedding = output['image_embedding'][0] 32 | return image_embedding 33 | 34 | def run_inference(endpoint_name, inputs): 35 | """Invoke the SageMaker endpoint with a JSON payload and return the decoded JSON response.""" 36 | response = smr_client.invoke_endpoint(EndpointName=endpoint_name, Body=json.dumps(inputs)) 37 | return json.loads(response["Body"].read().decode("utf-8")) 
-------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/util/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | # For backwards compatibility, provide imports that used to be here.
4 | from .connection import is_connection_dropped 5 | from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers 6 | from .response import is_fp_closed 7 | from .retry import Retry 8 | from .ssl_ import ( 9 | ALPN_PROTOCOLS, 10 | HAS_SNI, 11 | IS_PYOPENSSL, 12 | IS_SECURETRANSPORT, 13 | PROTOCOL_TLS, 14 | SSLContext, 15 | assert_fingerprint, 16 | resolve_cert_reqs, 17 | resolve_ssl_version, 18 | ssl_wrap_socket, 19 | ) 20 | from .timeout import Timeout, current_time 21 | from .url import Url, get_host, parse_url, split_first 22 | from .wait import wait_for_read, wait_for_write 23 | 24 | __all__ = ( 25 | "HAS_SNI", 26 | "IS_PYOPENSSL", 27 | "IS_SECURETRANSPORT", 28 | "SSLContext", 29 | "PROTOCOL_TLS", 30 | "ALPN_PROTOCOLS", 31 | "Retry", 32 | "Timeout", 33 | "Url", 34 | "assert_fingerprint", 35 | "current_time", 36 | "is_connection_dropped", 37 | "is_fp_closed", 38 | "get_host", 39 | "parse_url", 40 | "make_headers", 41 | "resolve_cert_reqs", 42 | "resolve_ssl_version", 43 | "split_first", 44 | "ssl_wrap_socket", 45 | "wait_for_read", 46 | "wait_for_write", 47 | "SKIP_HEADER", 48 | "SKIPPABLE_HEADERS", 49 | ) 50 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/PaletteFile.py: -------------------------------------------------------------------------------- 1 | # 2 | # Python Imaging Library 3 | # $Id$ 4 | # 5 | # stuff to read simple, teragon-style palette files 6 | # 7 | # History: 8 | # 97-08-23 fl Created 9 | # 10 | # Copyright (c) Secret Labs AB 1997. 11 | # Copyright (c) Fredrik Lundh 1997. 12 | # 13 | # See the README file for information on usage and redistribution. 14 | # 15 | from __future__ import annotations 16 | 17 | from ._binary import o8 18 | 19 | 20 | class PaletteFile: 21 | """File handler for Teragon-style palette files.""" 22 | 23 | rawmode = "RGB" 24 | 25 | def __init__(self, fp): 26 | self.palette = [(i, i, i) for i in range(256)] 27 | 28 | while True: 29 | s = fp.readline() 30 | 31 | if not s: 32 | break 33 | if s[:1] == b"#": 34 | continue 35 | if len(s) > 100: 36 | msg = "bad palette file" 37 | raise SyntaxError(msg) 38 | 39 | v = [int(x) for x in s.split()] 40 | try: 41 | [i, r, g, b] = v 42 | except ValueError: 43 | [i, r] = v 44 | g = b = r 45 | 46 | if 0 <= i <= 255: 47 | self.palette[i] = o8(r) + o8(g) + o8(b) 48 | 49 | self.palette = b"".join(self.palette) 50 | 51 | def getpalette(self): 52 | return self.palette, self.rawmode 53 | -------------------------------------------------------------------------------- /deployment/app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import aws_cdk as cdk 3 | from lib.ss_image_search_stack import ImageSearchStack 4 | from lib.ss_text_search_stack import TextSearchStack 5 | from lib.ss_search_notebook import SearchNotebookStack 6 | from lib.ss_opensearch_stack import ProductOpenSearchStack 7 | from lib.ss_data_load_stack import DataLoadStack 8 | from lib.ss_bedrock_invoke_stack import BedrockStack 9 | from lib.ss_personalize_stack import PersonalizeRankingStack 10 | 11 | 12 | ACCOUNT = os.getenv('AWS_ACCOUNT_ID', '') 13 | REGION = os.getenv('AWS_REGION', '') 14 | AWS_ENV = cdk.Environment(account=ACCOUNT, region=REGION) 15 | env = AWS_ENV 16 | print(env) 17 | app = cdk.App() 18 | 19 | 20 | search_notebook_stack = SearchNotebookStack(app, "SearchNotebookStack", env=env) 21 | product_opensearch_stack = ProductOpenSearchStack(app, "ProductOpenSearchStack", env=env) 22 | data_load_stack = 
DataLoadStack(app,"DataLoadStack",env=env) 23 | 24 | if app.node.try_get_context('text_search'): 25 | text_search_stack = TextSearchStack(app, "TextSearchStack",env=env) 26 | 27 | if app.node.try_get_context('image_search'): 28 | image_search_stack = ImageSearchStack(app, "ImageSearchStack",env=env) 29 | 30 | if app.node.try_get_context('bedrock_invoke'): 31 | bedrock_stack = BedrockStack(app, "BedrockStack",env=env) 32 | 33 | if app.node.try_get_context('personalize'): 34 | personalize_stack = PersonalizeRankingStack(app, "PersonalizeRankingStack",env=env) 35 | 36 | 37 | app.synth() 38 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna-3.6.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | idna-3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | idna-3.6.dist-info/LICENSE.md,sha256=yy-vDKGMbTh-x8tm8yGTn7puZ-nawJ0xR3y52NP-aJk,1541 3 | idna-3.6.dist-info/METADATA,sha256=N93B509dkvvkd_Y0E_VxCHPkVkrD6InxoyfXvX4egds,9888 4 | idna-3.6.dist-info/RECORD,, 5 | idna-3.6.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 6 | idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 7 | idna/__pycache__/__init__.cpython-38.pyc,, 8 | idna/__pycache__/codec.cpython-38.pyc,, 9 | idna/__pycache__/compat.cpython-38.pyc,, 10 | idna/__pycache__/core.cpython-38.pyc,, 11 | idna/__pycache__/idnadata.cpython-38.pyc,, 12 | idna/__pycache__/intranges.cpython-38.pyc,, 13 | idna/__pycache__/package_data.cpython-38.pyc,, 14 | idna/__pycache__/uts46data.cpython-38.pyc,, 15 | idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426 16 | idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 17 | idna/core.py,sha256=Bxz9L1rH0N5U-yukGfPuDRTxR2jDUl96NCq1ql3YAUw,12908 18 | idna/idnadata.py,sha256=9u3Ec_GRrhlcbs7QM3pAZ2ObEQzPIOm99FaVOm91UGg,44351 19 | idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 20 | idna/package_data.py,sha256=y-iv-qJdmHsWVR5FszYwsMo1AQg8qpdU2aU5nT-S2oQ,21 21 | idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 22 | idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503 23 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/helpers/test.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 
9 | 10 | import os 11 | import time 12 | from typing import Any 13 | from unittest import SkipTest 14 | 15 | from opensearchpy import AsyncOpenSearch 16 | from opensearchpy.exceptions import ConnectionError 17 | 18 | OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") 19 | 20 | 21 | async def get_test_client(nowait: bool = False, **kwargs: Any) -> Any: 22 | # construct kwargs from the environment 23 | kw = {"timeout": 30} 24 | 25 | from opensearchpy import AsyncConnection 26 | 27 | async_connection = AsyncConnection() 28 | if hasattr(async_connection, "AIOHttpConnection"): 29 | kw["connection_class"] = getattr(async_connection, "AIOHttpConnection") 30 | 31 | kw.update(kwargs) 32 | client = AsyncOpenSearch(OPENSEARCH_URL, **kw) # type: ignore 33 | 34 | # wait for yellow status 35 | for _ in range(1 if nowait else 100): 36 | try: 37 | await client.cluster.health(wait_for_status="yellow") 38 | return client 39 | except ConnectionError: 40 | time.sleep(0.1) 41 | else: 42 | # timeout 43 | raise SkipTest("OpenSearch failed to start.") 44 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/client/remote.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from typing import Any 29 | 30 | from .utils import NamespacedClient, query_params 31 | 32 | 33 | class RemoteClient(NamespacedClient): 34 | @query_params() 35 | def info(self, params: Any = None, headers: Any = None) -> Any: 36 | return self.transport.perform_request( 37 | "GET", "/_remote/info", params=params, headers=headers 38 | ) 39 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/client/remote.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. 
See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from typing import Any 29 | 30 | from .utils import NamespacedClient, query_params 31 | 32 | 33 | class RemoteClient(NamespacedClient): 34 | @query_params() 35 | async def info(self, params: Any = None, headers: Any = None) -> Any: 36 | return await self.transport.perform_request( 37 | "GET", "/_remote/info", params=params, headers=headers 38 | ) 39 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/client/utils.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from ...client.utils import NamespacedClient # noqa 29 | from ...client.utils import ( 30 | SKIP_IN_PATH, 31 | _bulk_body, 32 | _escape, 33 | _make_path, 34 | _normalize_hosts, 35 | query_params, 36 | ) 37 | 38 | __all__ = [ 39 | "SKIP_IN_PATH", 40 | "NamespacedClient", 41 | "_make_path", 42 | "query_params", 43 | "_bulk_body", 44 | "_escape", 45 | "_normalize_hosts", 46 | ] 47 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/GimpPaletteFile.py: -------------------------------------------------------------------------------- 1 | # 2 | # Python Imaging Library 3 | # $Id$ 4 | # 5 | # stuff to read GIMP palette files 6 | # 7 | # History: 8 | # 1997-08-23 fl Created 9 | # 2004-09-07 fl Support GIMP 2.0 palette files. 10 | # 11 | # Copyright (c) Secret Labs AB 1997-2004. All rights reserved. 12 | # Copyright (c) Fredrik Lundh 1997-2004. 13 | # 14 | # See the README file for information on usage and redistribution. 
15 | # 16 | from __future__ import annotations 17 | 18 | import re 19 | 20 | from ._binary import o8 21 | 22 | 23 | class GimpPaletteFile: 24 | """File handler for GIMP's palette format.""" 25 | 26 | rawmode = "RGB" 27 | 28 | def __init__(self, fp): 29 | self.palette = [o8(i) * 3 for i in range(256)] 30 | 31 | if fp.readline()[:12] != b"GIMP Palette": 32 | msg = "not a GIMP palette file" 33 | raise SyntaxError(msg) 34 | 35 | for i in range(256): 36 | s = fp.readline() 37 | if not s: 38 | break 39 | 40 | # skip fields and comment lines 41 | if re.match(rb"\w+:|#", s): 42 | continue 43 | if len(s) > 100: 44 | msg = "bad palette file" 45 | raise SyntaxError(msg) 46 | 47 | v = tuple(map(int, s.split()[:3])) 48 | if len(v) != 3: 49 | msg = "bad palette entry" 50 | raise ValueError(msg) 51 | 52 | self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) 53 | 54 | self.palette = b"".join(self.palette) 55 | 56 | def getpalette(self): 57 | return self.palette, self.rawmode 58 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/packages/backports/makefile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | backports.makefile 4 | ~~~~~~~~~~~~~~~~~~ 5 | 6 | Backports the Python 3 ``socket.makefile`` method for use with anything that 7 | wants to create a "fake" socket object. 8 | """ 9 | import io 10 | from socket import SocketIO 11 | 12 | 13 | def backport_makefile( 14 | self, mode="r", buffering=None, encoding=None, errors=None, newline=None 15 | ): 16 | """ 17 | Backport of ``socket.makefile`` from Python 3.5. 18 | """ 19 | if not set(mode) <= {"r", "w", "b"}: 20 | raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) 21 | writing = "w" in mode 22 | reading = "r" in mode or not writing 23 | assert reading or writing 24 | binary = "b" in mode 25 | rawmode = "" 26 | if reading: 27 | rawmode += "r" 28 | if writing: 29 | rawmode += "w" 30 | raw = SocketIO(self, rawmode) 31 | self._makefile_refs += 1 32 | if buffering is None: 33 | buffering = -1 34 | if buffering < 0: 35 | buffering = io.DEFAULT_BUFFER_SIZE 36 | if buffering == 0: 37 | if not binary: 38 | raise ValueError("unbuffered streams must be binary") 39 | return raw 40 | if reading and writing: 41 | buffer = io.BufferedRWPair(raw, raw, buffering) 42 | elif reading: 43 | buffer = io.BufferedReader(raw, buffering) 44 | else: 45 | assert writing 46 | buffer = io.BufferedWriter(raw, buffering) 47 | if binary: 48 | return buffer 49 | text = io.TextIOWrapper(buffer, encoding, errors, newline) 50 | text.mode = mode 51 | return text 52 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna-3.6.dist-info/LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2013-2023, Kim Davies and contributors. 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are 8 | met: 9 | 10 | 1. Redistributions of source code must retain the above copyright 11 | notice, this list of conditions and the following disclaimer. 12 | 13 | 2. Redistributions in binary form must reproduce the above copyright 14 | notice, this list of conditions and the following disclaimer in the 15 | documentation and/or other materials provided with the distribution. 16 | 17 | 3. 
Neither the name of the copyright holder nor the names of its 18 | contributors may be used to endorse or promote products derived from 19 | this software without specific prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 22 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 23 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 24 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 25 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 26 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 27 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 28 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 29 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 30 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 31 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 32 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Charset-Normalizer 4 | ~~~~~~~~~~~~~~ 5 | The Real First Universal Charset Detector. 6 | A library that helps you read text from an unknown charset encoding. 7 | Motivated by chardet, This package is trying to resolve the issue by taking a new approach. 8 | All IANA character set names for which the Python core library provides codecs are supported. 9 | 10 | Basic usage: 11 | >>> from charset_normalizer import from_bytes 12 | >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) 13 | >>> best_guess = results.best() 14 | >>> str(best_guess) 15 | 'Bсеки човек има право на образование. Oбразованието!' 16 | 17 | Others methods and usages are available - see the full documentation 18 | at . 19 | :copyright: (c) 2021 by Ahmed TAHRI 20 | :license: MIT, see LICENSE for more details. 21 | """ 22 | import logging 23 | 24 | from .api import from_bytes, from_fp, from_path, is_binary 25 | from .legacy import detect 26 | from .models import CharsetMatch, CharsetMatches 27 | from .utils import set_logging_handler 28 | from .version import VERSION, __version__ 29 | 30 | __all__ = ( 31 | "from_fp", 32 | "from_path", 33 | "from_bytes", 34 | "is_binary", 35 | "detect", 36 | "CharsetMatch", 37 | "CharsetMatches", 38 | "__version__", 39 | "VERSION", 40 | "set_logging_handler", 41 | ) 42 | 43 | # Attach a NullHandler to the top level logger by default 44 | # https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library 45 | 46 | logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) 47 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/connection/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. 
See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from .base import Connection 29 | from .http_requests import RequestsHttpConnection 30 | from .http_urllib3 import Urllib3HttpConnection, create_ssl_context 31 | 32 | __all__ = [ 33 | "Connection", 34 | "RequestsHttpConnection", 35 | "Urllib3HttpConnection", 36 | "create_ssl_context", 37 | ] 38 | 39 | try: 40 | from .http_async import AsyncHttpConnection 41 | 42 | __all__ += [ 43 | "AsyncHttpConnection", 44 | ] 45 | except (ImportError, SyntaxError): 46 | pass 47 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/client/client.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | 10 | from typing import Any, Optional, Type 11 | 12 | from opensearchpy.client.utils import _normalize_hosts 13 | from opensearchpy.transport import Transport 14 | 15 | 16 | class Client(object): 17 | """ 18 | A generic async OpenSearch client. 19 | """ 20 | 21 | def __init__( 22 | self, 23 | hosts: Optional[str] = None, 24 | transport_class: Type[Transport] = Transport, 25 | **kwargs: Any 26 | ) -> None: 27 | """ 28 | :arg hosts: list of nodes, or a single node, we should connect to. 29 | Node should be a dictionary ({"host": "localhost", "port": 9200}), 30 | the entire dictionary will be passed to the :class:`~opensearchpy.Connection` 31 | class as kwargs, or a string in the format of ``host[:port]`` which will be 32 | translated to a dictionary automatically. If no value is given the 33 | :class:`~opensearchpy.Connection` class defaults will be used. 34 | 35 | :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. 36 | 37 | :arg kwargs: any additional arguments will be passed on to the 38 | :class:`~opensearchpy.Transport` class and, subsequently, to the 39 | :class:`~opensearchpy.Connection` instances. 40 | """ 41 | self.transport = transport_class(_normalize_hosts(hosts), **kwargs) 42 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/client/client.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 
9 | 10 | from typing import Any, Optional, Type 11 | 12 | from opensearchpy.client.utils import _normalize_hosts 13 | from opensearchpy.transport import Transport 14 | 15 | 16 | class Client(object): 17 | """ 18 | A generic async OpenSearch client. 19 | """ 20 | 21 | def __init__( 22 | self, 23 | hosts: Optional[str] = None, 24 | transport_class: Type[Transport] = Transport, 25 | **kwargs: Any 26 | ) -> None: 27 | """ 28 | :arg hosts: list of nodes, or a single node, we should connect to. 29 | Node should be a dictionary ({"host": "localhost", "port": 9200}), 30 | the entire dictionary will be passed to the :class:`~opensearchpy.Connection` 31 | class as kwargs, or a string in the format of ``host[:port]`` which will be 32 | translated to a dictionary automatically. If no value is given the 33 | :class:`~opensearchpy.Connection` class defaults will be used. 34 | 35 | :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. 36 | 37 | :arg kwargs: any additional arguments will be passed on to the 38 | :class:`~opensearchpy.Transport` class and, subsequently, to the 39 | :class:`~opensearchpy.Connection` instances. 40 | """ 41 | self.transport = transport_class(_normalize_hosts(hosts), **kwargs) 42 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests/_internal_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests._internal_utils 3 | ~~~~~~~~~~~~~~ 4 | 5 | Provides utility functions that are consumed internally by Requests 6 | which depend on extremely few external helpers (such as compat) 7 | """ 8 | import re 9 | 10 | from .compat import builtin_str 11 | 12 | _VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") 13 | _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") 14 | _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") 15 | _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") 16 | 17 | _HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) 18 | _HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) 19 | HEADER_VALIDATORS = { 20 | bytes: _HEADER_VALIDATORS_BYTE, 21 | str: _HEADER_VALIDATORS_STR, 22 | } 23 | 24 | 25 | def to_native_string(string, encoding="ascii"): 26 | """Given a string object, regardless of type, returns a representation of 27 | that string in the native string type, encoding and decoding where 28 | necessary. This assumes ASCII unless told otherwise. 29 | """ 30 | if isinstance(string, builtin_str): 31 | out = string 32 | else: 33 | out = string.decode(encoding) 34 | 35 | return out 36 | 37 | 38 | def unicode_is_ascii(u_string): 39 | """Determine if unicode string only contains ASCII characters. 40 | 41 | :param str u_string: unicode string to check. Must be unicode 42 | and not Python 2 `str`. 43 | :rtype: bool 44 | """ 45 | assert isinstance(u_string, str) 46 | try: 47 | u_string.encode("ascii") 48 | return True 49 | except UnicodeEncodeError: 50 | return False 51 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/helpers/errors.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 
6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from typing import Any, List 29 | 30 | from ..exceptions import OpenSearchException 31 | 32 | 33 | class BulkIndexError(OpenSearchException): 34 | @property 35 | def errors(self) -> List[Any]: 36 | """List of errors from execution of the last chunk.""" 37 | return self.args[1] # type: ignore 38 | 39 | 40 | class ScanError(OpenSearchException): 41 | scroll_id: str 42 | 43 | def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: 44 | super(ScanError, self).__init__(*args, **kwargs) 45 | self.scroll_id = scroll_id 46 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests/compat.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.compat 3 | ~~~~~~~~~~~~~~~ 4 | 5 | This module previously handled import compatibility issues 6 | between Python 2 and Python 3. It remains for backwards 7 | compatibility until the next major version. 8 | """ 9 | 10 | try: 11 | import chardet 12 | except ImportError: 13 | import charset_normalizer as chardet 14 | 15 | import sys 16 | 17 | # ------- 18 | # Pythons 19 | # ------- 20 | 21 | # Syntax sugar. 22 | _ver = sys.version_info 23 | 24 | #: Python 2.x? 25 | is_py2 = _ver[0] == 2 26 | 27 | #: Python 3.x? 28 | is_py3 = _ver[0] == 3 29 | 30 | # json/simplejson module import resolution 31 | has_simplejson = False 32 | try: 33 | import simplejson as json 34 | 35 | has_simplejson = True 36 | except ImportError: 37 | import json 38 | 39 | if has_simplejson: 40 | from simplejson import JSONDecodeError 41 | else: 42 | from json import JSONDecodeError 43 | 44 | # Keep OrderedDict for backwards compatibility. 
45 | from collections import OrderedDict 46 | from collections.abc import Callable, Mapping, MutableMapping 47 | from http import cookiejar as cookielib 48 | from http.cookies import Morsel 49 | from io import StringIO 50 | 51 | # -------------- 52 | # Legacy Imports 53 | # -------------- 54 | from urllib.parse import ( 55 | quote, 56 | quote_plus, 57 | unquote, 58 | unquote_plus, 59 | urldefrag, 60 | urlencode, 61 | urljoin, 62 | urlparse, 63 | urlsplit, 64 | urlunparse, 65 | ) 66 | from urllib.request import ( 67 | getproxies, 68 | getproxies_environment, 69 | parse_http_list, 70 | proxy_bypass, 71 | proxy_bypass_environment, 72 | ) 73 | 74 | builtin_str = str 75 | str = str 76 | bytes = bytes 77 | basestring = (str, bytes) 78 | numeric_types = (int, float) 79 | integer_types = (int,) 80 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/util/proxy.py: -------------------------------------------------------------------------------- 1 | from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version 2 | 3 | 4 | def connection_requires_http_tunnel( 5 | proxy_url=None, proxy_config=None, destination_scheme=None 6 | ): 7 | """ 8 | Returns True if the connection requires an HTTP CONNECT through the proxy. 9 | 10 | :param URL proxy_url: 11 | URL of the proxy. 12 | :param ProxyConfig proxy_config: 13 | Proxy configuration from poolmanager.py 14 | :param str destination_scheme: 15 | The scheme of the destination. (i.e https, http, etc) 16 | """ 17 | # If we're not using a proxy, no way to use a tunnel. 18 | if proxy_url is None: 19 | return False 20 | 21 | # HTTP destinations never require tunneling, we always forward. 22 | if destination_scheme == "http": 23 | return False 24 | 25 | # Support for forwarding with HTTPS proxies and HTTPS destinations. 26 | if ( 27 | proxy_url.scheme == "https" 28 | and proxy_config 29 | and proxy_config.use_forwarding_for_https 30 | ): 31 | return False 32 | 33 | # Otherwise always use a tunnel. 34 | return True 35 | 36 | 37 | def create_proxy_ssl_context( 38 | ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None 39 | ): 40 | """ 41 | Generates a default proxy ssl context if one hasn't been provided by the 42 | user. 43 | """ 44 | ssl_context = create_urllib3_context( 45 | ssl_version=resolve_ssl_version(ssl_version), 46 | cert_reqs=resolve_cert_reqs(cert_reqs), 47 | ) 48 | 49 | if ( 50 | not ca_certs 51 | and not ca_cert_dir 52 | and not ca_cert_data 53 | and hasattr(ssl_context, "load_default_certs") 54 | ): 55 | ssl_context.load_default_certs() 56 | 57 | return ssl_context 58 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/PcdImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # PCD file handling 6 | # 7 | # History: 8 | # 96-05-10 fl Created 9 | # 96-05-27 fl Added draft mode (128x192, 256x384) 10 | # 11 | # Copyright (c) Secret Labs AB 1997. 12 | # Copyright (c) Fredrik Lundh 1996. 13 | # 14 | # See the README file for information on usage and redistribution. 15 | # 16 | from __future__ import annotations 17 | 18 | from . import Image, ImageFile 19 | 20 | ## 21 | # Image plugin for PhotoCD images. This plugin only reads the 768x512 22 | # image from the file; higher resolutions are encoded in a proprietary 23 | # encoding. 
24 | 25 | 26 | class PcdImageFile(ImageFile.ImageFile): 27 | format = "PCD" 28 | format_description = "Kodak PhotoCD" 29 | 30 | def _open(self) -> None: 31 | # rough 32 | assert self.fp is not None 33 | 34 | self.fp.seek(2048) 35 | s = self.fp.read(2048) 36 | 37 | if s[:4] != b"PCD_": 38 | msg = "not a PCD file" 39 | raise SyntaxError(msg) 40 | 41 | orientation = s[1538] & 3 42 | self.tile_post_rotate = None 43 | if orientation == 1: 44 | self.tile_post_rotate = 90 45 | elif orientation == 3: 46 | self.tile_post_rotate = -90 47 | 48 | self._mode = "RGB" 49 | self._size = 768, 512 # FIXME: not correct for rotated images! 50 | self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)] 51 | 52 | def load_end(self) -> None: 53 | if self.tile_post_rotate: 54 | # Handle rotated PCDs 55 | assert self.im is not None 56 | 57 | self.im = self.im.rotate(self.tile_post_rotate) 58 | self._size = self.im.size 59 | 60 | 61 | # 62 | # registry 63 | 64 | Image.register_open(PcdImageFile.format, PcdImageFile) 65 | 66 | Image.register_extension(PcdImageFile.format, ".pcd") 67 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/GribStubImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library 3 | # $Id$ 4 | # 5 | # GRIB stub adapter 6 | # 7 | # Copyright (c) 1996-2003 by Fredrik Lundh 8 | # 9 | # See the README file for information on usage and redistribution. 10 | # 11 | from __future__ import annotations 12 | 13 | from . import Image, ImageFile 14 | 15 | _handler = None 16 | 17 | 18 | def register_handler(handler): 19 | """ 20 | Install application-specific GRIB image handler. 21 | 22 | :param handler: Handler object. 23 | """ 24 | global _handler 25 | _handler = handler 26 | 27 | 28 | # -------------------------------------------------------------------- 29 | # Image adapter 30 | 31 | 32 | def _accept(prefix): 33 | return prefix[:4] == b"GRIB" and prefix[7] == 1 34 | 35 | 36 | class GribStubImageFile(ImageFile.StubImageFile): 37 | format = "GRIB" 38 | format_description = "GRIB" 39 | 40 | def _open(self): 41 | offset = self.fp.tell() 42 | 43 | if not _accept(self.fp.read(8)): 44 | msg = "Not a GRIB file" 45 | raise SyntaxError(msg) 46 | 47 | self.fp.seek(offset) 48 | 49 | # make something up 50 | self._mode = "F" 51 | self._size = 1, 1 52 | 53 | loader = self._load() 54 | if loader: 55 | loader.open(self) 56 | 57 | def _load(self): 58 | return _handler 59 | 60 | 61 | def _save(im, fp, filename): 62 | if _handler is None or not hasattr(_handler, "save"): 63 | msg = "GRIB save handler not installed" 64 | raise OSError(msg) 65 | _handler.save(im, fp, filename) 66 | 67 | 68 | # -------------------------------------------------------------------- 69 | # Registry 70 | 71 | Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) 72 | Image.register_save(GribStubImageFile.format, _save) 73 | 74 | Image.register_extension(GribStubImageFile.format, ".grib") 75 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/Hdf5StubImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library 3 | # $Id$ 4 | # 5 | # HDF5 stub adapter 6 | # 7 | # Copyright (c) 2000-2003 by Fredrik Lundh 8 | # 9 | # See the README file for information on usage and redistribution. 10 | # 11 | from __future__ import annotations 12 | 13 | from . 
import Image, ImageFile 14 | 15 | _handler = None 16 | 17 | 18 | def register_handler(handler): 19 | """ 20 | Install application-specific HDF5 image handler. 21 | 22 | :param handler: Handler object. 23 | """ 24 | global _handler 25 | _handler = handler 26 | 27 | 28 | # -------------------------------------------------------------------- 29 | # Image adapter 30 | 31 | 32 | def _accept(prefix): 33 | return prefix[:8] == b"\x89HDF\r\n\x1a\n" 34 | 35 | 36 | class HDF5StubImageFile(ImageFile.StubImageFile): 37 | format = "HDF5" 38 | format_description = "HDF5" 39 | 40 | def _open(self): 41 | offset = self.fp.tell() 42 | 43 | if not _accept(self.fp.read(8)): 44 | msg = "Not an HDF file" 45 | raise SyntaxError(msg) 46 | 47 | self.fp.seek(offset) 48 | 49 | # make something up 50 | self._mode = "F" 51 | self._size = 1, 1 52 | 53 | loader = self._load() 54 | if loader: 55 | loader.open(self) 56 | 57 | def _load(self): 58 | return _handler 59 | 60 | 61 | def _save(im, fp, filename): 62 | if _handler is None or not hasattr(_handler, "save"): 63 | msg = "HDF5 save handler not installed" 64 | raise OSError(msg) 65 | _handler.save(im, fp, filename) 66 | 67 | 68 | # -------------------------------------------------------------------- 69 | # Registry 70 | 71 | Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) 72 | Image.register_save(HDF5StubImageFile.format, _save) 73 | 74 | Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"]) 75 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/BufrStubImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library 3 | # $Id$ 4 | # 5 | # BUFR stub adapter 6 | # 7 | # Copyright (c) 1996-2003 by Fredrik Lundh 8 | # 9 | # See the README file for information on usage and redistribution. 10 | # 11 | from __future__ import annotations 12 | 13 | from . import Image, ImageFile 14 | 15 | _handler = None 16 | 17 | 18 | def register_handler(handler): 19 | """ 20 | Install application-specific BUFR image handler. 21 | 22 | :param handler: Handler object. 
23 | """ 24 | global _handler 25 | _handler = handler 26 | 27 | 28 | # -------------------------------------------------------------------- 29 | # Image adapter 30 | 31 | 32 | def _accept(prefix): 33 | return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" 34 | 35 | 36 | class BufrStubImageFile(ImageFile.StubImageFile): 37 | format = "BUFR" 38 | format_description = "BUFR" 39 | 40 | def _open(self): 41 | offset = self.fp.tell() 42 | 43 | if not _accept(self.fp.read(4)): 44 | msg = "Not a BUFR file" 45 | raise SyntaxError(msg) 46 | 47 | self.fp.seek(offset) 48 | 49 | # make something up 50 | self._mode = "F" 51 | self._size = 1, 1 52 | 53 | loader = self._load() 54 | if loader: 55 | loader.open(self) 56 | 57 | def _load(self): 58 | return _handler 59 | 60 | 61 | def _save(im, fp, filename): 62 | if _handler is None or not hasattr(_handler, "save"): 63 | msg = "BUFR save handler not installed" 64 | raise OSError(msg) 65 | _handler.save(im, fp, filename) 66 | 67 | 68 | # -------------------------------------------------------------------- 69 | # Registry 70 | 71 | Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) 72 | Image.register_save(BufrStubImageFile.format, _save) 73 | 74 | Image.register_extension(BufrStubImageFile.format, ".bufr") 75 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/compat.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | import asyncio 29 | 30 | from ..compat import * # noqa 31 | 32 | # Hack supporting Python 3.6 asyncio which didn't have 'get_running_loop()'. 33 | # Essentially we want to get away from having users pass in a loop to us. 34 | # Instead we should call 'get_running_loop()' whenever we need 35 | # the currently running loop. 
36 | # See: https://aiopg.readthedocs.io/en/stable/run_loop.html#implementation 37 | try: 38 | from asyncio import get_running_loop 39 | except ImportError: 40 | 41 | def get_running_loop() -> asyncio.AbstractEventLoop: 42 | loop = asyncio.get_event_loop() 43 | if not loop.is_running(): 44 | raise RuntimeError("no running event loop") 45 | return loop 46 | 47 | 48 | __all__ = ["get_running_loop"] 49 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/client/remote_store.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | 10 | # ------------------------------------------------------------------------------------------ 11 | # THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST 12 | # 13 | # To contribute, kindly make modifications in the opensearch-py client generator 14 | # or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md 15 | # and https://github.com/opensearch-project/opensearch-api-specification for details. 16 | # -----------------------------------------------------------------------------------------+ 17 | 18 | 19 | from typing import Any 20 | 21 | from .utils import SKIP_IN_PATH, NamespacedClient, query_params 22 | 23 | 24 | class RemoteStoreClient(NamespacedClient): 25 | @query_params("cluster_manager_timeout", "wait_for_completion") 26 | def restore( 27 | self, 28 | body: Any, 29 | params: Any = None, 30 | headers: Any = None, 31 | ) -> Any: 32 | """ 33 | Restores from remote store. 34 | 35 | 36 | :arg body: Comma-separated list of index IDs 37 | :arg cluster_manager_timeout: Operation timeout for connection 38 | to cluster-manager node. 39 | :arg wait_for_completion: Should this request wait until the 40 | operation has completed before returning. Default is false. 41 | """ 42 | if body in SKIP_IN_PATH: 43 | raise ValueError("Empty value passed for a required argument 'body'.") 44 | 45 | return self.transport.perform_request( 46 | "POST", "/_remotestore/_restore", params=params, headers=headers, body=body 47 | ) 48 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/client/remote_store.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | 10 | # ------------------------------------------------------------------------------------------ 11 | # THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST 12 | # 13 | # To contribute, kindly make modifications in the opensearch-py client generator 14 | # or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md 15 | # and https://github.com/opensearch-project/opensearch-api-specification for details. 
16 | # -----------------------------------------------------------------------------------------+ 17 | 18 | 19 | from typing import Any 20 | 21 | from .utils import SKIP_IN_PATH, NamespacedClient, query_params 22 | 23 | 24 | class RemoteStoreClient(NamespacedClient): 25 | @query_params("cluster_manager_timeout", "wait_for_completion") 26 | async def restore( 27 | self, 28 | body: Any, 29 | params: Any = None, 30 | headers: Any = None, 31 | ) -> Any: 32 | """ 33 | Restores from remote store. 34 | 35 | 36 | :arg body: Comma-separated list of index IDs 37 | :arg cluster_manager_timeout: Operation timeout for connection 38 | to cluster-manager node. 39 | :arg wait_for_completion: Should this request wait until the 40 | operation has completed before returning. Default is false. 41 | """ 42 | if body in SKIP_IN_PATH: 43 | raise ValueError("Empty value passed for a required argument 'body'.") 44 | 45 | return await self.transport.perform_request( 46 | "POST", "/_remotestore/_restore", params=params, headers=headers, body=body 47 | ) 48 | -------------------------------------------------------------------------------- /lambda/personalize_ranking/lambda_function.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import boto3 4 | 5 | 6 | region = os.environ.get('AWS_REGION') 7 | account_id = boto3.client('sts').get_caller_identity()['Account'] 8 | 9 | campaignArn='arn:aws:personalize:'+region+':'+str(account_id)+':campaign/personalize-poc-rerank' 10 | 11 | response = { 12 | "statusCode": 200, 13 | "headers": { 14 | "Access-Control-Allow-Origin": '*' 15 | }, 16 | "isBase64Encoded": False 17 | } 18 | 19 | def lambda_handler(event, context): 20 | print(event) 21 | 22 | user_id = 1 23 | if "user_id" in event.keys(): 24 | user_id = event['user_id'] 25 | elif "queryStringParameters" in event.keys(): 26 | if "user_id" in event['queryStringParameters'].keys(): 27 | user_id = event['queryStringParameters']['user_id'].strip() 28 | 29 | item_id_list = "" 30 | if "item_id_list" in event.keys(): 31 | item_id_list = event['item_id_list'] 32 | elif "queryStringParameters" in event.keys(): 33 | if "item_id_list" in event['queryStringParameters'].keys(): 34 | item_id_list = event['queryStringParameters']['item_id_list'].strip() 35 | 36 | item_id_list=item_id_list.split(',') 37 | print('item_id_list:',item_id_list) 38 | 39 | if user_id == '' or len(item_id_list) == 0: 40 | ranking_result = 'user id or item list is null' 41 | else: 42 | personalize_runtime = boto3.client('personalize-runtime') 43 | 44 | result = personalize_runtime.get_personalized_ranking( 45 | campaignArn = campaignArn, 46 | userId = user_id, 47 | inputList = item_id_list 48 | ) 49 | print('result:',result) 50 | ranking_result = result['personalizedRanking'] 51 | 52 | print("ranking_result:",ranking_result) 53 | 54 | response['body'] = json.dumps( 55 | { 56 | 'ranking_result':ranking_result 57 | }) 58 | return response -------------------------------------------------------------------------------- /lambda/search_layer/python/six-1.16.0.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: six 3 | Version: 1.16.0 4 | Summary: Python 2 and 3 compatibility utilities 5 | Home-page: https://github.com/benjaminp/six 6 | Author: Benjamin Peterson 7 | Author-email: benjamin@python.org 8 | License: MIT 9 | Platform: UNKNOWN 10 | Classifier: Development Status :: 5 - Production/Stable 11 | Classifier: 
Programming Language :: Python :: 2 12 | Classifier: Programming Language :: Python :: 3 13 | Classifier: Intended Audience :: Developers 14 | Classifier: License :: OSI Approved :: MIT License 15 | Classifier: Topic :: Software Development :: Libraries 16 | Classifier: Topic :: Utilities 17 | Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.* 18 | 19 | .. image:: https://img.shields.io/pypi/v/six.svg 20 | :target: https://pypi.org/project/six/ 21 | :alt: six on PyPI 22 | 23 | .. image:: https://travis-ci.org/benjaminp/six.svg?branch=master 24 | :target: https://travis-ci.org/benjaminp/six 25 | :alt: six on TravisCI 26 | 27 | .. image:: https://readthedocs.org/projects/six/badge/?version=latest 28 | :target: https://six.readthedocs.io/ 29 | :alt: six's documentation on Read the Docs 30 | 31 | .. image:: https://img.shields.io/badge/license-MIT-green.svg 32 | :target: https://github.com/benjaminp/six/blob/master/LICENSE 33 | :alt: MIT License badge 34 | 35 | Six is a Python 2 and 3 compatibility library. It provides utility functions 36 | for smoothing over the differences between the Python versions with the goal of 37 | writing Python code that is compatible on both Python versions. See the 38 | documentation for more information on what is provided. 39 | 40 | Six supports Python 2.7 and 3.3+. It is contained in only one Python 41 | file, so it can be easily copied into your project. (The copyright and license 42 | notice must be retained.) 43 | 44 | Online documentation is at https://six.readthedocs.io/. 45 | 46 | Bugs can be reported to https://github.com/benjaminp/six. The code can also 47 | be found there. 48 | 49 | 50 | -------------------------------------------------------------------------------- /lambda/search_layer/python/model.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | from typing import Dict,Any 4 | from bedrockAdapter import BedrockAdapter 5 | import requests 6 | 7 | bedrock = boto3.client(service_name='bedrock-runtime') 8 | 9 | def invoke_model_local(prompt: str,model_id:str,model_kwargs: Dict[str, Any]): 10 | 11 | provider = model_id.split(".")[0] 12 | model_kwargs['modelId'] = model_id 13 | modle_input = BedrockAdapter.prepare_input(provider,prompt,model_kwargs) 14 | body = json.dumps(modle_input) 15 | #print('body:',body) 16 | try: 17 | accept = "application/json" 18 | contentType = "application/json" 19 | response = bedrock.invoke_model( 20 | body=body, modelId=model_id, accept=accept, contentType=contentType 21 | ) 22 | function_call_response = BedrockAdapter.prepare_output(provider,response) 23 | # function_call_response = json.loads(response.get("body").read().decode()) 24 | return function_call_response 25 | 26 | except Exception as e: 27 | raise ValueError(f"Error raised by bedrock service: {e}") 28 | 29 | def request_model(url): 30 | response = requests.get(url) 31 | result = response.text 32 | # print('result:',result) 33 | result = json.loads(result) 34 | answer = result['answer'] 35 | return answer 36 | 37 | def invoke_model_api(prompt: str,model_id:str,api_url:str,model_kwargs: Dict[str, Any]): 38 | url = api_url + '/bedrock?' 
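# The request URL is assembled by plain string concatenation; with hypothetical
# values it ends up looking like:
#   https://<api-id>.execute-api.<region>.amazonaws.com/<stage>/bedrock?prompt=hello&modelId=anthropic.claude-v2&max_tokens=512&temperature=0.01
# Parameter values are appended without URL-encoding, so prompts containing
# spaces, '&' or '=' should be encoded by the caller (e.g. urllib.parse.quote).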
39 | 40 | url += ('prompt='+prompt) 41 | url += ('&modelId='+model_id) 42 | if len(model_kwargs) > 0: 43 | for key in model_kwargs.keys(): 44 | url += ('&' + str(key) + '=' + str(model_kwargs[key])) 45 | try: 46 | answer = request_model(url) 47 | return answer 48 | except Exception as e: 49 | try: 50 | answer = request_model(url) 51 | return answer 52 | except Exception as e: 53 | raise ValueError(f"Error raised by bedrock api service: {e}") 54 | 55 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/parser/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from ._parser import parse, parser, parserinfo, ParserError 3 | from ._parser import DEFAULTPARSER, DEFAULTTZPARSER 4 | from ._parser import UnknownTimezoneWarning 5 | 6 | from ._parser import __doc__ 7 | 8 | from .isoparser import isoparser, isoparse 9 | 10 | __all__ = ['parse', 'parser', 'parserinfo', 11 | 'isoparse', 'isoparser', 12 | 'ParserError', 13 | 'UnknownTimezoneWarning'] 14 | 15 | 16 | ### 17 | # Deprecate portions of the private interface so that downstream code that 18 | # is improperly relying on it is given *some* notice. 19 | 20 | 21 | def __deprecated_private_func(f): 22 | from functools import wraps 23 | import warnings 24 | 25 | msg = ('{name} is a private function and may break without warning, ' 26 | 'it will be moved and or renamed in future versions.') 27 | msg = msg.format(name=f.__name__) 28 | 29 | @wraps(f) 30 | def deprecated_func(*args, **kwargs): 31 | warnings.warn(msg, DeprecationWarning) 32 | return f(*args, **kwargs) 33 | 34 | return deprecated_func 35 | 36 | def __deprecate_private_class(c): 37 | import warnings 38 | 39 | msg = ('{name} is a private class and may break without warning, ' 40 | 'it will be moved and or renamed in future versions.') 41 | msg = msg.format(name=c.__name__) 42 | 43 | class private_class(c): 44 | __doc__ = c.__doc__ 45 | 46 | def __init__(self, *args, **kwargs): 47 | warnings.warn(msg, DeprecationWarning) 48 | super(private_class, self).__init__(*args, **kwargs) 49 | 50 | private_class.__name__ = c.__name__ 51 | 52 | return private_class 53 | 54 | 55 | from ._parser import _timelex, _resultbase 56 | from ._parser import _tzparser, _parsetz 57 | 58 | _timelex = __deprecate_private_class(_timelex) 59 | _tzparser = __deprecate_private_class(_tzparser) 60 | _resultbase = __deprecate_private_class(_resultbase) 61 | _parsetz = __deprecated_private_func(_parsetz) 62 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/TarIO.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # read files from within a tar file 6 | # 7 | # History: 8 | # 95-06-18 fl Created 9 | # 96-05-28 fl Open files in binary mode 10 | # 11 | # Copyright (c) Secret Labs AB 1997. 12 | # Copyright (c) Fredrik Lundh 1995-96. 13 | # 14 | # See the README file for information on usage and redistribution. 15 | # 16 | from __future__ import annotations 17 | 18 | import io 19 | from types import TracebackType 20 | 21 | from . import ContainerIO 22 | 23 | 24 | class TarIO(ContainerIO.ContainerIO[bytes]): 25 | """A file object that provides read access to a given member of a TAR file.""" 26 | 27 | def __init__(self, tarfile: str, file: str) -> None: 28 | """ 29 | Create file object. 30 | 31 | :param tarfile: Name of TAR file. 
32 | :param file: Name of member file. 33 | """ 34 | self.fh = open(tarfile, "rb") 35 | 36 | while True: 37 | s = self.fh.read(512) 38 | if len(s) != 512: 39 | msg = "unexpected end of tar file" 40 | raise OSError(msg) 41 | 42 | name = s[:100].decode("utf-8") 43 | i = name.find("\0") 44 | if i == 0: 45 | msg = "cannot find subfile" 46 | raise OSError(msg) 47 | if i > 0: 48 | name = name[:i] 49 | 50 | size = int(s[124:135], 8) 51 | 52 | if file == name: 53 | break 54 | 55 | self.fh.seek((size + 511) & (~511), io.SEEK_CUR) 56 | 57 | # Open region 58 | super().__init__(self.fh, self.fh.tell(), size) 59 | 60 | # Context manager support 61 | def __enter__(self) -> TarIO: 62 | return self 63 | 64 | def __exit__( 65 | self, 66 | exc_type: type[BaseException] | None, 67 | exc_val: BaseException | None, 68 | exc_tb: TracebackType | None, 69 | ) -> None: 70 | self.close() 71 | 72 | def close(self) -> None: 73 | self.fh.close() 74 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/PixarImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # PIXAR raster support for PIL 6 | # 7 | # history: 8 | # 97-01-29 fl Created 9 | # 10 | # notes: 11 | # This is incomplete; it is based on a few samples created with 12 | # Photoshop 2.5 and 3.0, and a summary description provided by 13 | # Greg Coats . Hopefully, "L" and 14 | # "RGBA" support will be added in future versions. 15 | # 16 | # Copyright (c) Secret Labs AB 1997. 17 | # Copyright (c) Fredrik Lundh 1997. 18 | # 19 | # See the README file for information on usage and redistribution. 20 | # 21 | from __future__ import annotations 22 | 23 | from . import Image, ImageFile 24 | from ._binary import i16le as i16 25 | 26 | # 27 | # helpers 28 | 29 | 30 | def _accept(prefix: bytes) -> bool: 31 | return prefix[:4] == b"\200\350\000\000" 32 | 33 | 34 | ## 35 | # Image plugin for PIXAR raster images. 36 | 37 | 38 | class PixarImageFile(ImageFile.ImageFile): 39 | format = "PIXAR" 40 | format_description = "PIXAR raster image" 41 | 42 | def _open(self) -> None: 43 | # assuming a 4-byte magic label 44 | assert self.fp is not None 45 | 46 | s = self.fp.read(4) 47 | if not _accept(s): 48 | msg = "not a PIXAR file" 49 | raise SyntaxError(msg) 50 | 51 | # read rest of header 52 | s = s + self.fp.read(508) 53 | 54 | self._size = i16(s, 418), i16(s, 416) 55 | 56 | # get channel/depth descriptions 57 | mode = i16(s, 424), i16(s, 426) 58 | 59 | if mode == (14, 2): 60 | self._mode = "RGB" 61 | # FIXME: to be continued... 62 | 63 | # create tile descriptor (assuming "dumped") 64 | self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))] 65 | 66 | 67 | # 68 | # -------------------------------------------------------------------- 69 | 70 | Image.register_open(PixarImageFile.format, PixarImageFile, _accept) 71 | 72 | Image.register_extension(PixarImageFile.format, ".pxr") 73 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/CurImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # Windows Cursor support for PIL 6 | # 7 | # notes: 8 | # uses BmpImagePlugin.py to read the bitmap data. 9 | # 10 | # history: 11 | # 96-05-27 fl Created 12 | # 13 | # Copyright (c) Secret Labs AB 1997. 14 | # Copyright (c) Fredrik Lundh 1996. 
15 | # 16 | # See the README file for information on usage and redistribution. 17 | # 18 | from __future__ import annotations 19 | 20 | from . import BmpImagePlugin, Image 21 | from ._binary import i16le as i16 22 | from ._binary import i32le as i32 23 | 24 | # 25 | # -------------------------------------------------------------------- 26 | 27 | 28 | def _accept(prefix): 29 | return prefix[:4] == b"\0\0\2\0" 30 | 31 | 32 | ## 33 | # Image plugin for Windows Cursor files. 34 | 35 | 36 | class CurImageFile(BmpImagePlugin.BmpImageFile): 37 | format = "CUR" 38 | format_description = "Windows Cursor" 39 | 40 | def _open(self): 41 | offset = self.fp.tell() 42 | 43 | # check magic 44 | s = self.fp.read(6) 45 | if not _accept(s): 46 | msg = "not a CUR file" 47 | raise SyntaxError(msg) 48 | 49 | # pick the largest cursor in the file 50 | m = b"" 51 | for i in range(i16(s, 4)): 52 | s = self.fp.read(16) 53 | if not m: 54 | m = s 55 | elif s[0] > m[0] and s[1] > m[1]: 56 | m = s 57 | if not m: 58 | msg = "No cursors were found" 59 | raise TypeError(msg) 60 | 61 | # load as bitmap 62 | self._bitmap(i32(m, 12) + offset) 63 | 64 | # patch up the bitmap height 65 | self._size = self.size[0], self.size[1] // 2 66 | d, e, o, a = self.tile[0] 67 | self.tile[0] = d, (0, 0) + self.size, o, a 68 | 69 | 70 | # 71 | # -------------------------------------------------------------------- 72 | 73 | Image.register_open(CurImageFile.format, CurImageFile, _accept) 74 | 75 | Image.register_extension(CurImageFile.format, ".cur") 76 | -------------------------------------------------------------------------------- /lambda/search_layer/python/idna/intranges.py: -------------------------------------------------------------------------------- 1 | """ 2 | Given a list of integers, made up of (hopefully) a small number of long runs 3 | of consecutive integers, compute a representation of the form 4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present 5 | in the original list?" in time O(log(# runs)). 6 | """ 7 | 8 | import bisect 9 | from typing import List, Tuple 10 | 11 | def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: 12 | """Represent a list of integers as a sequence of ranges: 13 | ((start_0, end_0), (start_1, end_1), ...), such that the original 14 | integers are exactly those x such that start_i <= x < end_i for some i. 15 | 16 | Ranges are encoded as single integers (start << 32 | end), not as tuples. 
17 | """ 18 | 19 | sorted_list = sorted(list_) 20 | ranges = [] 21 | last_write = -1 22 | for i in range(len(sorted_list)): 23 | if i+1 < len(sorted_list): 24 | if sorted_list[i] == sorted_list[i+1]-1: 25 | continue 26 | current_range = sorted_list[last_write+1:i+1] 27 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) 28 | last_write = i 29 | 30 | return tuple(ranges) 31 | 32 | def _encode_range(start: int, end: int) -> int: 33 | return (start << 32) | end 34 | 35 | def _decode_range(r: int) -> Tuple[int, int]: 36 | return (r >> 32), (r & ((1 << 32) - 1)) 37 | 38 | 39 | def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: 40 | """Determine if `int_` falls into one of the ranges in `ranges`.""" 41 | tuple_ = _encode_range(int_, 0) 42 | pos = bisect.bisect_left(ranges, tuple_) 43 | # we could be immediately ahead of a tuple (start, end) 44 | # with start < int_ <= end 45 | if pos > 0: 46 | left, right = _decode_range(ranges[pos-1]) 47 | if left <= int_ < right: 48 | return True 49 | # or we could be immediately behind a tuple (int_, end) 50 | if pos < len(ranges): 51 | left, _ = _decode_range(ranges[pos]) 52 | if left == int_: 53 | return True 54 | return False 55 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 
26 | 27 | 28 | from .._async.helpers.actions import ( 29 | async_bulk, 30 | async_reindex, 31 | async_scan, 32 | async_streaming_bulk, 33 | ) 34 | from .actions import ( 35 | _chunk_actions, 36 | _process_bulk_chunk, 37 | bulk, 38 | expand_action, 39 | parallel_bulk, 40 | reindex, 41 | scan, 42 | streaming_bulk, 43 | ) 44 | from .asyncsigner import AWSV4SignerAsyncAuth 45 | from .errors import BulkIndexError, ScanError 46 | from .signer import AWSV4SignerAuth, RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth 47 | 48 | __all__ = [ 49 | "BulkIndexError", 50 | "ScanError", 51 | "expand_action", 52 | "streaming_bulk", 53 | "bulk", 54 | "parallel_bulk", 55 | "scan", 56 | "reindex", 57 | "_chunk_actions", 58 | "_process_bulk_chunk", 59 | "AWSV4SignerAuth", 60 | "AWSV4SignerAsyncAuth", 61 | "RequestsAWSV4SignerAuth", 62 | "Urllib3AWSV4SignerAuth", 63 | "async_scan", 64 | "async_bulk", 65 | "async_reindex", 66 | "async_streaming_bulk", 67 | ] 68 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/_extra_imports.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | # type: ignore 29 | 30 | # This file exists for the sole reason of making mypy not 31 | # complain about type issues to do with 'aiohttp' and 'yarl'. 32 | # We're in a catch-22 situation: 33 | # - If we use 'type: ignore' on 'import aiohttp' and it's not installed 34 | # mypy will complain that the annotation is unnecessary. 35 | # - If we don't use 'type: ignore' on 'import aiohttp' and it 36 | # it's not installed mypy will complain that it can't find 37 | # type hints for aiohttp. 38 | # So to make mypy happy we move all our 'extra' imports here 39 | # and add a global 'type: ignore' which mypy never complains 40 | # about being unnecessary. 41 | 42 | import aiohttp 43 | import aiohttp.client_exceptions as aiohttp_exceptions 44 | 45 | # We do this because we don't explicitly require 'yarl' 46 | # within our [async] extra any more. 47 | # See AIOHttpConnection.request() for more information why. 
48 | try: 49 | import yarl 50 | except ImportError: 51 | yarl = False 52 | 53 | __all__ = ["aiohttp", "aiohttp_exceptions", "yarl"] 54 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/McIdasImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # Basic McIdas support for PIL 6 | # 7 | # History: 8 | # 1997-05-05 fl Created (8-bit images only) 9 | # 2009-03-08 fl Added 16/32-bit support. 10 | # 11 | # Thanks to Richard Jones and Craig Swank for specs and samples. 12 | # 13 | # Copyright (c) Secret Labs AB 1997. 14 | # Copyright (c) Fredrik Lundh 1997. 15 | # 16 | # See the README file for information on usage and redistribution. 17 | # 18 | from __future__ import annotations 19 | 20 | import struct 21 | 22 | from . import Image, ImageFile 23 | 24 | 25 | def _accept(prefix: bytes) -> bool: 26 | return prefix[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" 27 | 28 | 29 | ## 30 | # Image plugin for McIdas area images. 31 | 32 | 33 | class McIdasImageFile(ImageFile.ImageFile): 34 | format = "MCIDAS" 35 | format_description = "McIdas area file" 36 | 37 | def _open(self) -> None: 38 | # parse area file directory 39 | assert self.fp is not None 40 | 41 | s = self.fp.read(256) 42 | if not _accept(s) or len(s) != 256: 43 | msg = "not an McIdas area file" 44 | raise SyntaxError(msg) 45 | 46 | self.area_descriptor_raw = s 47 | self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) 48 | 49 | # get mode 50 | if w[11] == 1: 51 | mode = rawmode = "L" 52 | elif w[11] == 2: 53 | # FIXME: add memory map support 54 | mode = "I" 55 | rawmode = "I;16B" 56 | elif w[11] == 4: 57 | # FIXME: add memory map support 58 | mode = "I" 59 | rawmode = "I;32B" 60 | else: 61 | msg = "unsupported McIdas format" 62 | raise SyntaxError(msg) 63 | 64 | self._mode = mode 65 | self._size = w[10], w[9] 66 | 67 | offset = w[34] + w[15] 68 | stride = w[15] + w[10] * w[11] * w[14] 69 | 70 | self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] 71 | 72 | 73 | # -------------------------------------------------------------------- 74 | # registry 75 | 76 | Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) 77 | 78 | # no default extension 79 | -------------------------------------------------------------------------------- /web_ui/UI_deployment_README.md: -------------------------------------------------------------------------------- 1 | # Deploy streamlit in EC2 2 | 3 | ### 1.Create EC2 instance 4 | 5 | Network settings choose "Allow HTTP traffic from the internet" 6 | 7 | ### 2.Connect to EC2, install the following dependencies: 8 | 9 | ``` 10 | sudo yum update 11 | sudo yum install nginx 12 | sudo yum install tmux -y 13 | sudo yum install python3-pip 14 | pip install streamlit 15 | ``` 16 | 17 | ### 3.Create nginx profiles 18 | 19 | ``` 20 | cd /etc/nginx/conf.d 21 | sudo touch streamlit.conf 22 | sudo chmod 777 streamlit.conf 23 | vi streamlit.conf 24 | ``` 25 | 26 | enter the template: 27 | 28 | ``` 29 | upstream ws-backend { 30 | server xxx.xxx.xxx.xxx:8501; 31 | } 32 | 33 | server { 34 | listen 80; 35 | server_name xxx.xxx.xxx.xxx; 36 | client_max_body_size 100m; 37 | 38 | location / { 39 | 40 | proxy_pass http://ws-backend; 41 | 42 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 43 | proxy_set_header Host $http_host; 44 | proxy_redirect off; 45 | proxy_http_version 1.1; 46 | proxy_set_header Upgrade 
$http_upgrade; 47 | proxy_set_header Connection "upgrade"; 48 | } 49 | } 50 | ``` 51 | 52 | Change the xxx.xxx.xxx.xxx to the EC2 private IP. 53 | 54 | 55 | ### 4. start nginx 56 | 57 | ``` 58 | sudo systemctl start nginx.service 59 | ``` 60 | 61 | ### 5.Run streamlit ui stript 62 | 63 | ``` 64 | cd /home/ec2-user/aws_product_search_solution/web_ui 65 | tmux 66 | streamlit run aws_search_demo.py 67 | ``` 68 | 69 | ### 6.Open ui page 70 | 71 | Enter the url in the webpage:http://EC2 public IP 72 | 73 | ### 7.configure the invoke url 74 | 75 | (1) copy the text search invoke url from api gateway 76 | 77 | ![EC2](../assets/text-search.png) 78 | 79 | paste the invoke url into the webpage api url input box 80 | 81 | ![EC2](../assets/text-search-ui.png) 82 | 83 | (2) copy the image search invoke url from api gateway 84 | 85 | ![EC2](../assets/image-search.png) 86 | 87 | paste the invoke url into the webpage api url input box 88 | 89 | ![EC2](../assets/image-search-ui.png) 90 | 91 | (3) copy the data load invoke url from api gateway 92 | 93 | ![EC2](../assets/data-load.png) 94 | 95 | paste the invoke url into the webpage api url input box 96 | 97 | ![EC2](../assets/data-load-ui.png) 98 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | This module offers general convenience and utility functions for dealing with 4 | datetimes. 5 | 6 | .. versionadded:: 2.7.0 7 | """ 8 | from __future__ import unicode_literals 9 | 10 | from datetime import datetime, time 11 | 12 | 13 | def today(tzinfo=None): 14 | """ 15 | Returns a :py:class:`datetime` representing the current day at midnight 16 | 17 | :param tzinfo: 18 | The time zone to attach (also used to determine the current day). 19 | 20 | :return: 21 | A :py:class:`datetime.datetime` object representing the current day 22 | at midnight. 23 | """ 24 | 25 | dt = datetime.now(tzinfo) 26 | return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) 27 | 28 | 29 | def default_tzinfo(dt, tzinfo): 30 | """ 31 | Sets the ``tzinfo`` parameter on naive datetimes only 32 | 33 | This is useful for example when you are provided a datetime that may have 34 | either an implicit or explicit time zone, such as when parsing a time zone 35 | string. 36 | 37 | .. doctest:: 38 | 39 | >>> from dateutil.tz import tzoffset 40 | >>> from dateutil.parser import parse 41 | >>> from dateutil.utils import default_tzinfo 42 | >>> dflt_tz = tzoffset("EST", -18000) 43 | >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) 44 | 2014-01-01 12:30:00+00:00 45 | >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) 46 | 2014-01-01 12:30:00-05:00 47 | 48 | :param dt: 49 | The datetime on which to replace the time zone 50 | 51 | :param tzinfo: 52 | The :py:class:`datetime.tzinfo` subclass instance to assign to 53 | ``dt`` if (and only if) it is naive. 54 | 55 | :return: 56 | Returns an aware :py:class:`datetime.datetime`. 57 | """ 58 | if dt.tzinfo is not None: 59 | return dt 60 | else: 61 | return dt.replace(tzinfo=tzinfo) 62 | 63 | 64 | def within_delta(dt1, dt2, delta): 65 | """ 66 | Useful for comparing two datetimes that may have a negligible difference 67 | to be considered equal. 
68 | """ 69 | delta = abs(delta) 70 | difference = dt1 - dt2 71 | return -delta <= difference <= delta 72 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/client/plugins.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | 10 | import warnings 11 | from typing import Any 12 | 13 | from ..plugins.alerting import AlertingClient 14 | from ..plugins.index_management import IndexManagementClient 15 | from ..plugins.knn import KnnClient 16 | from .client import Client 17 | from .utils import NamespacedClient 18 | 19 | 20 | class PluginsClient(NamespacedClient): 21 | alerting: Any 22 | index_management: Any 23 | 24 | def __init__(self, client: Client) -> None: 25 | super(PluginsClient, self).__init__(client) 26 | self.knn = KnnClient(client) 27 | # self.query_workbench = QueryWorkbenchClient(client) 28 | # self.reporting = ReportingClient(client) 29 | # self.notebooks = NotebooksClient(client) 30 | self.alerting = AlertingClient(client) 31 | # self.anomaly_detection = AnomalyDetectionClient(client) 32 | # self.trace_analytics = TraceAnalyticsClient(client) 33 | self.index_management = IndexManagementClient(client) 34 | 35 | self._dynamic_lookup(client) 36 | 37 | def _dynamic_lookup(self, client: Any) -> None: 38 | # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 39 | 40 | plugins = [ 41 | # "query_workbench", 42 | # "reporting", 43 | # "notebooks", 44 | "alerting", 45 | # "anomaly_detection", 46 | # "trace_analytics", 47 | "index_management", 48 | ] 49 | for plugin in plugins: 50 | if not hasattr(client, plugin): 51 | setattr(client, plugin, getattr(self, plugin)) 52 | else: 53 | warnings.warn( 54 | f"Cannot load `{plugin}` directly to {self.client.__class__.__name__} as it already exists. Use `{self.client.__class__.__name__}.plugin.{plugin}` instead.", 55 | category=RuntimeWarning, 56 | stacklevel=2, 57 | ) 58 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer/legacy.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional, Union 2 | from warnings import warn 3 | 4 | from .api import from_bytes 5 | from .constant import CHARDET_CORRESPONDENCE 6 | 7 | 8 | def detect( 9 | byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any 10 | ) -> Dict[str, Optional[Union[str, float]]]: 11 | """ 12 | chardet legacy method 13 | Detect the encoding of the given byte string. It should be mostly backward-compatible. 14 | Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) 15 | This function is deprecated and should be used to migrate your project easily, consult the documentation for 16 | further information. Not planned for removal. 17 | 18 | :param byte_str: The byte sequence to examine. 19 | :param should_rename_legacy: Should we rename legacy encodings 20 | to their more modern equivalents? 
21 | """ 22 | if len(kwargs): 23 | warn( 24 | f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" 25 | ) 26 | 27 | if not isinstance(byte_str, (bytearray, bytes)): 28 | raise TypeError( # pragma: nocover 29 | "Expected object of type bytes or bytearray, got: " 30 | "{0}".format(type(byte_str)) 31 | ) 32 | 33 | if isinstance(byte_str, bytearray): 34 | byte_str = bytes(byte_str) 35 | 36 | r = from_bytes(byte_str).best() 37 | 38 | encoding = r.encoding if r is not None else None 39 | language = r.language if r is not None and r.language != "Unknown" else "" 40 | confidence = 1.0 - r.chaos if r is not None else None 41 | 42 | # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process 43 | # but chardet does return 'utf-8-sig' and it is a valid codec name. 44 | if r is not None and encoding == "utf_8" and r.bom: 45 | encoding += "_sig" 46 | 47 | if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: 48 | encoding = CHARDET_CORRESPONDENCE[encoding] 49 | 50 | return { 51 | "encoding": encoding, 52 | "language": language, 53 | "confidence": confidence, 54 | } 55 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_deprecate.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import warnings 4 | 5 | from . import __version__ 6 | 7 | 8 | def deprecate( 9 | deprecated: str, 10 | when: int | None, 11 | replacement: str | None = None, 12 | *, 13 | action: str | None = None, 14 | plural: bool = False, 15 | ) -> None: 16 | """ 17 | Deprecations helper. 18 | 19 | :param deprecated: Name of thing to be deprecated. 20 | :param when: Pillow major version to be removed in. 21 | :param replacement: Name of replacement. 22 | :param action: Instead of "replacement", give a custom call to action 23 | e.g. "Upgrade to new thing". 24 | :param plural: if the deprecated thing is plural, needing "are" instead of "is". 25 | 26 | Usually of the form: 27 | 28 | "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). 29 | Use [replacement] instead." 30 | 31 | You can leave out the replacement sentence: 32 | 33 | "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd)" 34 | 35 | Or with another call to action: 36 | 37 | "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). 38 | [action]." 39 | """ 40 | 41 | is_ = "are" if plural else "is" 42 | 43 | if when is None: 44 | removed = "a future version" 45 | elif when <= int(__version__.split(".")[0]): 46 | msg = f"{deprecated} {is_} deprecated and should be removed." 47 | raise RuntimeError(msg) 48 | elif when == 11: 49 | removed = "Pillow 11 (2024-10-15)" 50 | elif when == 12: 51 | removed = "Pillow 12 (2025-10-15)" 52 | else: 53 | msg = f"Unknown removal version: {when}. Update {__name__}?" 54 | raise ValueError(msg) 55 | 56 | if replacement and action: 57 | msg = "Use only one of 'replacement' and 'action'" 58 | raise ValueError(msg) 59 | 60 | if replacement: 61 | action = f". Use {replacement} instead." 62 | elif action: 63 | action = f". {action.rstrip('.')}." 
64 | else: 65 | action = "" 66 | 67 | warnings.warn( 68 | f"{deprecated} {is_} deprecated and will be removed in {removed}{action}", 69 | DeprecationWarning, 70 | stacklevel=3, 71 | ) 72 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/client/plugins.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | 10 | import warnings 11 | from typing import Any 12 | 13 | from ..plugins.alerting import AlertingClient 14 | from ..plugins.index_management import IndexManagementClient 15 | from ..plugins.knn import KnnClient 16 | from .client import Client 17 | from .utils import NamespacedClient 18 | 19 | 20 | class PluginsClient(NamespacedClient): 21 | alerting: Any 22 | index_management: Any 23 | 24 | def __init__(self, client: Client) -> None: 25 | super(PluginsClient, self).__init__(client) 26 | self.knn = KnnClient(client) 27 | # self.query_workbench = QueryWorkbenchClient(client) 28 | # self.reporting = ReportingClient(client) 29 | # self.notebooks = NotebooksClient(client) 30 | self.alerting = AlertingClient(client) 31 | # self.anomaly_detection = AnomalyDetectionClient(client) 32 | # self.trace_analytics = TraceAnalyticsClient(client) 33 | self.index_management = IndexManagementClient(client) 34 | 35 | self._dynamic_lookup(client) 36 | 37 | def _dynamic_lookup(self, client: Any) -> None: 38 | # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 39 | 40 | plugins = [ 41 | # "query_workbench", 42 | # "reporting", 43 | # "notebooks", 44 | "alerting", 45 | # "anomaly_detection", 46 | # "trace_analytics", 47 | "index_management", 48 | ] 49 | for plugin in plugins: 50 | if not hasattr(client, plugin): 51 | setattr(client, plugin, getattr(self, plugin)) 52 | else: 53 | warnings.warn( 54 | f"Cannot load `{plugin}` directly to {self.client.__class__.__name__} as it already exists. Use `{self.client.__class__.__name__}.plugin.{plugin}` instead.", 55 | category=RuntimeWarning, 56 | stacklevel=2, 57 | ) 58 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/DcxImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # DCX file handling 6 | # 7 | # DCX is a container file format defined by Intel, commonly used 8 | # for fax applications. Each DCX file consists of a directory 9 | # (a list of file offsets) followed by a set of (usually 1-bit) 10 | # PCX files. 11 | # 12 | # History: 13 | # 1995-09-09 fl Created 14 | # 1996-03-20 fl Properly derived from PcxImageFile. 15 | # 1998-07-15 fl Renamed offset attribute to avoid name clash 16 | # 2002-07-30 fl Fixed file handling 17 | # 18 | # Copyright (c) 1997-98 by Secret Labs AB. 19 | # Copyright (c) 1995-96 by Fredrik Lundh. 20 | # 21 | # See the README file for information on usage and redistribution. 22 | # 23 | from __future__ import annotations 24 | 25 | from . import Image 26 | from ._binary import i32le as i32 27 | from .PcxImagePlugin import PcxImageFile 28 | 29 | MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? 
30 | 31 | 32 | def _accept(prefix): 33 | return len(prefix) >= 4 and i32(prefix) == MAGIC 34 | 35 | 36 | ## 37 | # Image plugin for the Intel DCX format. 38 | 39 | 40 | class DcxImageFile(PcxImageFile): 41 | format = "DCX" 42 | format_description = "Intel DCX" 43 | _close_exclusive_fp_after_loading = False 44 | 45 | def _open(self): 46 | # Header 47 | s = self.fp.read(4) 48 | if not _accept(s): 49 | msg = "not a DCX file" 50 | raise SyntaxError(msg) 51 | 52 | # Component directory 53 | self._offset = [] 54 | for i in range(1024): 55 | offset = i32(self.fp.read(4)) 56 | if not offset: 57 | break 58 | self._offset.append(offset) 59 | 60 | self._fp = self.fp 61 | self.frame = None 62 | self.n_frames = len(self._offset) 63 | self.is_animated = self.n_frames > 1 64 | self.seek(0) 65 | 66 | def seek(self, frame): 67 | if not self._seek_check(frame): 68 | return 69 | self.frame = frame 70 | self.fp = self._fp 71 | self.fp.seek(self._offset[frame]) 72 | PcxImageFile._open(self) 73 | 74 | def tell(self): 75 | return self.frame 76 | 77 | 78 | Image.register_open(DcxImageFile.format, DcxImageFile, _accept) 79 | 80 | Image.register_extension(DcxImageFile.format, ".dcx") 81 | -------------------------------------------------------------------------------- /lambda/bedrock_invoke/lambda_function.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import json 4 | 5 | import boto3 6 | import botocore 7 | 8 | from bedrockAdapter import BedrockAdapter 9 | 10 | boto3_bedrock = boto3.client( 11 | service_name="bedrock-runtime", region_name=os.environ.get("AWS_REGION") 12 | ) 13 | 14 | 15 | def lambda_handler(event, context): 16 | 17 | print("event:",event) 18 | print("boto3_bedrock:",boto3_bedrock) 19 | prompt_data = """hello""" 20 | 21 | prompt=prompt_data 22 | if "prompt" in event['queryStringParameters'].keys(): 23 | prompt = event['queryStringParameters']['prompt'] 24 | print('prompt:',prompt) 25 | 26 | max_tokens=512 27 | if "max_tokens" in event['queryStringParameters'].keys(): 28 | max_tokens = int(event['queryStringParameters']['max_tokens']) 29 | print('max_tokens:',max_tokens) 30 | 31 | modelId = 'anthropic.claude-v2' 32 | if "modelId" in event['queryStringParameters'].keys(): 33 | modelId = event['queryStringParameters']['modelId'] 34 | print('modelId:',modelId) 35 | 36 | temperature=0.01 37 | if "temperature" in event['queryStringParameters'].keys(): 38 | temperature = float(event['queryStringParameters']['temperature']) 39 | print('temperature:',temperature) 40 | 41 | provider = modelId.split(".")[0] 42 | params = {"max_tokens": max_tokens,"temperature": temperature} 43 | params["modelId"] = modelId 44 | input_body = BedrockAdapter.prepare_input(provider, prompt, params) 45 | body = json.dumps(input_body) 46 | 47 | accept = "application/json" 48 | if modelId == 'meta.llama2-13b-chat-v1': 49 | accept = "*/*" 50 | contentType = "application/json" 51 | 52 | result = boto3_bedrock.invoke_model( 53 | body=body, modelId=modelId, accept=accept, contentType=contentType 54 | ) 55 | 56 | answer = BedrockAdapter.prepare_output(provider,result) 57 | print('answer:',answer) 58 | 59 | response = { 60 | "statusCode": 200, 61 | "headers": { 62 | "Access-Control-Allow-Origin": '*' 63 | }, 64 | "isBase64Encoded": False 65 | } 66 | 67 | response['body'] = json.dumps( 68 | { 69 | 'answer':answer, 70 | }) 71 | 72 | 73 | return response 74 | 75 | -------------------------------------------------------------------------------- 
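For reference, the handler above reads `prompt`, `modelId`, `max_tokens` and `temperature` from API Gateway query-string parameters and returns a JSON body with an `answer` field. Below is a minimal sketch of calling it through its API Gateway endpoint; the invoke URL and stage are placeholders for the ones created by the deployment, and the `/bedrock` path follows the URL built in `model.py` above.

```
import requests

# Placeholder invoke URL; substitute the bedrock_invoke API Gateway URL from your deployment.
api_url = "https://<api-id>.execute-api.<region>.amazonaws.com/<stage>/bedrock"

params = {
    "prompt": "Summarize the product description in one sentence.",
    "modelId": "anthropic.claude-v2",  # any model id supported by BedrockAdapter
    "max_tokens": 256,
    "temperature": 0.01,
}

# requests URL-encodes the query parameters before sending them.
resp = requests.get(api_url, params=params, timeout=30)
print(resp.json()["answer"])
```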
/lambda/search_layer/python/PIL/MpegImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # MPEG file handling 6 | # 7 | # History: 8 | # 95-09-09 fl Created 9 | # 10 | # Copyright (c) Secret Labs AB 1997. 11 | # Copyright (c) Fredrik Lundh 1995. 12 | # 13 | # See the README file for information on usage and redistribution. 14 | # 15 | from __future__ import annotations 16 | 17 | from . import Image, ImageFile 18 | from ._binary import i8 19 | from ._typing import SupportsRead 20 | 21 | # 22 | # Bitstream parser 23 | 24 | 25 | class BitStream: 26 | def __init__(self, fp: SupportsRead[bytes]) -> None: 27 | self.fp = fp 28 | self.bits = 0 29 | self.bitbuffer = 0 30 | 31 | def next(self) -> int: 32 | return i8(self.fp.read(1)) 33 | 34 | def peek(self, bits: int) -> int: 35 | while self.bits < bits: 36 | c = self.next() 37 | if c < 0: 38 | self.bits = 0 39 | continue 40 | self.bitbuffer = (self.bitbuffer << 8) + c 41 | self.bits += 8 42 | return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 43 | 44 | def skip(self, bits: int) -> None: 45 | while self.bits < bits: 46 | self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) 47 | self.bits += 8 48 | self.bits = self.bits - bits 49 | 50 | def read(self, bits: int) -> int: 51 | v = self.peek(bits) 52 | self.bits = self.bits - bits 53 | return v 54 | 55 | 56 | ## 57 | # Image plugin for MPEG streams. This plugin can identify a stream, 58 | # but it cannot read it. 59 | 60 | 61 | class MpegImageFile(ImageFile.ImageFile): 62 | format = "MPEG" 63 | format_description = "MPEG" 64 | 65 | def _open(self) -> None: 66 | assert self.fp is not None 67 | 68 | s = BitStream(self.fp) 69 | if s.read(32) != 0x1B3: 70 | msg = "not an MPEG file" 71 | raise SyntaxError(msg) 72 | 73 | self._mode = "RGB" 74 | self._size = s.read(12), s.read(12) 75 | 76 | 77 | # -------------------------------------------------------------------- 78 | # Registry stuff 79 | 80 | Image.register_open(MpegImageFile.format, MpegImageFile) 81 | 82 | Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"]) 83 | 84 | Image.register_mime(MpegImageFile.format, "video/mpeg") 85 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/__init__.py: -------------------------------------------------------------------------------- 1 | """Pillow (Fork of the Python Imaging Library) 2 | 3 | Pillow is the friendly PIL fork by Jeffrey A. Clark and contributors. 4 | https://github.com/python-pillow/Pillow/ 5 | 6 | Pillow is forked from PIL 1.1.7. 7 | 8 | PIL is the Python Imaging Library by Fredrik Lundh and contributors. 9 | Copyright (c) 1999 by Secret Labs AB. 10 | 11 | Use PIL.__version__ for this Pillow version. 12 | 13 | ;-) 14 | """ 15 | 16 | from __future__ import annotations 17 | 18 | from . import _version 19 | 20 | # VERSION was removed in Pillow 6.0.0. 21 | # PILLOW_VERSION was removed in Pillow 9.0.0. 22 | # Use __version__ instead. 
23 | __version__ = _version.__version__ 24 | del _version 25 | 26 | 27 | _plugins = [ 28 | "BlpImagePlugin", 29 | "BmpImagePlugin", 30 | "BufrStubImagePlugin", 31 | "CurImagePlugin", 32 | "DcxImagePlugin", 33 | "DdsImagePlugin", 34 | "EpsImagePlugin", 35 | "FitsImagePlugin", 36 | "FliImagePlugin", 37 | "FpxImagePlugin", 38 | "FtexImagePlugin", 39 | "GbrImagePlugin", 40 | "GifImagePlugin", 41 | "GribStubImagePlugin", 42 | "Hdf5StubImagePlugin", 43 | "IcnsImagePlugin", 44 | "IcoImagePlugin", 45 | "ImImagePlugin", 46 | "ImtImagePlugin", 47 | "IptcImagePlugin", 48 | "JpegImagePlugin", 49 | "Jpeg2KImagePlugin", 50 | "McIdasImagePlugin", 51 | "MicImagePlugin", 52 | "MpegImagePlugin", 53 | "MpoImagePlugin", 54 | "MspImagePlugin", 55 | "PalmImagePlugin", 56 | "PcdImagePlugin", 57 | "PcxImagePlugin", 58 | "PdfImagePlugin", 59 | "PixarImagePlugin", 60 | "PngImagePlugin", 61 | "PpmImagePlugin", 62 | "PsdImagePlugin", 63 | "QoiImagePlugin", 64 | "SgiImagePlugin", 65 | "SpiderImagePlugin", 66 | "SunImagePlugin", 67 | "TgaImagePlugin", 68 | "TiffImagePlugin", 69 | "WebPImagePlugin", 70 | "WmfImagePlugin", 71 | "XbmImagePlugin", 72 | "XpmImagePlugin", 73 | "XVThumbImagePlugin", 74 | ] 75 | 76 | 77 | class UnidentifiedImageError(OSError): 78 | """ 79 | Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified. 80 | 81 | If a PNG image raises this error, setting :data:`.ImageFile.LOAD_TRUNCATED_IMAGES` 82 | to true may allow the image to be opened after all. The setting will ignore missing 83 | data and checksum failures. 84 | """ 85 | 86 | pass 87 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/XVThumbImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # XV Thumbnail file handler by Charles E. "Gene" Cash 6 | # (gcash@magicnet.net) 7 | # 8 | # see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, 9 | # available from ftp://ftp.cis.upenn.edu/pub/xv/ 10 | # 11 | # history: 12 | # 98-08-15 cec created (b/w only) 13 | # 98-12-09 cec added color palette 14 | # 98-12-28 fl added to PIL (with only a few very minor modifications) 15 | # 16 | # To do: 17 | # FIXME: make save work (this requires quantization support) 18 | # 19 | from __future__ import annotations 20 | 21 | from . import Image, ImageFile, ImagePalette 22 | from ._binary import o8 23 | 24 | _MAGIC = b"P7 332" 25 | 26 | # standard color palette for thumbnails (RGB332) 27 | PALETTE = b"" 28 | for r in range(8): 29 | for g in range(8): 30 | for b in range(4): 31 | PALETTE = PALETTE + ( 32 | o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3) 33 | ) 34 | 35 | 36 | def _accept(prefix: bytes) -> bool: 37 | return prefix[:6] == _MAGIC 38 | 39 | 40 | ## 41 | # Image plugin for XV thumbnail images. 42 | 43 | 44 | class XVThumbImageFile(ImageFile.ImageFile): 45 | format = "XVThumb" 46 | format_description = "XV thumbnail image" 47 | 48 | def _open(self) -> None: 49 | # check magic 50 | assert self.fp is not None 51 | 52 | if not _accept(self.fp.read(6)): 53 | msg = "not an XV thumbnail file" 54 | raise SyntaxError(msg) 55 | 56 | # Skip to beginning of next line 57 | self.fp.readline() 58 | 59 | # skip info comments 60 | while True: 61 | s = self.fp.readline() 62 | if not s: 63 | msg = "Unexpected EOF reading XV thumbnail file" 64 | raise SyntaxError(msg) 65 | if s[0] != 35: # ie. 
when not a comment: '#' 66 | break 67 | 68 | # parse header line (already read) 69 | s = s.strip().split() 70 | 71 | self._mode = "P" 72 | self._size = int(s[0]), int(s[1]) 73 | 74 | self.palette = ImagePalette.raw("RGB", PALETTE) 75 | 76 | self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))] 77 | 78 | 79 | # -------------------------------------------------------------------- 80 | 81 | Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) 82 | -------------------------------------------------------------------------------- /lambda/search_layer/python/certifi-2024.2.2.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: certifi 3 | Version: 2024.2.2 4 | Summary: Python package for providing Mozilla's CA Bundle. 5 | Home-page: https://github.com/certifi/python-certifi 6 | Author: Kenneth Reitz 7 | Author-email: me@kennethreitz.com 8 | License: MPL-2.0 9 | Project-URL: Source, https://github.com/certifi/python-certifi 10 | Classifier: Development Status :: 5 - Production/Stable 11 | Classifier: Intended Audience :: Developers 12 | Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) 13 | Classifier: Natural Language :: English 14 | Classifier: Programming Language :: Python 15 | Classifier: Programming Language :: Python :: 3 16 | Classifier: Programming Language :: Python :: 3 :: Only 17 | Classifier: Programming Language :: Python :: 3.6 18 | Classifier: Programming Language :: Python :: 3.7 19 | Classifier: Programming Language :: Python :: 3.8 20 | Classifier: Programming Language :: Python :: 3.9 21 | Classifier: Programming Language :: Python :: 3.10 22 | Classifier: Programming Language :: Python :: 3.11 23 | Requires-Python: >=3.6 24 | License-File: LICENSE 25 | 26 | Certifi: Python SSL Certificates 27 | ================================ 28 | 29 | Certifi provides Mozilla's carefully curated collection of Root Certificates for 30 | validating the trustworthiness of SSL certificates while verifying the identity 31 | of TLS hosts. It has been extracted from the `Requests`_ project. 32 | 33 | Installation 34 | ------------ 35 | 36 | ``certifi`` is available on PyPI. Simply install it with ``pip``:: 37 | 38 | $ pip install certifi 39 | 40 | Usage 41 | ----- 42 | 43 | To reference the installed certificate authority (CA) bundle, you can use the 44 | built-in function:: 45 | 46 | >>> import certifi 47 | 48 | >>> certifi.where() 49 | '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' 50 | 51 | Or from the command line:: 52 | 53 | $ python -m certifi 54 | /usr/local/lib/python3.7/site-packages/certifi/cacert.pem 55 | 56 | Enjoy! 57 | 58 | .. _`Requests`: https://requests.readthedocs.io/en/master/ 59 | 60 | Addition/Removal of Certificates 61 | -------------------------------- 62 | 63 | Certifi does not support any addition/removal or other modification of the 64 | CA trust store content. This project is intended to provide a reliable and 65 | highly portable root of trust to python deployments. Look to upstream projects 66 | for methods to use alternate trust. 
67 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/connection/pooling.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from typing import Any 29 | 30 | from .base import Connection 31 | 32 | try: 33 | import queue 34 | except ImportError: 35 | import Queue as queue # type: ignore 36 | 37 | 38 | class PoolingConnection(Connection): 39 | _free_connections: queue.Queue[Connection] 40 | 41 | """ 42 | Base connection class for connections that use libraries without thread 43 | safety and no capacity for connection pooling. To use this just implement a 44 | ``_make_connection`` method that constructs a new connection and returns 45 | it. 46 | """ 47 | 48 | def __init__(self, *args: Any, **kwargs: Any) -> None: 49 | self._free_connections = queue.Queue() 50 | super(PoolingConnection, self).__init__(*args, **kwargs) 51 | 52 | def _make_connection(self) -> Connection: 53 | raise NotImplementedError 54 | 55 | def _get_connection(self) -> Connection: 56 | try: 57 | return self._free_connections.get_nowait() 58 | except queue.Empty: 59 | return self._make_connection() 60 | 61 | def _release_connection(self, con: Connection) -> None: 62 | self._free_connections.put(con) 63 | 64 | def close(self) -> None: 65 | """ 66 | Explicitly close connection 67 | """ 68 | pass 69 | -------------------------------------------------------------------------------- /deployment/Resource_deployment_README.md: -------------------------------------------------------------------------------- 1 | # Welcome to your CDK Python project! 2 | 3 | ### 0. Precondition 4 | 5 | Please make sure you have over 14 GB memory and Python 3 and npm installed on your environment. Linux or Mac OS preferred. 6 | 7 | If there's no npm, install via nvm: 8 | ``` 9 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash 10 | source /home/ec2-user/.bashrc 11 | ``` 12 | Note the v0.39.3 is just an example, download your preferred version.Then close and reopen terminal, then 13 | 14 | ``` 15 | nvm install v16.15.1 16 | ``` 17 | or 18 | ``` 19 | nvm install node 20 | ``` 21 | 22 | 23 | ### 1. Change directory to ./deployment folder 24 | 25 | ``` 26 | cd ./deployment 27 | ``` 28 | 29 | 30 | ### 2. 
-------------------------------------------------------------------------------- /deployment/Resource_deployment_README.md: -------------------------------------------------------------------------------- 1 | # Welcome to your CDK Python project! 2 | 3 | ### 0. Precondition 4 | 5 | Please make sure you have more than 14 GB of memory and that Python 3 and npm are installed in your environment. Linux or macOS is preferred. 6 | 7 | If npm is not installed, install it via nvm: 8 | ``` 9 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash 10 | source /home/ec2-user/.bashrc 11 | ``` 12 | Note that v0.39.3 is just an example; download your preferred version. Then close and reopen the terminal and run 13 | 14 | ``` 15 | nvm install v16.15.1 16 | ``` 17 | or 18 | ``` 19 | nvm install node 20 | ``` 21 | 22 | 23 | ### 1. Change directory to the ./deployment folder 24 | 25 | ``` 26 | cd ./deployment 27 | ``` 28 | 29 | 30 | ### 2. Install AWS CDK 31 | 32 | ``` 33 | npm install -g aws-cdk 34 | ``` 35 | 36 | 37 | ### 3. Install the Python dependencies for the CDK app 38 | 39 | ``` 40 | sudo yum install python3-pip 41 | pip install -r requirements.txt 42 | ``` 43 | (the `yum` command applies to Amazon Linux; on Ubuntu/Debian install pip with `sudo apt install python3-pip` instead) 44 | 45 | 46 | ### 4. Export your account configuration to the environment, then bootstrap the CDK so it can provision the infrastructure it needs to make changes to your AWS account 47 | ``` 48 | export AWS_ACCOUNT_ID=XXXXXXXXXXXX 49 | export AWS_REGION=xx-xx-x 50 | export AWS_ACCESS_KEY_ID=XXXXXX 51 | export AWS_SECRET_ACCESS_KEY=XXXXXXX 52 | ``` 53 | ``` 54 | cdk bootstrap aws://[your-account-id]/[your-region] 55 | ``` 56 | Bootstrapping only needs to be done once per account and region. 57 | 58 | 59 | ### 5. Validate the app and generate the CloudFormation template 60 | 61 | ``` 62 | cdk synth 63 | ``` 64 | If everything looks good, deploy all the stacks: 65 | ``` 66 | cdk deploy --all --require-approval never 67 | ``` 68 | 69 | ### 6. The CDK deployment creates CloudFormation stacks with the relevant resources, such as Lambda functions, an API Gateway and a SageMaker notebook. 70 | 71 | ### Clean Up 72 | When you no longer need the environment and want to clean it up, run: 73 | 74 | ``` 75 | $ cdk destroy --all 76 | ``` 77 | Resources that were not created by the CDK have to be cleaned up manually, e.g. SageMaker endpoints, endpoint configurations and models: go to the SageMaker page in the AWS console and delete the resources listed under the "Inference" section. 78 | 79 | ### Useful commands 80 | 81 | * `cdk ls` list all stacks in the app 82 | * `cdk synth` emits the synthesized CloudFormation template 83 | * `cdk deploy` deploy this stack to your default AWS account/region 84 | * `cdk diff` compare deployed stack with current state 85 | * `cdk docs` open CDK documentation 86 | -------------------------------------------------------------------------------- /deployment/lib/ss_search_notebook.py: -------------------------------------------------------------------------------- 1 | from constructs import Construct 2 | import os 3 | import aws_cdk as cdk 4 | from aws_cdk import ( 5 | Stack, 6 | CfnParameter, 7 | Aws, 8 | Duration, 9 | aws_secretsmanager, 10 | aws_sagemaker as _sagemaker, 11 | aws_iam as _iam 12 | ) 13 | import sagemaker 14 | import boto3 15 | import json 16 | from sagemaker.serializers import JSONSerializer 17 | from sagemaker.deserializers import JSONDeserializer 18 | 19 | 20 | region = os.getenv('AWS_REGION', '') 21 | 22 | class SearchNotebookStack(cdk.Stack): 23 | def __init__(self, scope: Construct, construct_id: str, **kwargs): 24 | super().__init__(scope, construct_id, **kwargs) 25 | 26 | # set role for sagemaker notebook 27 | self.notebook_job_role = _iam.Role( 28 | self,'SearchNotebookRole', 29 | assumed_by=_iam.ServicePrincipal('sagemaker.amazonaws.com'), 30 | description='IAM role for notebook job', 31 | ) 32 | self.notebook_job_role.add_managed_policy(_iam.ManagedPolicy.from_aws_managed_policy_name('AmazonS3FullAccess')) 33 | self.notebook_job_role.add_managed_policy(_iam.ManagedPolicy.from_aws_managed_policy_name('AmazonSageMakerFullAccess')) 34 | self.notebook_job_role.add_managed_policy(_iam.ManagedPolicy.from_aws_managed_policy_name('IAMFullAccess')) 35 | 36 | print('Deploying SageMaker via Notebook...') 37 | self.createNotebookInstanceByCDK() 38 | 39 | def createNotebookInstanceByCDK(self): 40 | notebook_lifecycle = _sagemaker.CfnNotebookInstanceLifecycleConfig( 41 | self, f'Search-LifeCycleConfig', 42 | notebook_instance_lifecycle_config_name='ss-config', 43 |
on_create=[_sagemaker.CfnNotebookInstanceLifecycleConfig.NotebookInstanceLifecycleHookProperty( 44 | content=cdk.Fn.base64(f""" 45 | #!/bin/bash 46 | cd /home/ec2-user/SageMaker 47 | git clone -b main --single-branch https://github.com/aws-samples/intelligent-product-search-with-llm.git 48 | chmod -R 777 ./ 49 | 50 | """) 51 | )] 52 | ) 53 | cfn_notebook_instance = _sagemaker.CfnNotebookInstance(self,"SearchGuideNotebook", 54 | notebook_instance_name="SearchGuideNotebook", 55 | role_arn=self.notebook_job_role.role_arn, 56 | instance_type="ml.t3.xlarge", 57 | lifecycle_config_name='ss-config', 58 | volume_size_in_gb=100) 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/helpers/response/hit.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | from typing import Any 28 | 29 | from ..utils import AttrDict, HitMeta 30 | 31 | 32 | class Hit(AttrDict): 33 | def __init__(self, document: Any) -> None: 34 | data = {} 35 | if "_source" in document: 36 | data = document["_source"] 37 | if "fields" in document: 38 | data.update(document["fields"]) 39 | 40 | super(Hit, self).__init__(data) 41 | # assign meta as attribute and not as key in self._d_ 42 | super(AttrDict, self).__setattr__("meta", HitMeta(document)) 43 | 44 | def __getstate__(self) -> Any: 45 | # add self.meta since it is not in self.__dict__ 46 | return super(Hit, self).__getstate__() + (self.meta,) 47 | 48 | def __setstate__(self, state: Any) -> None: 49 | super(AttrDict, self).__setattr__("meta", state[-1]) 50 | super(Hit, self).__setstate__(state[:-1]) 51 | 52 | def __dir__(self) -> Any: 53 | # be sure to expose meta in dir(self) 54 | return super(Hit, self).__dir__() + ["meta"] 55 | 56 | def __repr__(self) -> str: 57 | return "<Hit({}): {}>".format( 58 | "/".join( 59 | getattr(self.meta, key) for key in ("index", "id") if key in self.meta 60 | ), 61 | super(Hit, self).__repr__(), 62 | ) 63 | 64 | 65 | __all__ = ["Hit", "HitMeta"] 66 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/ImageSequence.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library.
3 | # $Id$ 4 | # 5 | # sequence support classes 6 | # 7 | # history: 8 | # 1997-02-20 fl Created 9 | # 10 | # Copyright (c) 1997 by Secret Labs AB. 11 | # Copyright (c) 1997 by Fredrik Lundh. 12 | # 13 | # See the README file for information on usage and redistribution. 14 | # 15 | 16 | ## 17 | from __future__ import annotations 18 | 19 | from typing import Callable 20 | 21 | from . import Image 22 | 23 | 24 | class Iterator: 25 | """ 26 | This class implements an iterator object that can be used to loop 27 | over an image sequence. 28 | 29 | You can use the ``[]`` operator to access elements by index. This operator 30 | will raise an :py:exc:`IndexError` if you try to access a nonexistent 31 | frame. 32 | 33 | :param im: An image object. 34 | """ 35 | 36 | def __init__(self, im: Image.Image): 37 | if not hasattr(im, "seek"): 38 | msg = "im must have seek method" 39 | raise AttributeError(msg) 40 | self.im = im 41 | self.position = getattr(self.im, "_min_frame", 0) 42 | 43 | def __getitem__(self, ix: int) -> Image.Image: 44 | try: 45 | self.im.seek(ix) 46 | return self.im 47 | except EOFError as e: 48 | msg = "end of sequence" 49 | raise IndexError(msg) from e 50 | 51 | def __iter__(self) -> Iterator: 52 | return self 53 | 54 | def __next__(self) -> Image.Image: 55 | try: 56 | self.im.seek(self.position) 57 | self.position += 1 58 | return self.im 59 | except EOFError as e: 60 | msg = "end of sequence" 61 | raise StopIteration(msg) from e 62 | 63 | 64 | def all_frames( 65 | im: Image.Image | list[Image.Image], 66 | func: Callable[[Image.Image], Image.Image] | None = None, 67 | ) -> list[Image.Image]: 68 | """ 69 | Applies a given function to all frames in an image or a list of images. 70 | The frames are returned as a list of separate images. 71 | 72 | :param im: An image, or a list of images. 73 | :param func: The function to apply to all of the image frames. 74 | :returns: A list of images. 75 | """ 76 | if not isinstance(im, list): 77 | im = [im] 78 | 79 | ims = [] 80 | for imSequence in im: 81 | current = imSequence.tell() 82 | 83 | ims += [im_frame.copy() for im_frame in Iterator(imSequence)] 84 | 85 | imSequence.seek(current) 86 | return [func(im) for im in ims] if func else ims 87 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/client/features.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. 
See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 26 | 27 | 28 | from typing import Any 29 | 30 | from .utils import NamespacedClient, query_params 31 | 32 | 33 | class FeaturesClient(NamespacedClient): 34 | @query_params("master_timeout", "cluster_manager_timeout") 35 | def get_features(self, params: Any = None, headers: Any = None) -> Any: 36 | """ 37 | Gets a list of features which can be included in snapshots using the 38 | feature_states field when creating a snapshot 39 | 40 | 41 | :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection 42 | to master node 43 | :arg cluster_manager_timeout: Explicit operation timeout for connection 44 | to cluster_manager node 45 | """ 46 | return self.transport.perform_request( 47 | "GET", "/_features", params=params, headers=headers 48 | ) 49 | 50 | @query_params() 51 | def reset_features(self, params: Any = None, headers: Any = None) -> Any: 52 | """ 53 | Resets the internal state of features, usually by deleting system indices 54 | 55 | 56 | .. warning:: 57 | 58 | This API is **experimental** so may include breaking changes 59 | or be removed in a future version 60 | """ 61 | return self.transport.perform_request( 62 | "POST", "/_features/_reset", params=params, headers=headers 63 | ) 64 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/_async/client/features.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 
26 | 27 | 28 | from typing import Any 29 | 30 | from .utils import NamespacedClient, query_params 31 | 32 | 33 | class FeaturesClient(NamespacedClient): 34 | @query_params("master_timeout", "cluster_manager_timeout") 35 | async def get_features(self, params: Any = None, headers: Any = None) -> Any: 36 | """ 37 | Gets a list of features which can be included in snapshots using the 38 | feature_states field when creating a snapshot 39 | 40 | 41 | :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection 42 | to master node 43 | :arg cluster_manager_timeout: Explicit operation timeout for connection 44 | to cluster_manager node 45 | """ 46 | return await self.transport.perform_request( 47 | "GET", "/_features", params=params, headers=headers 48 | ) 49 | 50 | @query_params() 51 | async def reset_features(self, params: Any = None, headers: Any = None) -> Any: 52 | """ 53 | Resets the internal state of features, usually by deleting system indices 54 | 55 | 56 | .. warning:: 57 | 58 | This API is **experimental** so may include breaking changes 59 | or be removed in a future version 60 | """ 61 | return await self.transport.perform_request( 62 | "POST", "/_features/_reset", params=params, headers=headers 63 | ) 64 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/compat.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | # 10 | # Licensed to Elasticsearch B.V. under one or more contributor 11 | # license agreements. See the NOTICE file distributed with 12 | # this work for additional information regarding copyright 13 | # ownership. Elasticsearch B.V. licenses this file to you under 14 | # the Apache License, Version 2.0 (the "License"); you may 15 | # not use this file except in compliance with the License. 16 | # You may obtain a copy of the License at 17 | # 18 | # http://www.apache.org/licenses/LICENSE-2.0 19 | # 20 | # Unless required by applicable law or agreed to in writing, 21 | # software distributed under the License is distributed on an 22 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 23 | # KIND, either express or implied. See the License for the 24 | # specific language governing permissions and limitations 25 | # under the License. 
26 | 27 | 28 | from collections.abc import Mapping 29 | from queue import Queue 30 | from typing import Tuple, Type, Union 31 | from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse 32 | 33 | string_types = str, bytes 34 | map = map # pylint: disable=invalid-name 35 | 36 | 37 | def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: 38 | """ 39 | returns x as a string encoded in "encoding" if it is not already a string 40 | :param x: the value to convert to a str 41 | :param encoding: the encoding to convert to - see https://docs.python.org/3/library/codecs.html#standard-encodings 42 | :return: an encoded str 43 | """ 44 | if not isinstance(x, str): 45 | return x.decode(encoding) 46 | return x 47 | 48 | 49 | def to_bytes(x: Union[str, bytes], encoding: str = "ascii") -> bytes: 50 | if not isinstance(x, bytes): 51 | return x.encode(encoding) 52 | return x 53 | 54 | 55 | try: 56 | reraise_exceptions: Tuple[Type[BaseException], ...] = (RecursionError,) 57 | except NameError: 58 | reraise_exceptions = () 59 | 60 | try: 61 | import asyncio 62 | 63 | reraise_exceptions += (asyncio.CancelledError,) 64 | except (ImportError, AttributeError): 65 | pass 66 | 67 | 68 | __all__ = [ 69 | "string_types", 70 | "reraise_exceptions", 71 | "quote_plus", 72 | "quote", 73 | "urlencode", 74 | "unquote", 75 | "urlparse", 76 | "map", 77 | "Queue", 78 | "Mapping", 79 | ] 80 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/zoneinfo/rebuild.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import tempfile 4 | import shutil 5 | import json 6 | from subprocess import check_call, check_output 7 | from tarfile import TarFile 8 | 9 | from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME 10 | 11 | 12 | def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): 13 | """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* 14 | 15 | filename is the timezone tarball from ``ftp.iana.org/tz``. 16 | 17 | """ 18 | tmpdir = tempfile.mkdtemp() 19 | zonedir = os.path.join(tmpdir, "zoneinfo") 20 | moduledir = os.path.dirname(__file__) 21 | try: 22 | with TarFile.open(filename) as tf: 23 | for name in zonegroups: 24 | tf.extract(name, tmpdir) 25 | filepaths = [os.path.join(tmpdir, n) for n in zonegroups] 26 | 27 | _run_zic(zonedir, filepaths) 28 | 29 | # write metadata file 30 | with open(os.path.join(zonedir, METADATA_FN), 'w') as f: 31 | json.dump(metadata, f, indent=4, sort_keys=True) 32 | target = os.path.join(moduledir, ZONEFILENAME) 33 | with TarFile.open(target, "w:%s" % format) as tf: 34 | for entry in os.listdir(zonedir): 35 | entrypath = os.path.join(zonedir, entry) 36 | tf.add(entrypath, entry) 37 | finally: 38 | shutil.rmtree(tmpdir) 39 | 40 | 41 | def _run_zic(zonedir, filepaths): 42 | """Calls the ``zic`` compiler in a compatible way to get a "fat" binary. 43 | 44 | Recent versions of ``zic`` default to ``-b slim``, while older versions 45 | don't even have the ``-b`` option (but default to "fat" binaries). The 46 | current version of dateutil does not support Version 2+ TZif files, which 47 | causes problems when used in conjunction with "slim" binaries, so this 48 | function is used to ensure that we always get a "fat" binary. 
49 | """ 50 | 51 | try: 52 | help_text = check_output(["zic", "--help"]) 53 | except OSError as e: 54 | _print_on_nosuchfile(e) 55 | raise 56 | 57 | if b"-b " in help_text: 58 | bloat_args = ["-b", "fat"] 59 | else: 60 | bloat_args = [] 61 | 62 | check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths) 63 | 64 | 65 | def _print_on_nosuchfile(e): 66 | """Print helpful troubleshooting message 67 | 68 | e is an exception raised by subprocess.check_call() 69 | 70 | """ 71 | if e.errno == 2: 72 | logging.error( 73 | "Could not find zic. Perhaps you need to install " 74 | "libc-bin or some other package that provides it, " 75 | "or it's not in your PATH?") 76 | -------------------------------------------------------------------------------- /lambda/search_layer/python/urllib3/filepost.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import binascii 4 | import codecs 5 | import os 6 | from io import BytesIO 7 | 8 | from .fields import RequestField 9 | from .packages import six 10 | from .packages.six import b 11 | 12 | writer = codecs.lookup("utf-8")[3] 13 | 14 | 15 | def choose_boundary(): 16 | """ 17 | Our embarrassingly-simple replacement for mimetools.choose_boundary. 18 | """ 19 | boundary = binascii.hexlify(os.urandom(16)) 20 | if not six.PY2: 21 | boundary = boundary.decode("ascii") 22 | return boundary 23 | 24 | 25 | def iter_field_objects(fields): 26 | """ 27 | Iterate over fields. 28 | 29 | Supports list of (k, v) tuples and dicts, and lists of 30 | :class:`~urllib3.fields.RequestField`. 31 | 32 | """ 33 | if isinstance(fields, dict): 34 | i = six.iteritems(fields) 35 | else: 36 | i = iter(fields) 37 | 38 | for field in i: 39 | if isinstance(field, RequestField): 40 | yield field 41 | else: 42 | yield RequestField.from_tuples(*field) 43 | 44 | 45 | def iter_fields(fields): 46 | """ 47 | .. deprecated:: 1.6 48 | 49 | Iterate over fields. 50 | 51 | The addition of :class:`~urllib3.fields.RequestField` makes this function 52 | obsolete. Instead, use :func:`iter_field_objects`, which returns 53 | :class:`~urllib3.fields.RequestField` objects. 54 | 55 | Supports list of (k, v) tuples and dicts. 56 | """ 57 | if isinstance(fields, dict): 58 | return ((k, v) for k, v in six.iteritems(fields)) 59 | 60 | return ((k, v) for k, v in fields) 61 | 62 | 63 | def encode_multipart_formdata(fields, boundary=None): 64 | """ 65 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format. 66 | 67 | :param fields: 68 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). 69 | 70 | :param boundary: 71 | If not specified, then a random boundary will be generated using 72 | :func:`urllib3.filepost.choose_boundary`. 
73 | """ 74 | body = BytesIO() 75 | if boundary is None: 76 | boundary = choose_boundary() 77 | 78 | for field in iter_field_objects(fields): 79 | body.write(b("--%s\r\n" % (boundary))) 80 | 81 | writer(body).write(field.render_headers()) 82 | data = field.data 83 | 84 | if isinstance(data, int): 85 | data = str(data) # Backwards compatibility 86 | 87 | if isinstance(data, six.text_type): 88 | writer(body).write(data) 89 | else: 90 | body.write(data) 91 | 92 | body.write(b"\r\n") 93 | 94 | body.write(b("--%s--\r\n" % (boundary))) 95 | 96 | content_type = str("multipart/form-data; boundary=%s" % boundary) 97 | 98 | return body.getvalue(), content_type 99 | -------------------------------------------------------------------------------- /lambda/search_layer/python/charset_normalizer-3.3.2.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | ../../bin/normalizer,sha256=O1tLXvRzeuQHDVSDjsuiUko8eeXdZtA_eGTgJcdT5qs,233 2 | charset_normalizer-3.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 3 | charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 4 | charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550 5 | charset_normalizer-3.3.2.dist-info/RECORD,, 6 | charset_normalizer-3.3.2.dist-info/WHEEL,sha256=CzyHWKXay4N1oFds0wxFofK9MEs-L6SZ6gHGZF6-4Co,148 7 | charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 8 | charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 9 | charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577 10 | charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73 11 | charset_normalizer/__pycache__/__init__.cpython-38.pyc,, 12 | charset_normalizer/__pycache__/__main__.cpython-38.pyc,, 13 | charset_normalizer/__pycache__/api.cpython-38.pyc,, 14 | charset_normalizer/__pycache__/cd.cpython-38.pyc,, 15 | charset_normalizer/__pycache__/constant.cpython-38.pyc,, 16 | charset_normalizer/__pycache__/legacy.cpython-38.pyc,, 17 | charset_normalizer/__pycache__/md.cpython-38.pyc,, 18 | charset_normalizer/__pycache__/models.cpython-38.pyc,, 19 | charset_normalizer/__pycache__/utils.cpython-38.pyc,, 20 | charset_normalizer/__pycache__/version.cpython-38.pyc,, 21 | charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097 22 | charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560 23 | charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100 24 | charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744 25 | charset_normalizer/cli/__pycache__/__init__.cpython-38.pyc,, 26 | charset_normalizer/cli/__pycache__/__main__.cpython-38.pyc,, 27 | charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481 28 | charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071 29 | charset_normalizer/md.cpython-38-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064 30 | charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624 31 | charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so,sha256=d8E0dOlnRXncnIFlWCUfjlbyQODPyHD44RJzcK1E5AU,268752 32 | charset_normalizer/models.py,sha256=I5i0s4aKCCgLPY2tUY3pwkgFA-BUbbNxQ7hVkVTt62s,11624 33 | 
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 34 | charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894 35 | charset_normalizer/version.py,sha256=iHKUfHD3kDRSyrh_BN2ojh43TA5-UZQjvbVIEFfpHDs,79 36 | -------------------------------------------------------------------------------- /lambda/search_layer/python/requests-2.31.0.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | requests-2.31.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | requests-2.31.0.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142 3 | requests-2.31.0.dist-info/METADATA,sha256=eCPokOnbb0FROLrfl0R5EpDvdufsb9CaN4noJH__54I,4634 4 | requests-2.31.0.dist-info/RECORD,, 5 | requests-2.31.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 6 | requests-2.31.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9 7 | requests/__init__.py,sha256=LvmKhjIz8mHaKXthC2Mv5ykZ1d92voyf3oJpd-VuAig,4963 8 | requests/__pycache__/__init__.cpython-38.pyc,, 9 | requests/__pycache__/__version__.cpython-38.pyc,, 10 | requests/__pycache__/_internal_utils.cpython-38.pyc,, 11 | requests/__pycache__/adapters.cpython-38.pyc,, 12 | requests/__pycache__/api.cpython-38.pyc,, 13 | requests/__pycache__/auth.cpython-38.pyc,, 14 | requests/__pycache__/certs.cpython-38.pyc,, 15 | requests/__pycache__/compat.cpython-38.pyc,, 16 | requests/__pycache__/cookies.cpython-38.pyc,, 17 | requests/__pycache__/exceptions.cpython-38.pyc,, 18 | requests/__pycache__/help.cpython-38.pyc,, 19 | requests/__pycache__/hooks.cpython-38.pyc,, 20 | requests/__pycache__/models.cpython-38.pyc,, 21 | requests/__pycache__/packages.cpython-38.pyc,, 22 | requests/__pycache__/sessions.cpython-38.pyc,, 23 | requests/__pycache__/status_codes.cpython-38.pyc,, 24 | requests/__pycache__/structures.cpython-38.pyc,, 25 | requests/__pycache__/utils.cpython-38.pyc,, 26 | requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435 27 | requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495 28 | requests/adapters.py,sha256=v_FmjU5KZ76k-YttShZYB5RprIzhhL8Y3zgW9p4eBQ8,19553 29 | requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449 30 | requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187 31 | requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429 32 | requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451 33 | requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560 34 | requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811 35 | requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875 36 | requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 37 | requests/models.py,sha256=-DlKi0or8gFAM6VzutobXvvBW_2wrJuOF5NfndTIddA,35223 38 | requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957 39 | requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373 40 | requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235 41 | requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 42 | requests/utils.py,sha256=6sx2X3cIVA8BgWOg8odxFy-_lbWDFETU8HI4fU4Rmqw,33448 43 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/tz/_factories.py: 
-------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | import weakref 3 | from collections import OrderedDict 4 | 5 | from six.moves import _thread 6 | 7 | 8 | class _TzSingleton(type): 9 | def __init__(cls, *args, **kwargs): 10 | cls.__instance = None 11 | super(_TzSingleton, cls).__init__(*args, **kwargs) 12 | 13 | def __call__(cls): 14 | if cls.__instance is None: 15 | cls.__instance = super(_TzSingleton, cls).__call__() 16 | return cls.__instance 17 | 18 | 19 | class _TzFactory(type): 20 | def instance(cls, *args, **kwargs): 21 | """Alternate constructor that returns a fresh instance""" 22 | return type.__call__(cls, *args, **kwargs) 23 | 24 | 25 | class _TzOffsetFactory(_TzFactory): 26 | def __init__(cls, *args, **kwargs): 27 | cls.__instances = weakref.WeakValueDictionary() 28 | cls.__strong_cache = OrderedDict() 29 | cls.__strong_cache_size = 8 30 | 31 | cls._cache_lock = _thread.allocate_lock() 32 | 33 | def __call__(cls, name, offset): 34 | if isinstance(offset, timedelta): 35 | key = (name, offset.total_seconds()) 36 | else: 37 | key = (name, offset) 38 | 39 | instance = cls.__instances.get(key, None) 40 | if instance is None: 41 | instance = cls.__instances.setdefault(key, 42 | cls.instance(name, offset)) 43 | 44 | # This lock may not be necessary in Python 3. See GH issue #901 45 | with cls._cache_lock: 46 | cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) 47 | 48 | # Remove an item if the strong cache is overpopulated 49 | if len(cls.__strong_cache) > cls.__strong_cache_size: 50 | cls.__strong_cache.popitem(last=False) 51 | 52 | return instance 53 | 54 | 55 | class _TzStrFactory(_TzFactory): 56 | def __init__(cls, *args, **kwargs): 57 | cls.__instances = weakref.WeakValueDictionary() 58 | cls.__strong_cache = OrderedDict() 59 | cls.__strong_cache_size = 8 60 | 61 | cls.__cache_lock = _thread.allocate_lock() 62 | 63 | def __call__(cls, s, posix_offset=False): 64 | key = (s, posix_offset) 65 | instance = cls.__instances.get(key, None) 66 | 67 | if instance is None: 68 | instance = cls.__instances.setdefault(key, 69 | cls.instance(s, posix_offset)) 70 | 71 | # This lock may not be necessary in Python 3. See GH issue #901 72 | with cls.__cache_lock: 73 | cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) 74 | 75 | # Remove an item if the strong cache is overpopulated 76 | if len(cls.__strong_cache) > cls.__strong_cache_size: 77 | cls.__strong_cache.popitem(last=False) 78 | 79 | return instance 80 | 81 | -------------------------------------------------------------------------------- /lambda/search_layer/python/dateutil/easter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | This module offers a generic Easter computing method for any given year, using 4 | Western, Orthodox or Julian algorithms. 5 | """ 6 | 7 | import datetime 8 | 9 | __all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] 10 | 11 | EASTER_JULIAN = 1 12 | EASTER_ORTHODOX = 2 13 | EASTER_WESTERN = 3 14 | 15 | 16 | def easter(year, method=EASTER_WESTERN): 17 | """ 18 | This method was ported from the work done by GM Arts, 19 | on top of the algorithm by Claus Tondering, which was 20 | based in part on the algorithm of Ouding (1940), as 21 | quoted in "Explanatory Supplement to the Astronomical 22 | Almanac", P. Kenneth Seidelmann, editor. 
23 | 24 | This algorithm implements three different Easter 25 | calculation methods: 26 | 27 | 1. Original calculation in Julian calendar, valid in 28 | dates after 326 AD 29 | 2. Original method, with date converted to Gregorian 30 | calendar, valid in years 1583 to 4099 31 | 3. Revised method, in Gregorian calendar, valid in 32 | years 1583 to 4099 as well 33 | 34 | These methods are represented by the constants: 35 | 36 | * ``EASTER_JULIAN = 1`` 37 | * ``EASTER_ORTHODOX = 2`` 38 | * ``EASTER_WESTERN = 3`` 39 | 40 | The default method is method 3. 41 | 42 | More about the algorithm may be found at: 43 | 44 | `GM Arts: Easter Algorithms `_ 45 | 46 | and 47 | 48 | `The Calendar FAQ: Easter `_ 49 | 50 | """ 51 | 52 | if not (1 <= method <= 3): 53 | raise ValueError("invalid method") 54 | 55 | # g - Golden year - 1 56 | # c - Century 57 | # h - (23 - Epact) mod 30 58 | # i - Number of days from March 21 to Paschal Full Moon 59 | # j - Weekday for PFM (0=Sunday, etc) 60 | # p - Number of days from March 21 to Sunday on or before PFM 61 | # (-6 to 28 methods 1 & 3, to 56 for method 2) 62 | # e - Extra days to add for method 2 (converting Julian 63 | # date to Gregorian date) 64 | 65 | y = year 66 | g = y % 19 67 | e = 0 68 | if method < 3: 69 | # Old method 70 | i = (19*g + 15) % 30 71 | j = (y + y//4 + i) % 7 72 | if method == 2: 73 | # Extra dates to convert Julian to Gregorian date 74 | e = 10 75 | if y > 1600: 76 | e = e + y//100 - 16 - (y//100 - 16)//4 77 | else: 78 | # New method 79 | c = y//100 80 | h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 81 | i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) 82 | j = (y + y//4 + i + 2 - c + c//4) % 7 83 | 84 | # p can be from -6 to 56 corresponding to dates 22 March to 23 May 85 | # (later dates apply to method 2, although 23 May never actually occurs) 86 | p = i - j + e 87 | d = 1 + (p + 27 + (p + 6)//40) % 31 88 | m = 3 + (p + 26)//30 89 | return datetime.date(int(y), int(m), int(d)) 90 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/XbmImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # XBM File handling 6 | # 7 | # History: 8 | # 1995-09-08 fl Created 9 | # 1996-11-01 fl Added save support 10 | # 1997-07-07 fl Made header parser more tolerant 11 | # 1997-07-22 fl Fixed yet another parser bug 12 | # 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) 13 | # 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) 14 | # 2004-02-24 fl Allow some whitespace before first #define 15 | # 16 | # Copyright (c) 1997-2004 by Secret Labs AB 17 | # Copyright (c) 1996-1997 by Fredrik Lundh 18 | # 19 | # See the README file for information on usage and redistribution. 20 | # 21 | from __future__ import annotations 22 | 23 | import re 24 | from typing import IO 25 | 26 | from . import Image, ImageFile 27 | 28 | # XBM header 29 | xbm_head = re.compile( 30 | rb"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" 31 | b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" 32 | b"(?P" 33 | b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" 34 | b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" 35 | b")?" 36 | rb"[\000-\377]*_bits\[]" 37 | ) 38 | 39 | 40 | def _accept(prefix: bytes) -> bool: 41 | return prefix.lstrip()[:7] == b"#define" 42 | 43 | 44 | ## 45 | # Image plugin for X11 bitmaps. 
46 | 47 | 48 | class XbmImageFile(ImageFile.ImageFile): 49 | format = "XBM" 50 | format_description = "X11 Bitmap" 51 | 52 | def _open(self) -> None: 53 | assert self.fp is not None 54 | 55 | m = xbm_head.match(self.fp.read(512)) 56 | 57 | if not m: 58 | msg = "not a XBM file" 59 | raise SyntaxError(msg) 60 | 61 | xsize = int(m.group("width")) 62 | ysize = int(m.group("height")) 63 | 64 | if m.group("hotspot"): 65 | self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot"))) 66 | 67 | self._mode = "1" 68 | self._size = xsize, ysize 69 | 70 | self.tile = [("xbm", (0, 0) + self.size, m.end(), None)] 71 | 72 | 73 | def _save(im: Image.Image, fp: IO[bytes], filename: str) -> None: 74 | if im.mode != "1": 75 | msg = f"cannot write mode {im.mode} as XBM" 76 | raise OSError(msg) 77 | 78 | fp.write(f"#define im_width {im.size[0]}\n".encode("ascii")) 79 | fp.write(f"#define im_height {im.size[1]}\n".encode("ascii")) 80 | 81 | hotspot = im.encoderinfo.get("hotspot") 82 | if hotspot: 83 | fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii")) 84 | fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii")) 85 | 86 | fp.write(b"static char im_bits[] = {\n") 87 | 88 | ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)]) 89 | 90 | fp.write(b"};\n") 91 | 92 | 93 | Image.register_open(XbmImageFile.format, XbmImageFile, _accept) 94 | Image.register_save(XbmImageFile.format, _save) 95 | 96 | Image.register_extension(XbmImageFile.format, ".xbm") 97 | 98 | Image.register_mime(XbmImageFile.format, "image/xbm") 99 | -------------------------------------------------------------------------------- /lambda/search_layer/python/python_dateutil-2.9.0.post0.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017- Paul Ganssle 2 | Copyright 2017- dateutil contributors (see AUTHORS file) 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | 16 | The above license applies to all contributions after 2017-12-01, as well as 17 | all contributions that have been re-licensed (see AUTHORS file for the list of 18 | contributors who have re-licensed their code). 19 | -------------------------------------------------------------------------------- 20 | dateutil - Extensions to the standard Python datetime module. 21 | 22 | Copyright (c) 2003-2011 - Gustavo Niemeyer 23 | Copyright (c) 2012-2014 - Tomi Pieviläinen 24 | Copyright (c) 2014-2016 - Yaron de Leeuw 25 | Copyright (c) 2015- - Paul Ganssle 26 | Copyright (c) 2015- - dateutil contributors (see AUTHORS file) 27 | 28 | All rights reserved. 29 | 30 | Redistribution and use in source and binary forms, with or without 31 | modification, are permitted provided that the following conditions are met: 32 | 33 | * Redistributions of source code must retain the above copyright notice, 34 | this list of conditions and the following disclaimer. 
35 | * Redistributions in binary form must reproduce the above copyright notice, 36 | this list of conditions and the following disclaimer in the documentation 37 | and/or other materials provided with the distribution. 38 | * Neither the name of the copyright holder nor the names of its 39 | contributors may be used to endorse or promote products derived from 40 | this software without specific prior written permission. 41 | 42 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 43 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 44 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 45 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR 46 | CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 47 | EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 48 | PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 49 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 50 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 51 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 52 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 53 | 54 | The above BSD License Applies to all code, even that also covered by Apache 2.0. -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/_binary.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # Binary input/output support routines. 6 | # 7 | # Copyright (c) 1997-2003 by Secret Labs AB 8 | # Copyright (c) 1995-2003 by Fredrik Lundh 9 | # Copyright (c) 2012 by Brian Crowell 10 | # 11 | # See the README file for information on usage and redistribution. 12 | # 13 | 14 | 15 | """Binary input/output support routines.""" 16 | from __future__ import annotations 17 | 18 | from struct import pack, unpack_from 19 | 20 | 21 | def i8(c: bytes) -> int: 22 | return c[0] 23 | 24 | 25 | def o8(i: int) -> bytes: 26 | return bytes((i & 255,)) 27 | 28 | 29 | # Input, le = little endian, be = big endian 30 | def i16le(c: bytes, o: int = 0) -> int: 31 | """ 32 | Converts a 2-bytes (16 bits) string to an unsigned integer. 33 | 34 | :param c: string containing bytes to convert 35 | :param o: offset of bytes to convert in string 36 | """ 37 | return unpack_from(" int: 41 | """ 42 | Converts a 2-bytes (16 bits) string to a signed integer. 43 | 44 | :param c: string containing bytes to convert 45 | :param o: offset of bytes to convert in string 46 | """ 47 | return unpack_from(" int: 51 | """ 52 | Converts a 2-bytes (16 bits) string to a signed integer, big endian. 53 | 54 | :param c: string containing bytes to convert 55 | :param o: offset of bytes to convert in string 56 | """ 57 | return unpack_from(">h", c, o)[0] 58 | 59 | 60 | def i32le(c: bytes, o: int = 0) -> int: 61 | """ 62 | Converts a 4-bytes (32 bits) string to an unsigned integer. 63 | 64 | :param c: string containing bytes to convert 65 | :param o: offset of bytes to convert in string 66 | """ 67 | return unpack_from(" int: 71 | """ 72 | Converts a 4-bytes (32 bits) string to a signed integer. 73 | 74 | :param c: string containing bytes to convert 75 | :param o: offset of bytes to convert in string 76 | """ 77 | return unpack_from(" int: 81 | """ 82 | Converts a 4-bytes (32 bits) string to a signed integer, big endian. 
83 | 84 | :param c: string containing bytes to convert 85 | :param o: offset of bytes to convert in string 86 | """ 87 | return unpack_from(">i", c, o)[0] 88 | 89 | 90 | def i16be(c: bytes, o: int = 0) -> int: 91 | return unpack_from(">H", c, o)[0] 92 | 93 | 94 | def i32be(c: bytes, o: int = 0) -> int: 95 | return unpack_from(">I", c, o)[0] 96 | 97 | 98 | # Output, le = little endian, be = big endian 99 | def o16le(i: int) -> bytes: 100 | return pack(" bytes: 104 | return pack(" bytes: 108 | return pack(">H", i) 109 | 110 | 111 | def o32be(i: int) -> bytes: 112 | return pack(">I", i) 113 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/ImageMode.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # standard mode descriptors 6 | # 7 | # History: 8 | # 2006-03-20 fl Added 9 | # 10 | # Copyright (c) 2006 by Secret Labs AB. 11 | # Copyright (c) 2006 by Fredrik Lundh. 12 | # 13 | # See the README file for information on usage and redistribution. 14 | # 15 | from __future__ import annotations 16 | 17 | import sys 18 | from functools import lru_cache 19 | 20 | 21 | class ModeDescriptor: 22 | """Wrapper for mode strings.""" 23 | 24 | def __init__( 25 | self, 26 | mode: str, 27 | bands: tuple[str, ...], 28 | basemode: str, 29 | basetype: str, 30 | typestr: str, 31 | ) -> None: 32 | self.mode = mode 33 | self.bands = bands 34 | self.basemode = basemode 35 | self.basetype = basetype 36 | self.typestr = typestr 37 | 38 | def __str__(self) -> str: 39 | return self.mode 40 | 41 | 42 | @lru_cache 43 | def getmode(mode: str) -> ModeDescriptor: 44 | """Gets a mode descriptor for the given mode.""" 45 | # initialize mode cache 46 | endian = "<" if sys.byteorder == "little" else ">" 47 | 48 | modes = { 49 | # core modes 50 | # Bits need to be extended to bytes 51 | "1": ("L", "L", ("1",), "|b1"), 52 | "L": ("L", "L", ("L",), "|u1"), 53 | "I": ("L", "I", ("I",), endian + "i4"), 54 | "F": ("L", "F", ("F",), endian + "f4"), 55 | "P": ("P", "L", ("P",), "|u1"), 56 | "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"), 57 | "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"), 58 | "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"), 59 | "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"), 60 | "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"), 61 | # UNDONE - unsigned |u1i1i1 62 | "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"), 63 | "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"), 64 | # extra experimental modes 65 | "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"), 66 | "BGR;15": ("RGB", "L", ("B", "G", "R"), "|u1"), 67 | "BGR;16": ("RGB", "L", ("B", "G", "R"), "|u1"), 68 | "BGR;24": ("RGB", "L", ("B", "G", "R"), "|u1"), 69 | "LA": ("L", "L", ("L", "A"), "|u1"), 70 | "La": ("L", "L", ("L", "a"), "|u1"), 71 | "PA": ("RGB", "L", ("P", "A"), "|u1"), 72 | } 73 | if mode in modes: 74 | base_mode, base_type, bands, type_str = modes[mode] 75 | return ModeDescriptor(mode, bands, base_mode, base_type, type_str) 76 | 77 | mapping_modes = { 78 | # I;16 == I;16L, and I;32 == I;32L 79 | "I;16": "u2", 84 | "I;16BS": ">i2", 85 | "I;16N": endian + "u2", 86 | "I;16NS": endian + "i2", 87 | "I;32": "u4", 89 | "I;32L": "i4", 92 | "I;32LS": " 1 69 | 70 | self.__fp = self.fp 71 | self.seek(0) 72 | 73 | def seek(self, frame): 74 | if not self._seek_check(frame): 75 | return 76 | try: 77 | filename = self.images[frame] 78 | except IndexError as e: 79 | msg = "no such frame" 80 | raise EOFError(msg) 
from e 81 | 82 | self.fp = self.ole.openstream(filename) 83 | 84 | TiffImagePlugin.TiffImageFile._open(self) 85 | 86 | self.frame = frame 87 | 88 | def tell(self): 89 | return self.frame 90 | 91 | def close(self): 92 | self.__fp.close() 93 | self.ole.close() 94 | super().close() 95 | 96 | def __exit__(self, *args): 97 | self.__fp.close() 98 | self.ole.close() 99 | super().__exit__() 100 | 101 | 102 | # 103 | # -------------------------------------------------------------------- 104 | 105 | Image.register_open(MicImageFile.format, MicImageFile, _accept) 106 | 107 | Image.register_extension(MicImageFile.format, ".mic") 108 | -------------------------------------------------------------------------------- /lambda/search_layer/python/PIL/ImtImagePlugin.py: -------------------------------------------------------------------------------- 1 | # 2 | # The Python Imaging Library. 3 | # $Id$ 4 | # 5 | # IM Tools support for PIL 6 | # 7 | # history: 8 | # 1996-05-27 fl Created (read 8-bit images only) 9 | # 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) 10 | # 11 | # Copyright (c) Secret Labs AB 1997-2001. 12 | # Copyright (c) Fredrik Lundh 1996-2001. 13 | # 14 | # See the README file for information on usage and redistribution. 15 | # 16 | from __future__ import annotations 17 | 18 | import re 19 | 20 | from . import Image, ImageFile 21 | 22 | # 23 | # -------------------------------------------------------------------- 24 | 25 | field = re.compile(rb"([a-z]*) ([^ \r\n]*)") 26 | 27 | 28 | ## 29 | # Image plugin for IM Tools images. 30 | 31 | 32 | class ImtImageFile(ImageFile.ImageFile): 33 | format = "IMT" 34 | format_description = "IM Tools" 35 | 36 | def _open(self) -> None: 37 | # Quick rejection: if there's not a LF among the first 38 | # 100 bytes, this is (probably) not a text header. 
39 | 40 | assert self.fp is not None 41 | 42 | buffer = self.fp.read(100) 43 | if b"\n" not in buffer: 44 | msg = "not an IM file" 45 | raise SyntaxError(msg) 46 | 47 | xsize = ysize = 0 48 | 49 | while True: 50 | if buffer: 51 | s = buffer[:1] 52 | buffer = buffer[1:] 53 | else: 54 | s = self.fp.read(1) 55 | if not s: 56 | break 57 | 58 | if s == b"\x0C": 59 | # image data begins 60 | self.tile = [ 61 | ( 62 | "raw", 63 | (0, 0) + self.size, 64 | self.fp.tell() - len(buffer), 65 | (self.mode, 0, 1), 66 | ) 67 | ] 68 | 69 | break 70 | 71 | else: 72 | # read key/value pair 73 | if b"\n" not in buffer: 74 | buffer += self.fp.read(100) 75 | lines = buffer.split(b"\n") 76 | s += lines.pop(0) 77 | buffer = b"\n".join(lines) 78 | if len(s) == 1 or len(s) > 100: 79 | break 80 | if s[0] == ord(b"*"): 81 | continue # comment 82 | 83 | m = field.match(s) 84 | if not m: 85 | break 86 | k, v = m.group(1, 2) 87 | if k == b"width": 88 | xsize = int(v) 89 | self._size = xsize, ysize 90 | elif k == b"height": 91 | ysize = int(v) 92 | self._size = xsize, ysize 93 | elif k == b"pixel" and v == b"n8": 94 | self._mode = "L" 95 | 96 | 97 | # 98 | # -------------------------------------------------------------------- 99 | 100 | Image.register_open(ImtImageFile.format, ImtImageFile) 101 | 102 | # 103 | # no extension registered (".im" is simply too common) 104 | -------------------------------------------------------------------------------- /lambda/search_layer/python/opensearchpy/helpers/asyncsigner.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # The OpenSearch Contributors require contributions made to 4 | # this file be licensed under the Apache-2.0 license or a 5 | # compatible open source license. 6 | # 7 | # Modifications Copyright OpenSearch Contributors. See 8 | # GitHub history for details. 9 | 10 | from typing import Any, Dict, Optional, Union 11 | 12 | 13 | class AWSV4SignerAsyncAuth: 14 | """ 15 | AWS V4 Request Signer for Async Requests. 16 | """ 17 | 18 | def __init__(self, credentials: Any, region: str, service: str = "es") -> None: 19 | if not credentials: 20 | raise ValueError("Credentials cannot be empty") 21 | self.credentials = credentials 22 | 23 | if not region: 24 | raise ValueError("Region cannot be empty") 25 | self.region = region 26 | 27 | if not service: 28 | raise ValueError("Service name cannot be empty") 29 | self.service = service 30 | 31 | def __call__( 32 | self, 33 | method: str, 34 | url: str, 35 | query_string: Optional[str] = None, 36 | body: Optional[Union[str, bytes]] = None, 37 | ) -> Dict[str, str]: 38 | return self._sign_request(method, url, query_string, body) 39 | 40 | def _sign_request( 41 | self, 42 | method: str, 43 | url: str, 44 | query_string: Optional[str], 45 | body: Optional[Union[str, bytes]], 46 | ) -> Dict[str, str]: 47 | """ 48 | This method helps in signing the request by injecting the required headers. 
49 | :param prepared_request: unsigned headers 50 | :return: signed headers 51 | """ 52 | 53 | from botocore.auth import SigV4Auth 54 | from botocore.awsrequest import AWSRequest 55 | 56 | # create an AWS request object and sign it using SigV4Auth 57 | aws_request = AWSRequest( 58 | method=method, 59 | url=url, 60 | data=body, 61 | ) 62 | 63 | # credentials objects expose access_key, secret_key and token attributes 64 | # via @property annotations that call _refresh() on every access, 65 | # creating a race condition if the credentials expire before secret_key 66 | # is called but after access_key- the end result is the access_key doesn't 67 | # correspond to the secret_key used to sign the request. To avoid this, 68 | # get_frozen_credentials() which returns non-refreshing credentials is 69 | # called if it exists. 70 | credentials = ( 71 | self.credentials.get_frozen_credentials() 72 | if hasattr(self.credentials, "get_frozen_credentials") 73 | and callable(self.credentials.get_frozen_credentials) 74 | else self.credentials 75 | ) 76 | 77 | sig_v4_auth = SigV4Auth(credentials, self.service, self.region) 78 | sig_v4_auth.add_auth(aws_request) 79 | aws_request.headers["X-Amz-Content-SHA256"] = sig_v4_auth.payload(aws_request) 80 | 81 | # copy the headers from AWS request object into the prepared_request 82 | return dict(aws_request.headers.items()) 83 | -------------------------------------------------------------------------------- /lambda/search_layer/python/python_dateutil-2.9.0.post0.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | dateutil/__init__.py,sha256=Mqam67WO9IkTmUFyI66vS6IoSXTp9G388DadH2LCMLY,620 2 | dateutil/__pycache__/__init__.cpython-38.pyc,, 3 | dateutil/__pycache__/_common.cpython-38.pyc,, 4 | dateutil/__pycache__/_version.cpython-38.pyc,, 5 | dateutil/__pycache__/easter.cpython-38.pyc,, 6 | dateutil/__pycache__/relativedelta.cpython-38.pyc,, 7 | dateutil/__pycache__/rrule.cpython-38.pyc,, 8 | dateutil/__pycache__/tzwin.cpython-38.pyc,, 9 | dateutil/__pycache__/utils.cpython-38.pyc,, 10 | dateutil/_common.py,sha256=77w0yytkrxlYbSn--lDVPUMabUXRR9I3lBv_vQRUqUY,932 11 | dateutil/_version.py,sha256=BV031OxDDAmy58neUg5yyqLkLaqIw7ibK9As3jiMib0,166 12 | dateutil/easter.py,sha256=dyBi-lKvimH1u_k6p7Z0JJK72QhqVtVBsqByvpEPKvc,2678 13 | dateutil/parser/__init__.py,sha256=wWk6GFuxTpjoggCGtgkceJoti4pVjl4_fHQXpNOaSYg,1766 14 | dateutil/parser/__pycache__/__init__.cpython-38.pyc,, 15 | dateutil/parser/__pycache__/_parser.cpython-38.pyc,, 16 | dateutil/parser/__pycache__/isoparser.cpython-38.pyc,, 17 | dateutil/parser/_parser.py,sha256=7klDdyicksQB_Xgl-3UAmBwzCYor1AIZqklIcT6dH_8,58796 18 | dateutil/parser/isoparser.py,sha256=8Fy999bnCd1frSdOYuOraWfJTtd5W7qQ51NwNuH_hXM,13233 19 | dateutil/relativedelta.py,sha256=IY_mglMjoZbYfrvloTY2ce02aiVjPIkiZfqgNTZRfuA,24903 20 | dateutil/rrule.py,sha256=KJzKlaCd1jEbu4A38ZltslaoAUh9nSbdbOFdjp70Kew,66557 21 | dateutil/tz/__init__.py,sha256=F-Mz13v6jYseklQf9Te9J6nzcLDmq47gORa61K35_FA,444 22 | dateutil/tz/__pycache__/__init__.cpython-38.pyc,, 23 | dateutil/tz/__pycache__/_common.cpython-38.pyc,, 24 | dateutil/tz/__pycache__/_factories.cpython-38.pyc,, 25 | dateutil/tz/__pycache__/tz.cpython-38.pyc,, 26 | dateutil/tz/__pycache__/win.cpython-38.pyc,, 27 | dateutil/tz/_common.py,sha256=cgzDTANsOXvEc86cYF77EsliuSab8Puwpsl5-bX3_S4,12977 28 | dateutil/tz/_factories.py,sha256=unb6XQNXrPMveksTCU-Ag8jmVZs4SojoPUcAHpWnrvU,2569 29 | dateutil/tz/tz.py,sha256=EUnEdMfeThXiY6l4sh9yBabZ63_POzy01zSsh9thn1o,62855 30 | 
dateutil/tz/win.py,sha256=xJszWgSwE1xPx_HJj4ZkepyukC_hNy016WMcXhbRaB8,12935 31 | dateutil/tzwin.py,sha256=7Ar4vdQCnnM0mKR3MUjbIKsZrBVfHgdwsJZc_mGYRew,59 32 | dateutil/utils.py,sha256=dKCchEw8eObi0loGTx91unBxm_7UGlU3v_FjFMdqwYM,1965 33 | dateutil/zoneinfo/__init__.py,sha256=KYg0pthCMjcp5MXSEiBJn3nMjZeNZav7rlJw5-tz1S4,5889 34 | dateutil/zoneinfo/__pycache__/__init__.cpython-38.pyc,, 35 | dateutil/zoneinfo/__pycache__/rebuild.cpython-38.pyc,, 36 | dateutil/zoneinfo/dateutil-zoneinfo.tar.gz,sha256=0-pS57bpaN4NiE3xKIGTWW-pW4A9tPkqGCeac5gARHU,156400 37 | dateutil/zoneinfo/rebuild.py,sha256=MiqYzCIHvNbMH-LdRYLv-4T0EIA7hDKt5GLR0IRTLdI,2392 38 | python_dateutil-2.9.0.post0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 39 | python_dateutil-2.9.0.post0.dist-info/LICENSE,sha256=ugD1Gg2SgjtaHN4n2LW50jIeZ-2NqbwWPv-W1eF-V34,2889 40 | python_dateutil-2.9.0.post0.dist-info/METADATA,sha256=qdQ22jIr6AgzL5jYgyWZjofLaTpniplp_rTPrXKabpM,8354 41 | python_dateutil-2.9.0.post0.dist-info/RECORD,, 42 | python_dateutil-2.9.0.post0.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110 43 | python_dateutil-2.9.0.post0.dist-info/top_level.txt,sha256=4tjdWkhRZvF7LA_BYe_L9gB2w_p2a-z5y6ArjaRkot8,9 44 | python_dateutil-2.9.0.post0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 45 | --------------------------------------------------------------------------------