├── __init__.py
├── tests
│   ├── __init__.py
│   ├── unit
│   │   ├── __init__.py
│   │   └── test_handler.py
│   ├── integration
│   │   ├── __init__.py
│   │   └── test_api_gateway.py
│   └── requirements.txt
├── hello_world
│   ├── __init__.py
│   ├── requirements.txt
│   └── app.py
├── .aws-sam
│   ├── auto-dependency-layer
│   │   ├── HelloWorldFunction
│   │   │   ├── __init__.py
│   │   │   ├── requirements.txt
│   │   │   └── app.py
│   │   ├── HelloWorldFunction19d43fc4DepLayer
│   │   │   ├── python
│   │   │   │   ├── idna
│   │   │   │   │   ├── py.typed
│   │   │   │   │   ├── package_data.py
│   │   │   │   │   ├── compat.py
│   │   │   │   │   ├── __init__.py
│   │   │   │   │   ├── intranges.py
│   │   │   │   │   └── codec.py
│   │   │   │   ├── certifi
│   │   │   │   │   ├── py.typed
│   │   │   │   │   ├── __init__.py
│   │   │   │   │   ├── __main__.py
│   │   │   │   │   └── core.py
│   │   │   │   ├── charset_normalizer
│   │   │   │   │   ├── py.typed
│   │   │   │   │   ├── __main__.py
│   │   │   │   │   ├── version.py
│   │   │   │   │   ├── cli
│   │   │   │   │   │   └── __init__.py
│   │   │   │   │   ├── __init__.py
│   │   │   │   │   └── legacy.py
│   │   │   │   ├── urllib3
│   │   │   │   │   ├── contrib
│   │   │   │   │   │   ├── __init__.py
│   │   │   │   │   │   └── emscripten
│   │   │   │   │   │       ├── request.py
│   │   │   │   │   │       ├── __init__.py
│   │   │   │   │   │       └── emscripten_fetch_worker.js
│   │   │   │   │   ├── py.typed
│   │   │   │   │   ├── _version.py
│   │   │   │   │   ├── util
│   │   │   │   │   │   ├── __init__.py
│   │   │   │   │   │   ├── util.py
│   │   │   │   │   │   ├── proxy.py
│   │   │   │   │   │   ├── response.py
│   │   │   │   │   │   ├── wait.py
│   │   │   │   │   │   └── connection.py
│   │   │   │   │   └── filepost.py
│   │   │   │   ├── certifi-2024.2.2.dist-info
│   │   │   │   │   ├── top_level.txt
│   │   │   │   │   ├── WHEEL
│   │   │   │   │   ├── RECORD
│   │   │   │   │   ├── LICENSE
│   │   │   │   │   └── METADATA
│   │   │   │   ├── requests-2.31.0.dist-info
│   │   │   │   │   ├── top_level.txt
│   │   │   │   │   ├── WHEEL
│   │   │   │   │   ├── RECORD
│   │   │   │   │   └── METADATA
│   │   │   │   ├── charset_normalizer-3.3.2.dist-info
│   │   │   │   │   ├── top_level.txt
│   │   │   │   │   ├── entry_points.txt
│   │   │   │   │   ├── WHEEL
│   │   │   │   │   ├── LICENSE
│   │   │   │   │   └── RECORD
│   │   │   │   ├── idna-3.6.dist-info
│   │   │   │   │   ├── WHEEL
│   │   │   │   │   ├── RECORD
│   │   │   │   │   └── LICENSE.md
│   │   │   │   ├── urllib3-2.2.1.dist-info
│   │   │   │   │   ├── WHEEL
│   │   │   │   │   ├── licenses
│   │   │   │   │   │   └── LICENSE.txt
│   │   │   │   │   └── RECORD
│   │   │   │   └── requests
│   │   │   │       ├── __version__.py
│   │   │   │       ├── certs.py
│   │   │   │       ├── hooks.py
│   │   │   │       ├── packages.py
│   │   │   │       ├── _internal_utils.py
│   │   │   │       ├── compat.py
│   │   │   │       ├── structures.py
│   │   │   │       ├── exceptions.py
│   │   │   │       ├── help.py
│   │   │   │       └── status_codes.py
│   │   │   └── AWS_SAM_CLI_README
│   │   ├── adl_nested_template.yaml
│   │   └── template.yaml
│   ├── deps
│   │   └── 8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4
│   │       ├── certifi
│   │       │   ├── py.typed
│   │       │   ├── __init__.py
│   │       │   ├── __main__.py
│   │       │   └── core.py
│   │       ├── idna
│   │       │   ├── py.typed
│   │       │   ├── package_data.py
│   │       │   ├── compat.py
│   │       │   ├── __init__.py
│   │       │   ├── intranges.py
│   │       │   └── codec.py
│   │       ├── charset_normalizer
│   │       │   ├── py.typed
│   │       │   ├── __main__.py
│   │       │   ├── version.py
│   │       │   ├── cli
│   │       │   │   └── __init__.py
│   │       │   ├── __init__.py
│   │       │   └── legacy.py
│   │       ├── urllib3
│   │       │   ├── contrib
│   │       │   │   ├── __init__.py
│   │       │   │   └── emscripten
│   │       │   │       ├── request.py
│   │       │   │       ├── __init__.py
│   │       │   │       └── emscripten_fetch_worker.js
│   │       │   ├── py.typed
│   │       │   ├── _version.py
│   │       │   ├── util
│   │       │   │   ├── __init__.py
│   │       │   │   ├── util.py
│   │       │   │   ├── proxy.py
│   │       │   │   ├── response.py
│   │       │   │   ├── wait.py
│   │       │   │   └── connection.py
│   │       │   └── filepost.py
│   │       ├── certifi-2024.2.2.dist-info
│   │       │   ├── top_level.txt
│   │       │   ├── WHEEL
│   │       │   ├── RECORD
│   │       │   ├── LICENSE
│   │       │   └── METADATA
│   │       ├── requests-2.31.0.dist-info
│   │       │   ├── top_level.txt
│   │       │   ├── WHEEL
│   │       │   ├── RECORD
│   │       │   └── METADATA
│   │       ├── charset_normalizer-3.3.2.dist-info
│   │       │   ├── top_level.txt
│   │       │   ├── entry_points.txt
│   │       │   ├── WHEEL
│   │       │   ├── LICENSE
│   │       │   └── RECORD
│   │       ├── idna-3.6.dist-info
│   │       │   ├── WHEEL
│   │       │   ├── RECORD
│   │       │   └── LICENSE.md
│   │       ├── urllib3-2.2.1.dist-info
│   │       │   ├── WHEEL
│   │       │   ├── licenses
│   │       │   │   └── LICENSE.txt
│   │       │   └── RECORD
│   │       └── requests
│   │           ├── __version__.py
│   │           ├── certs.py
│   │           ├── hooks.py
│   │           ├── packages.py
│   │           ├── _internal_utils.py
│   │           ├── compat.py
│   │           ├── structures.py
│   │           ├── exceptions.py
│   │           ├── help.py
│   │           └── status_codes.py
│   ├── sync.toml
│   └── build.toml
├── .idea
│   ├── .gitignore
│   ├── modules.xml
│   ├── aws.xml
│   └── sam-hello-world.iml
├── samconfig.toml
├── template.yaml
├── events
│   └── event.json
└── .gitignore
/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hello_world/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hello_world/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/tests/requirements.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | boto3
3 | requests
4 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/contrib/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/contrib/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi-2024.2.2.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | certifi
2 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna/package_data.py:
--------------------------------------------------------------------------------
1 | __version__ = '3.6'
2 |
3 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests-2.31.0.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna/package_data.py:
--------------------------------------------------------------------------------
1 | __version__ = '3.6'
2 |
3 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer-3.3.2.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | charset_normalizer
2 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi-2024.2.2.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | certifi
2 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests-2.31.0.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Editor-based HTTP Client requests
5 | /httpRequests/
6 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer-3.3.2.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | charset_normalizer
2 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/py.typed:
--------------------------------------------------------------------------------
1 | # Instruct type checkers to look for inline type annotations in this package.
2 | # See PEP 561.
3 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer/__main__.py:
--------------------------------------------------------------------------------
1 | from .cli import cli_detect
2 |
3 | if __name__ == "__main__":
4 | cli_detect()
5 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna-3.6.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: flit 3.9.0
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer-3.3.2.dist-info/entry_points.txt:
--------------------------------------------------------------------------------
1 | [console_scripts]
2 | normalizer = charset_normalizer.cli:cli_detect
3 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import contents, where
2 |
3 | __all__ = ["contents", "where"]
4 | __version__ = "2024.02.02"
5 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer/version.py:
--------------------------------------------------------------------------------
1 | """
2 | Expose version
3 | """
4 |
5 | __version__ = "3.3.2"
6 | VERSION = __version__.split(".")
7 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3-2.2.1.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: hatchling 1.21.1
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/_version.py:
--------------------------------------------------------------------------------
1 | # This file is protected via CODEOWNERS
2 | from __future__ import annotations
3 |
4 | __version__ = "2.2.1"
5 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer/__main__.py:
--------------------------------------------------------------------------------
1 | from .cli import cli_detect
2 |
3 | if __name__ == "__main__":
4 | cli_detect()
5 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/py.typed:
--------------------------------------------------------------------------------
1 | # Instruct type checkers to look for inline type annotations in this package.
2 | # See PEP 561.
3 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer-3.3.2.dist-info/entry_points.txt:
--------------------------------------------------------------------------------
1 | [console_scripts]
2 | normalizer = charset_normalizer.cli:cli_detect
3 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna-3.6.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: flit 3.9.0
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi-2024.2.2.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.42.0)
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
6 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests-2.31.0.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.40.0)
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
6 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import contents, where
2 |
3 | __all__ = ["contents", "where"]
4 | __version__ = "2024.02.02"
5 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/AWS_SAM_CLI_README:
--------------------------------------------------------------------------------
1 | This layer contains dependencies of function HelloWorldFunction and automatically added by AWS SAM CLI command 'sam sync'
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer/version.py:
--------------------------------------------------------------------------------
1 | """
2 | Expose version
3 | """
4 |
5 | __version__ = "3.3.2"
6 | VERSION = __version__.split(".")
7 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3-2.2.1.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: hatchling 1.21.1
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/_version.py:
--------------------------------------------------------------------------------
1 | # This file is protected via CODEOWNERS
2 | from __future__ import annotations
3 |
4 | __version__ = "2.2.1"
5 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer/cli/__init__.py:
--------------------------------------------------------------------------------
1 | from .__main__ import cli_detect, query_yes_no
2 |
3 | __all__ = (
4 | "cli_detect",
5 | "query_yes_no",
6 | )
7 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi-2024.2.2.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.42.0)
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
6 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests-2.31.0.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.40.0)
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
6 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer/cli/__init__.py:
--------------------------------------------------------------------------------
1 | from .__main__ import cli_detect, query_yes_no
2 |
3 | __all__ = (
4 | "cli_detect",
5 | "query_yes_no",
6 | )
7 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer-3.3.2.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.41.2)
3 | Root-Is-Purelib: false
4 | Tag: cp39-cp39-manylinux_2_17_x86_64
5 | Tag: cp39-cp39-manylinux2014_x86_64
6 |
7 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer-3.3.2.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.41.2)
3 | Root-Is-Purelib: false
4 | Tag: cp39-cp39-manylinux_2_17_x86_64
5 | Tag: cp39-cp39-manylinux2014_x86_64
6 |
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi/__main__.py:
--------------------------------------------------------------------------------
1 | import argparse
2 |
3 | from certifi import contents, where
4 |
5 | parser = argparse.ArgumentParser()
6 | parser.add_argument("-c", "--contents", action="store_true")
7 | args = parser.parse_args()
8 |
9 | if args.contents:
10 | print(contents())
11 | else:
12 | print(where())
13 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi/__main__.py:
--------------------------------------------------------------------------------
1 | import argparse
2 |
3 | from certifi import contents, where
4 |
5 | parser = argparse.ArgumentParser()
6 | parser.add_argument("-c", "--contents", action="store_true")
7 | args = parser.parse_args()
8 |
9 | if args.contents:
10 | print(contents())
11 | else:
12 | print(where())
13 |
--------------------------------------------------------------------------------
/.aws-sam/sync.toml:
--------------------------------------------------------------------------------
1 | # This file is auto generated by SAM CLI sync command
2 |
3 | [sync_state]
4 | dependency_layer = true
5 | latest_infra_sync_time = "2024-03-02T03:38:53.695321"
6 |
7 | [resource_sync_states."AutoDependencyLayerParentSyncFlow:HelloWorldFunction"]
8 | hash = "1e6193310f4cd0bbdf653cbb849b0a30e5e3c05f879ef38676a37d55bdd62765"
9 | sync_time = "2024-03-02T03:54:59.409809"
10 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna/compat.py:
--------------------------------------------------------------------------------
1 | from .core import *
2 | from .codec import *
3 | from typing import Any, Union
4 |
5 | def ToASCII(label: str) -> bytes:
6 | return encode(label)
7 |
8 | def ToUnicode(label: Union[bytes, bytearray]) -> str:
9 | return decode(label)
10 |
11 | def nameprep(s: Any) -> None:
12 | raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol')
13 |
14 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna/compat.py:
--------------------------------------------------------------------------------
1 | from .core import *
2 | from .codec import *
3 | from typing import Any, Union
4 |
5 | def ToASCII(label: str) -> bytes:
6 | return encode(label)
7 |
8 | def ToUnicode(label: Union[bytes, bytearray]) -> str:
9 | return decode(label)
10 |
11 | def nameprep(s: Any) -> None:
12 | raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol')
13 |
14 |
--------------------------------------------------------------------------------
/.aws-sam/build.toml:
--------------------------------------------------------------------------------
1 | # This file is auto generated by SAM CLI build command
2 |
3 | [function_build_definitions.8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4]
4 | codeuri = "/Users/home/Projects/aws/sam-hello-world/hello_world"
5 | runtime = "python3.9"
6 | architecture = "x86_64"
7 | handler = "app.lambda_handler"
8 | manifest_hash = "3298f13049d19cffaa37ca931dd4d421"
9 | packagetype = "Zip"
10 | functions = ["HelloWorldFunction"]
11 |
12 | [layer_build_definitions]
13 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/__version__.py:
--------------------------------------------------------------------------------
1 | # .-. .-. .-. . . .-. .-. .-. .-.
2 | # |( |- |.| | | |- `-. | `-.
3 | # ' ' `-' `-`.`-' `-' `-' ' `-'
4 |
5 | __title__ = "requests"
6 | __description__ = "Python HTTP for Humans."
7 | __url__ = "https://requests.readthedocs.io"
8 | __version__ = "2.31.0"
9 | __build__ = 0x023100
10 | __author__ = "Kenneth Reitz"
11 | __author_email__ = "me@kennethreitz.org"
12 | __license__ = "Apache 2.0"
13 | __copyright__ = "Copyright Kenneth Reitz"
14 | __cake__ = "\u2728 \U0001f370 \u2728"
15 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/certs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """
4 | requests.certs
5 | ~~~~~~~~~~~~~~
6 |
7 | This module returns the preferred default CA certificate bundle. There is
8 | only one — the one from the certifi package.
9 |
10 | If you are packaging Requests, e.g., for a Linux distribution or a managed
11 | environment, you can change the definition of where() to return a separately
12 | packaged CA bundle.
13 | """
14 | from certifi import where
15 |
16 | if __name__ == "__main__":
17 | print(where())
18 |
--------------------------------------------------------------------------------
/.idea/aws.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/__version__.py:
--------------------------------------------------------------------------------
1 | # .-. .-. .-. . . .-. .-. .-. .-.
2 | # |( |- |.| | | |- `-. | `-.
3 | # ' ' `-' `-`.`-' `-' `-' ' `-'
4 |
5 | __title__ = "requests"
6 | __description__ = "Python HTTP for Humans."
7 | __url__ = "https://requests.readthedocs.io"
8 | __version__ = "2.31.0"
9 | __build__ = 0x023100
10 | __author__ = "Kenneth Reitz"
11 | __author_email__ = "me@kennethreitz.org"
12 | __license__ = "Apache 2.0"
13 | __copyright__ = "Copyright Kenneth Reitz"
14 | __cake__ = "\u2728 \U0001f370 \u2728"
15 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/certs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """
4 | requests.certs
5 | ~~~~~~~~~~~~~~
6 |
7 | This module returns the preferred default CA certificate bundle. There is
8 | only one — the one from the certifi package.
9 |
10 | If you are packaging Requests, e.g., for a Linux distribution or a managed
11 | environment, you can change the definition of where() to return a separately
12 | packaged CA bundle.
13 | """
14 | from certifi import where
15 |
16 | if __name__ == "__main__":
17 | print(where())
18 |
--------------------------------------------------------------------------------
/.idea/sam-hello-world.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/contrib/emscripten/request.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass, field
4 |
5 | from ..._base_connection import _TYPE_BODY
6 |
7 |
8 | @dataclass
9 | class EmscriptenRequest:
10 | method: str
11 | url: str
12 | params: dict[str, str] | None = None
13 | body: _TYPE_BODY | None = None
14 | headers: dict[str, str] = field(default_factory=dict)
15 | timeout: float = 0
16 | decode_content: bool = True
17 |
18 | def set_header(self, name: str, value: str) -> None:
19 | self.headers[name.capitalize()] = value
20 |
21 | def set_body(self, body: _TYPE_BODY | None) -> None:
22 | self.body = body
23 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/contrib/emscripten/request.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass, field
4 |
5 | from ..._base_connection import _TYPE_BODY
6 |
7 |
8 | @dataclass
9 | class EmscriptenRequest:
10 | method: str
11 | url: str
12 | params: dict[str, str] | None = None
13 | body: _TYPE_BODY | None = None
14 | headers: dict[str, str] = field(default_factory=dict)
15 | timeout: float = 0
16 | decode_content: bool = True
17 |
18 | def set_header(self, name: str, value: str) -> None:
19 | self.headers[name.capitalize()] = value
20 |
21 | def set_body(self, body: _TYPE_BODY | None) -> None:
22 | self.body = body
23 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi-2024.2.2.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94
2 | certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
3 | certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541
4 | certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
5 | certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6 | certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
7 | certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170
8 | certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
9 | certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
10 | certifi-2024.2.2.dist-info/RECORD,,
11 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/contrib/emscripten/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import urllib3.connection
4 |
5 | from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool
6 | from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection
7 |
8 |
9 | def inject_into_urllib3() -> None:
10 | # override connection classes to use emscripten specific classes
11 | # n.b. mypy complains about the overriding of classes below
12 | # if it isn't ignored
13 | HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection
14 | HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection
15 | urllib3.connection.HTTPConnection = EmscriptenHTTPConnection # type: ignore[misc,assignment]
16 | urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection # type: ignore[misc,assignment]
17 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi-2024.2.2.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94
2 | certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
3 | certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541
4 | certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
5 | certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6 | certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
7 | certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170
8 | certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
9 | certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
10 | certifi-2024.2.2.dist-info/RECORD,,
11 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/contrib/emscripten/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import urllib3.connection
4 |
5 | from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool
6 | from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection
7 |
8 |
9 | def inject_into_urllib3() -> None:
10 | # override connection classes to use emscripten specific classes
11 | # n.b. mypy complains about the overriding of classes below
12 | # if it isn't ignored
13 | HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection
14 | HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection
15 | urllib3.connection.HTTPConnection = EmscriptenHTTPConnection # type: ignore[misc,assignment]
16 | urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection # type: ignore[misc,assignment]
17 |
--------------------------------------------------------------------------------
/samconfig.toml:
--------------------------------------------------------------------------------
1 | # More information about the configuration file can be found here:
2 | # https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html
3 | version = 0.1
4 |
5 | [default]
6 | [default.global.parameters]
7 | stack_name = "sam-hello-world"
8 |
9 | [default.build.parameters]
10 | cached = true
11 | parallel = true
12 |
13 | [default.validate.parameters]
14 | lint = true
15 |
16 | [default.deploy.parameters]
17 | capabilities = "CAPABILITY_IAM"
18 | confirm_changeset = true
19 | resolve_s3 = true
20 | s3_prefix = "sam-hello-world"
21 | region = "us-east-1"
22 | image_repositories = []
23 |
24 | [default.package.parameters]
25 | resolve_s3 = true
26 |
27 | [default.sync.parameters]
28 | watch = true
29 |
30 | [default.local_start_api.parameters]
31 | warm_containers = "EAGER"
32 |
33 | [default.local_start_lambda.parameters]
34 | warm_containers = "EAGER"
35 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/hooks.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.hooks
3 | ~~~~~~~~~~~~~~
4 |
5 | This module provides the capabilities for the Requests hooks system.
6 |
7 | Available hooks:
8 |
9 | ``response``:
10 | The response generated from a Request.
11 | """
12 | HOOKS = ["response"]
13 |
14 |
15 | def default_hooks():
16 | return {event: [] for event in HOOKS}
17 |
18 |
19 | # TODO: response is the only one
20 |
21 |
22 | def dispatch_hook(key, hooks, hook_data, **kwargs):
23 | """Dispatches a hook dictionary on a given piece of data."""
24 | hooks = hooks or {}
25 | hooks = hooks.get(key)
26 | if hooks:
27 | if hasattr(hooks, "__call__"):
28 | hooks = [hooks]
29 | for hook in hooks:
30 | _hook_data = hook(hook_data, **kwargs)
31 | if _hook_data is not None:
32 | hook_data = _hook_data
33 | return hook_data
34 |
--------------------------------------------------------------------------------
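The docstring above notes that "response" is the only hook Requests currently dispatches. A hedged usage sketch (not part of the repository; the URL is a placeholder) of how a callable attached to that hook receives each Response through dispatch_hook:

    import requests


    def log_status(response, *args, **kwargs):
        # Invoked via dispatch_hook("response", ...); returning None keeps the
        # original Response object unchanged.
        print(response.status_code, response.url)


    requests.get("https://example.com/", hooks={"response": log_status})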
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/hooks.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.hooks
3 | ~~~~~~~~~~~~~~
4 |
5 | This module provides the capabilities for the Requests hooks system.
6 |
7 | Available hooks:
8 |
9 | ``response``:
10 | The response generated from a Request.
11 | """
12 | HOOKS = ["response"]
13 |
14 |
15 | def default_hooks():
16 | return {event: [] for event in HOOKS}
17 |
18 |
19 | # TODO: response is the only one
20 |
21 |
22 | def dispatch_hook(key, hooks, hook_data, **kwargs):
23 | """Dispatches a hook dictionary on a given piece of data."""
24 | hooks = hooks or {}
25 | hooks = hooks.get(key)
26 | if hooks:
27 | if hasattr(hooks, "__call__"):
28 | hooks = [hooks]
29 | for hook in hooks:
30 | _hook_data = hook(hook_data, **kwargs)
31 | if _hook_data is not None:
32 | hook_data = _hook_data
33 | return hook_data
34 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/adl_nested_template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: AWS SAM CLI Nested Stack for Auto Dependency Layer Creation
3 | Metadata:
4 | SamCliInfo: 1.110.0
5 | key: AWS SAM CLI sync command
6 | Outputs:
7 | HelloWorldFunction19d43fc4DepLayer:
8 | Value:
9 | Ref: HelloWorldFunction19d43fc4DepLayer
10 | Resources:
11 | HelloWorldFunction19d43fc4DepLayer:
12 | Metadata:
13 | CreatedBy: AWS SAM CLI sync command
14 | Properties:
15 | CompatibleRuntimes:
16 | - python3.9
17 | ContentUri: /Users/home/Projects/aws/sam-hello-world/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer
18 | Description: Auto created layer for dependencies of function HelloWorldFunction
19 | LayerName: sam-hello-world9b7b0620-HelloWorldFunction19d43fc4-DepLayer
20 | RetentionPolicy: Delete
21 | Type: AWS::Serverless::LayerVersion
22 | Transform: AWS::Serverless-2016-10-31
23 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna-3.6.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
2 | idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426
3 | idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
4 | idna/core.py,sha256=Bxz9L1rH0N5U-yukGfPuDRTxR2jDUl96NCq1ql3YAUw,12908
5 | idna/idnadata.py,sha256=9u3Ec_GRrhlcbs7QM3pAZ2ObEQzPIOm99FaVOm91UGg,44351
6 | idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
7 | idna/package_data.py,sha256=y-iv-qJdmHsWVR5FszYwsMo1AQg8qpdU2aU5nT-S2oQ,21
8 | idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9 | idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503
10 | idna-3.6.dist-info/LICENSE.md,sha256=yy-vDKGMbTh-x8tm8yGTn7puZ-nawJ0xR3y52NP-aJk,1541
11 | idna-3.6.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
12 | idna-3.6.dist-info/METADATA,sha256=N93B509dkvvkd_Y0E_VxCHPkVkrD6InxoyfXvX4egds,9888
13 | idna-3.6.dist-info/RECORD,,
14 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna-3.6.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
2 | idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426
3 | idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
4 | idna/core.py,sha256=Bxz9L1rH0N5U-yukGfPuDRTxR2jDUl96NCq1ql3YAUw,12908
5 | idna/idnadata.py,sha256=9u3Ec_GRrhlcbs7QM3pAZ2ObEQzPIOm99FaVOm91UGg,44351
6 | idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
7 | idna/package_data.py,sha256=y-iv-qJdmHsWVR5FszYwsMo1AQg8qpdU2aU5nT-S2oQ,21
8 | idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9 | idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503
10 | idna-3.6.dist-info/LICENSE.md,sha256=yy-vDKGMbTh-x8tm8yGTn7puZ-nawJ0xR3y52NP-aJk,1541
11 | idna-3.6.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
12 | idna-3.6.dist-info/METADATA,sha256=N93B509dkvvkd_Y0E_VxCHPkVkrD6InxoyfXvX4egds,9888
13 | idna-3.6.dist-info/RECORD,,
14 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna/__init__.py:
--------------------------------------------------------------------------------
1 | from .package_data import __version__
2 | from .core import (
3 | IDNABidiError,
4 | IDNAError,
5 | InvalidCodepoint,
6 | InvalidCodepointContext,
7 | alabel,
8 | check_bidi,
9 | check_hyphen_ok,
10 | check_initial_combiner,
11 | check_label,
12 | check_nfc,
13 | decode,
14 | encode,
15 | ulabel,
16 | uts46_remap,
17 | valid_contextj,
18 | valid_contexto,
19 | valid_label_length,
20 | valid_string_length,
21 | )
22 | from .intranges import intranges_contain
23 |
24 | __all__ = [
25 | "IDNABidiError",
26 | "IDNAError",
27 | "InvalidCodepoint",
28 | "InvalidCodepointContext",
29 | "alabel",
30 | "check_bidi",
31 | "check_hyphen_ok",
32 | "check_initial_combiner",
33 | "check_label",
34 | "check_nfc",
35 | "decode",
36 | "encode",
37 | "intranges_contain",
38 | "ulabel",
39 | "uts46_remap",
40 | "valid_contextj",
41 | "valid_contexto",
42 | "valid_label_length",
43 | "valid_string_length",
44 | ]
45 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna/__init__.py:
--------------------------------------------------------------------------------
1 | from .package_data import __version__
2 | from .core import (
3 | IDNABidiError,
4 | IDNAError,
5 | InvalidCodepoint,
6 | InvalidCodepointContext,
7 | alabel,
8 | check_bidi,
9 | check_hyphen_ok,
10 | check_initial_combiner,
11 | check_label,
12 | check_nfc,
13 | decode,
14 | encode,
15 | ulabel,
16 | uts46_remap,
17 | valid_contextj,
18 | valid_contexto,
19 | valid_label_length,
20 | valid_string_length,
21 | )
22 | from .intranges import intranges_contain
23 |
24 | __all__ = [
25 | "IDNABidiError",
26 | "IDNAError",
27 | "InvalidCodepoint",
28 | "InvalidCodepointContext",
29 | "alabel",
30 | "check_bidi",
31 | "check_hyphen_ok",
32 | "check_initial_combiner",
33 | "check_label",
34 | "check_nfc",
35 | "decode",
36 | "encode",
37 | "intranges_contain",
38 | "ulabel",
39 | "uts46_remap",
40 | "valid_contextj",
41 | "valid_contexto",
42 | "valid_label_length",
43 | "valid_string_length",
44 | ]
45 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/packages.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | try:
4 | import chardet
5 | except ImportError:
6 | import warnings
7 |
8 | import charset_normalizer as chardet
9 |
10 | warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")
11 |
12 | # This code exists for backwards compatibility reasons.
13 | # I don't like it either. Just look the other way. :)
14 |
15 | for package in ("urllib3", "idna"):
16 | locals()[package] = __import__(package)
17 | # This traversal is apparently necessary such that the identities are
18 | # preserved (requests.packages.urllib3.* is urllib3.*)
19 | for mod in list(sys.modules):
20 | if mod == package or mod.startswith(f"{package}."):
21 | sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]
22 |
23 | target = chardet.__name__
24 | for mod in list(sys.modules):
25 | if mod == target or mod.startswith(f"{target}."):
26 | target = target.replace(target, "chardet")
27 | sys.modules[f"requests.packages.{target}"] = sys.modules[mod]
28 | # Kinda cool, though, right?
29 |
--------------------------------------------------------------------------------
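The aliasing loop above is what keeps the legacy requests.packages.* import paths pointing at the real top-level modules. A small illustrative check (hypothetical snippet, not a file in this dump):

    import urllib3

    import requests.packages.urllib3  # resolved from sys.modules by the loop above

    # Identity is preserved: the aliased name is the very same module object.
    assert requests.packages.urllib3 is urllib3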
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi-2024.2.2.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 | This package contains a modified version of ca-bundle.crt:
2 |
3 | ca-bundle.crt -- Bundle of CA Root Certificates
4 |
5 | This is a bundle of X.509 certificates of public Certificate Authorities
6 | (CA). These were automatically extracted from Mozilla's root certificates
7 | file (certdata.txt). This file can be found in the mozilla source tree:
8 | https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
9 | It contains the certificates in PEM format and therefore
10 | can be directly used with curl / libcurl / php_curl, or with
11 | an Apache+mod_ssl webserver for SSL client authentication.
12 | Just configure this file as the SSLCACertificateFile.#
13 |
14 | ***** BEGIN LICENSE BLOCK *****
15 | This Source Code Form is subject to the terms of the Mozilla Public License,
16 | v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
17 | one at http://mozilla.org/MPL/2.0/.
18 |
19 | ***** END LICENSE BLOCK *****
20 | @(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
21 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/packages.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | try:
4 | import chardet
5 | except ImportError:
6 | import warnings
7 |
8 | import charset_normalizer as chardet
9 |
10 | warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")
11 |
12 | # This code exists for backwards compatibility reasons.
13 | # I don't like it either. Just look the other way. :)
14 |
15 | for package in ("urllib3", "idna"):
16 | locals()[package] = __import__(package)
17 | # This traversal is apparently necessary such that the identities are
18 | # preserved (requests.packages.urllib3.* is urllib3.*)
19 | for mod in list(sys.modules):
20 | if mod == package or mod.startswith(f"{package}."):
21 | sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]
22 |
23 | target = chardet.__name__
24 | for mod in list(sys.modules):
25 | if mod == target or mod.startswith(f"{target}."):
26 | target = target.replace(target, "chardet")
27 | sys.modules[f"requests.packages.{target}"] = sys.modules[mod]
28 | # Kinda cool, though, right?
29 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi-2024.2.2.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 | This package contains a modified version of ca-bundle.crt:
2 |
3 | ca-bundle.crt -- Bundle of CA Root Certificates
4 |
5 | This is a bundle of X.509 certificates of public Certificate Authorities
6 | (CA). These were automatically extracted from Mozilla's root certificates
7 | file (certdata.txt). This file can be found in the mozilla source tree:
8 | https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
9 | It contains the certificates in PEM format and therefore
10 | can be directly used with curl / libcurl / php_curl, or with
11 | an Apache+mod_ssl webserver for SSL client authentication.
12 | Just configure this file as the SSLCACertificateFile.#
13 |
14 | ***** BEGIN LICENSE BLOCK *****
15 | This Source Code Form is subject to the terms of the Mozilla Public License,
16 | v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
17 | one at http://mozilla.org/MPL/2.0/.
18 |
19 | ***** END LICENSE BLOCK *****
20 | @(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
21 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer-3.3.2.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 TAHRI Ahmed R.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer-3.3.2.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 TAHRI Ahmed R.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/util/__init__.py:
--------------------------------------------------------------------------------
1 | # For backwards compatibility, provide imports that used to be here.
2 | from __future__ import annotations
3 |
4 | from .connection import is_connection_dropped
5 | from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
6 | from .response import is_fp_closed
7 | from .retry import Retry
8 | from .ssl_ import (
9 | ALPN_PROTOCOLS,
10 | IS_PYOPENSSL,
11 | SSLContext,
12 | assert_fingerprint,
13 | create_urllib3_context,
14 | resolve_cert_reqs,
15 | resolve_ssl_version,
16 | ssl_wrap_socket,
17 | )
18 | from .timeout import Timeout
19 | from .url import Url, parse_url
20 | from .wait import wait_for_read, wait_for_write
21 |
22 | __all__ = (
23 | "IS_PYOPENSSL",
24 | "SSLContext",
25 | "ALPN_PROTOCOLS",
26 | "Retry",
27 | "Timeout",
28 | "Url",
29 | "assert_fingerprint",
30 | "create_urllib3_context",
31 | "is_connection_dropped",
32 | "is_fp_closed",
33 | "parse_url",
34 | "make_headers",
35 | "resolve_cert_reqs",
36 | "resolve_ssl_version",
37 | "ssl_wrap_socket",
38 | "wait_for_read",
39 | "wait_for_write",
40 | "SKIP_HEADER",
41 | "SKIPPABLE_HEADERS",
42 | )
43 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3-2.2.1.dist-info/licenses/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2008-2020 Andrey Petrov and contributors.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/util/__init__.py:
--------------------------------------------------------------------------------
1 | # For backwards compatibility, provide imports that used to be here.
2 | from __future__ import annotations
3 |
4 | from .connection import is_connection_dropped
5 | from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
6 | from .response import is_fp_closed
7 | from .retry import Retry
8 | from .ssl_ import (
9 | ALPN_PROTOCOLS,
10 | IS_PYOPENSSL,
11 | SSLContext,
12 | assert_fingerprint,
13 | create_urllib3_context,
14 | resolve_cert_reqs,
15 | resolve_ssl_version,
16 | ssl_wrap_socket,
17 | )
18 | from .timeout import Timeout
19 | from .url import Url, parse_url
20 | from .wait import wait_for_read, wait_for_write
21 |
22 | __all__ = (
23 | "IS_PYOPENSSL",
24 | "SSLContext",
25 | "ALPN_PROTOCOLS",
26 | "Retry",
27 | "Timeout",
28 | "Url",
29 | "assert_fingerprint",
30 | "create_urllib3_context",
31 | "is_connection_dropped",
32 | "is_fp_closed",
33 | "parse_url",
34 | "make_headers",
35 | "resolve_cert_reqs",
36 | "resolve_ssl_version",
37 | "ssl_wrap_socket",
38 | "wait_for_read",
39 | "wait_for_write",
40 | "SKIP_HEADER",
41 | "SKIPPABLE_HEADERS",
42 | )
43 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3-2.2.1.dist-info/licenses/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2008-2020 Andrey Petrov and contributors.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/hello_world/app.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | # import requests
4 |
5 |
6 | def lambda_handler(event, context):
7 | """Sample pure Lambda function
8 |
9 | Parameters
10 | ----------
11 | event: dict, required
12 | API Gateway Lambda Proxy Input Format
13 |
14 | Event doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format
15 |
16 | context: object, required
17 | Lambda Context runtime methods and attributes
18 |
19 | Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html
20 |
21 | Returns
22 | ------
23 | API Gateway Lambda Proxy Output Format: dict
24 |
25 | Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html
26 | """
27 |
28 | # try:
29 | # ip = requests.get("http://checkip.amazonaws.com/")
30 | # except requests.RequestException as e:
31 | # # Send some context about this error to Lambda Logs
32 | # print(e)
33 |
34 | # raise e
35 |
36 | return {
37 | "statusCode": 200,
38 | "body": json.dumps({
39 | "message": "Hello SAM CD/CI w/webstorm integrations",
40 | # "location": ip.text.replace("\n", "")
41 | }),
42 | }
43 |
--------------------------------------------------------------------------------
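The docstring above spells out the API Gateway proxy contract the handler honors. A minimal sketch of how it could be exercised with pytest (hypothetical test, not one of the files in this dump; it assumes the project root is on sys.path so hello_world is importable):

    import json

    from hello_world import app


    def test_lambda_handler_returns_200():
        # The handler currently ignores its inputs, so empty stand-ins suffice.
        response = app.lambda_handler(event={}, context=None)

        assert response["statusCode"] == 200
        body = json.loads(response["body"])
        assert "message" in body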
/.aws-sam/auto-dependency-layer/HelloWorldFunction/app.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | # import requests
4 |
5 |
6 | def lambda_handler(event, context):
7 | """Sample pure Lambda function
8 |
9 | Parameters
10 | ----------
11 | event: dict, required
12 | API Gateway Lambda Proxy Input Format
13 |
14 | Event doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format
15 |
16 | context: object, required
17 | Lambda Context runtime methods and attributes
18 |
19 | Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html
20 |
21 | Returns
22 | ------
23 | API Gateway Lambda Proxy Output Format: dict
24 |
25 | Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html
26 | """
27 |
28 | # try:
29 | # ip = requests.get("http://checkip.amazonaws.com/")
30 | # except requests.RequestException as e:
31 | # # Send some context about this error to Lambda Logs
32 | # print(e)
33 |
34 | # raise e
35 |
36 | return {
37 | "statusCode": 200,
38 | "body": json.dumps({
39 | "message": "hello AWS SAM",
40 | # "location": ip.text.replace("\n", "")
41 | }),
42 | }
43 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/util/util.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import typing
4 | from types import TracebackType
5 |
6 |
7 | def to_bytes(
8 | x: str | bytes, encoding: str | None = None, errors: str | None = None
9 | ) -> bytes:
10 | if isinstance(x, bytes):
11 | return x
12 | elif not isinstance(x, str):
13 | raise TypeError(f"not expecting type {type(x).__name__}")
14 | if encoding or errors:
15 | return x.encode(encoding or "utf-8", errors=errors or "strict")
16 | return x.encode()
17 |
18 |
19 | def to_str(
20 | x: str | bytes, encoding: str | None = None, errors: str | None = None
21 | ) -> str:
22 | if isinstance(x, str):
23 | return x
24 | elif not isinstance(x, bytes):
25 | raise TypeError(f"not expecting type {type(x).__name__}")
26 | if encoding or errors:
27 | return x.decode(encoding or "utf-8", errors=errors or "strict")
28 | return x.decode()
29 |
30 |
31 | def reraise(
32 | tp: type[BaseException] | None,
33 | value: BaseException,
34 | tb: TracebackType | None = None,
35 | ) -> typing.NoReturn:
36 | try:
37 | if value.__traceback__ is not tb:
38 | raise value.with_traceback(tb)
39 | raise value
40 | finally:
41 | value = None # type: ignore[assignment]
42 | tb = None
43 |
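A small sketch of how the str/bytes helpers above behave, assuming the bundled urllib3 2.2.1:

    # Sketch: round-tripping between str and bytes with to_bytes/to_str.
    from urllib3.util.util import to_bytes, to_str

    raw = to_bytes("héllo", encoding="utf-8")
    assert raw == b"h\xc3\xa9llo"
    assert to_str(raw, encoding="utf-8") == "héllo"

    # Wrong input types raise TypeError instead of being coerced silently.
    try:
        to_bytes(123)  # type: ignore[arg-type]
    except TypeError as exc:
        print(exc)  # not expecting type int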
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/util/proxy.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import typing
4 |
5 | from .url import Url
6 |
7 | if typing.TYPE_CHECKING:
8 | from ..connection import ProxyConfig
9 |
10 |
11 | def connection_requires_http_tunnel(
12 | proxy_url: Url | None = None,
13 | proxy_config: ProxyConfig | None = None,
14 | destination_scheme: str | None = None,
15 | ) -> bool:
16 | """
17 | Returns True if the connection requires an HTTP CONNECT through the proxy.
18 |
19 | :param URL proxy_url:
20 | URL of the proxy.
21 | :param ProxyConfig proxy_config:
22 | Proxy configuration from poolmanager.py
23 | :param str destination_scheme:
24 | The scheme of the destination. (i.e https, http, etc)
25 | """
26 | # If we're not using a proxy, no way to use a tunnel.
27 | if proxy_url is None:
28 | return False
29 |
30 | # HTTP destinations never require tunneling, we always forward.
31 | if destination_scheme == "http":
32 | return False
33 |
34 | # Support for forwarding with HTTPS proxies and HTTPS destinations.
35 | if (
36 | proxy_url.scheme == "https"
37 | and proxy_config
38 | and proxy_config.use_forwarding_for_https
39 | ):
40 | return False
41 |
42 | # Otherwise always use a tunnel.
43 | return True
44 |
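A short sketch of the tunnelling decision above for a plain-HTTP proxy; the proxy URL is hypothetical and the comments show the expected return values:

    # Sketch: when does urllib3 need an HTTP CONNECT tunnel through the proxy?
    from urllib3.util.proxy import connection_requires_http_tunnel
    from urllib3.util.url import parse_url

    proxy = parse_url("http://proxy.internal:3128")
    print(connection_requires_http_tunnel(proxy, None, "https"))  # True  -> CONNECT tunnel
    print(connection_requires_http_tunnel(proxy, None, "http"))   # False -> plain forwarding
    print(connection_requires_http_tunnel(None, None, "https"))   # False -> no proxy at all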
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/util/util.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import typing
4 | from types import TracebackType
5 |
6 |
7 | def to_bytes(
8 | x: str | bytes, encoding: str | None = None, errors: str | None = None
9 | ) -> bytes:
10 | if isinstance(x, bytes):
11 | return x
12 | elif not isinstance(x, str):
13 | raise TypeError(f"not expecting type {type(x).__name__}")
14 | if encoding or errors:
15 | return x.encode(encoding or "utf-8", errors=errors or "strict")
16 | return x.encode()
17 |
18 |
19 | def to_str(
20 | x: str | bytes, encoding: str | None = None, errors: str | None = None
21 | ) -> str:
22 | if isinstance(x, str):
23 | return x
24 | elif not isinstance(x, bytes):
25 | raise TypeError(f"not expecting type {type(x).__name__}")
26 | if encoding or errors:
27 | return x.decode(encoding or "utf-8", errors=errors or "strict")
28 | return x.decode()
29 |
30 |
31 | def reraise(
32 | tp: type[BaseException] | None,
33 | value: BaseException,
34 | tb: TracebackType | None = None,
35 | ) -> typing.NoReturn:
36 | try:
37 | if value.__traceback__ is not tb:
38 | raise value.with_traceback(tb)
39 | raise value
40 | finally:
41 | value = None # type: ignore[assignment]
42 | tb = None
43 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/util/proxy.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import typing
4 |
5 | from .url import Url
6 |
7 | if typing.TYPE_CHECKING:
8 | from ..connection import ProxyConfig
9 |
10 |
11 | def connection_requires_http_tunnel(
12 | proxy_url: Url | None = None,
13 | proxy_config: ProxyConfig | None = None,
14 | destination_scheme: str | None = None,
15 | ) -> bool:
16 | """
17 | Returns True if the connection requires an HTTP CONNECT through the proxy.
18 |
19 | :param URL proxy_url:
20 | URL of the proxy.
21 | :param ProxyConfig proxy_config:
22 | Proxy configuration from poolmanager.py
23 | :param str destination_scheme:
24 | The scheme of the destination. (i.e https, http, etc)
25 | """
26 | # If we're not using a proxy, no way to use a tunnel.
27 | if proxy_url is None:
28 | return False
29 |
30 | # HTTP destinations never require tunneling, we always forward.
31 | if destination_scheme == "http":
32 | return False
33 |
34 | # Support for forwarding with HTTPS proxies and HTTPS destinations.
35 | if (
36 | proxy_url.scheme == "https"
37 | and proxy_config
38 | and proxy_config.use_forwarding_for_https
39 | ):
40 | return False
41 |
42 | # Otherwise always use a tunnel.
43 | return True
44 |
--------------------------------------------------------------------------------
/tests/integration/test_api_gateway.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import boto3
4 | import pytest
5 | import requests
6 |
7 | """
8 | Make sure the AWS_SAM_STACK_NAME environment variable is set to the name of the stack we are going to test.
9 | """
10 |
11 |
12 | class TestApiGateway:
13 |
14 | @pytest.fixture()
15 | def api_gateway_url(self):
16 | """ Get the API Gateway URL from Cloudformation Stack outputs """
17 | stack_name = os.environ.get("AWS_SAM_STACK_NAME")
18 |
19 | if stack_name is None:
20 | raise ValueError('Please set the AWS_SAM_STACK_NAME environment variable to the name of your stack')
21 |
22 | client = boto3.client("cloudformation")
23 |
24 | try:
25 | response = client.describe_stacks(StackName=stack_name)
26 | except Exception as e:
27 | raise Exception(
28 | f"Cannot find stack {stack_name} \n" f'Please make sure a stack with the name "{stack_name}" exists'
29 | ) from e
30 |
31 | stacks = response["Stacks"]
32 | stack_outputs = stacks[0]["Outputs"]
33 | api_outputs = [output for output in stack_outputs if output["OutputKey"] == "HelloWorldApi"]
34 |
35 | if not api_outputs:
36 |             raise KeyError(f"HelloWorldApi not found in stack {stack_name}")
37 |
38 | return api_outputs[0]["OutputValue"] # Extract url from stack outputs
39 |
40 | def test_api_gateway(self, api_gateway_url):
41 | """ Call the API Gateway endpoint and check the response """
42 | response = requests.get(api_gateway_url)
43 |
44 | assert response.status_code == 200
45 |         assert response.json() == {"message": "Hello SAM CD/CI w/webstorm integrations"}  # keep in sync with hello_world/app.py
46 |
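When the fixture above cannot find the HelloWorldApi output, a quick way to see what the deployed stack actually exposes is to list its outputs directly; a rough sketch (the fallback stack name is an assumption):

    # Sketch: print every output of the deployed CloudFormation stack.
    import os

    import boto3

    stack_name = os.environ.get("AWS_SAM_STACK_NAME", "sam-hello-world")  # default name is an assumption
    cfn = boto3.client("cloudformation")
    stack = cfn.describe_stacks(StackName=stack_name)["Stacks"][0]
    for output in stack.get("Outputs", []):
        print(output["OutputKey"], "=", output["OutputValue"])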
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna-3.6.dist-info/LICENSE.md:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2013-2023, Kim Davies and contributors.
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are
8 | met:
9 |
10 | 1. Redistributions of source code must retain the above copyright
11 | notice, this list of conditions and the following disclaimer.
12 |
13 | 2. Redistributions in binary form must reproduce the above copyright
14 | notice, this list of conditions and the following disclaimer in the
15 | documentation and/or other materials provided with the distribution.
16 |
17 | 3. Neither the name of the copyright holder nor the names of its
18 | contributors may be used to endorse or promote products derived from
19 | this software without specific prior written permission.
20 |
21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
27 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
28 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
29 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
30 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
31 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna-3.6.dist-info/LICENSE.md:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2013-2023, Kim Davies and contributors.
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are
8 | met:
9 |
10 | 1. Redistributions of source code must retain the above copyright
11 | notice, this list of conditions and the following disclaimer.
12 |
13 | 2. Redistributions in binary form must reproduce the above copyright
14 | notice, this list of conditions and the following disclaimer in the
15 | documentation and/or other materials provided with the distribution.
16 |
17 | 3. Neither the name of the copyright holder nor the names of its
18 | contributors may be used to endorse or promote products derived from
19 | this software without specific prior written permission.
20 |
21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
27 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
28 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
29 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
30 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
31 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Charset-Normalizer
4 | ~~~~~~~~~~~~~~
5 | The Real First Universal Charset Detector.
6 | A library that helps you read text from an unknown charset encoding.
7 | Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
8 | All IANA character set names for which the Python core library provides codecs are supported.
9 |
10 | Basic usage:
11 | >>> from charset_normalizer import from_bytes
12 | >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
13 | >>> best_guess = results.best()
14 | >>> str(best_guess)
15 | 'Bсеки човек има право на образование. Oбразованието!'
16 |
17 | Others methods and usages are available - see the full documentation
18 | at .
19 | :copyright: (c) 2021 by Ahmed TAHRI
20 | :license: MIT, see LICENSE for more details.
21 | """
22 | import logging
23 |
24 | from .api import from_bytes, from_fp, from_path, is_binary
25 | from .legacy import detect
26 | from .models import CharsetMatch, CharsetMatches
27 | from .utils import set_logging_handler
28 | from .version import VERSION, __version__
29 |
30 | __all__ = (
31 | "from_fp",
32 | "from_path",
33 | "from_bytes",
34 | "is_binary",
35 | "detect",
36 | "CharsetMatch",
37 | "CharsetMatches",
38 | "__version__",
39 | "VERSION",
40 | "set_logging_handler",
41 | )
42 |
43 | # Attach a NullHandler to the top level logger by default
44 | # https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
45 |
46 | logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
47 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Charset-Normalizer
4 | ~~~~~~~~~~~~~~
5 | The Real First Universal Charset Detector.
6 | A library that helps you read text from an unknown charset encoding.
7 | Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
8 | All IANA character set names for which the Python core library provides codecs are supported.
9 |
10 | Basic usage:
11 | >>> from charset_normalizer import from_bytes
12 | >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
13 | >>> best_guess = results.best()
14 | >>> str(best_guess)
15 | 'Bсеки човек има право на образование. Oбразованието!'
16 |
17 | Others methods and usages are available - see the full documentation
18 | at .
19 | :copyright: (c) 2021 by Ahmed TAHRI
20 | :license: MIT, see LICENSE for more details.
21 | """
22 | import logging
23 |
24 | from .api import from_bytes, from_fp, from_path, is_binary
25 | from .legacy import detect
26 | from .models import CharsetMatch, CharsetMatches
27 | from .utils import set_logging_handler
28 | from .version import VERSION, __version__
29 |
30 | __all__ = (
31 | "from_fp",
32 | "from_path",
33 | "from_bytes",
34 | "is_binary",
35 | "detect",
36 | "CharsetMatch",
37 | "CharsetMatches",
38 | "__version__",
39 | "VERSION",
40 | "set_logging_handler",
41 | )
42 |
43 | # Attach a NullHandler to the top level logger by default
44 | # https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
45 |
46 | logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
47 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/_internal_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | requests._internal_utils
3 | ~~~~~~~~~~~~~~
4 |
5 | Provides utility functions that are consumed internally by Requests
6 | which depend on extremely few external helpers (such as compat)
7 | """
8 | import re
9 |
10 | from .compat import builtin_str
11 |
12 | _VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
13 | _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
14 | _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
15 | _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
16 |
17 | _HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
18 | _HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
19 | HEADER_VALIDATORS = {
20 | bytes: _HEADER_VALIDATORS_BYTE,
21 | str: _HEADER_VALIDATORS_STR,
22 | }
23 |
24 |
25 | def to_native_string(string, encoding="ascii"):
26 | """Given a string object, regardless of type, returns a representation of
27 | that string in the native string type, encoding and decoding where
28 | necessary. This assumes ASCII unless told otherwise.
29 | """
30 | if isinstance(string, builtin_str):
31 | out = string
32 | else:
33 | out = string.decode(encoding)
34 |
35 | return out
36 |
37 |
38 | def unicode_is_ascii(u_string):
39 | """Determine if unicode string only contains ASCII characters.
40 |
41 | :param str u_string: unicode string to check. Must be unicode
42 | and not Python 2 `str`.
43 | :rtype: bool
44 | """
45 | assert isinstance(u_string, str)
46 | try:
47 | u_string.encode("ascii")
48 | return True
49 | except UnicodeEncodeError:
50 | return False
51 |
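A quick sketch of the two helpers above; requests._internal_utils is a private module, imported here only for illustration:

    # Sketch: native-string conversion and ASCII detection.
    from requests._internal_utils import to_native_string, unicode_is_ascii

    assert to_native_string(b"hello") == "hello"   # bytes are decoded (ASCII by default)
    assert to_native_string("hello") == "hello"    # str passes through unchanged
    assert unicode_is_ascii("hello") is True
    assert unicode_is_ascii("héllo") is False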
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/_internal_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | requests._internal_utils
3 | ~~~~~~~~~~~~~~
4 |
5 | Provides utility functions that are consumed internally by Requests
6 | which depend on extremely few external helpers (such as compat)
7 | """
8 | import re
9 |
10 | from .compat import builtin_str
11 |
12 | _VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
13 | _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
14 | _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
15 | _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
16 |
17 | _HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
18 | _HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
19 | HEADER_VALIDATORS = {
20 | bytes: _HEADER_VALIDATORS_BYTE,
21 | str: _HEADER_VALIDATORS_STR,
22 | }
23 |
24 |
25 | def to_native_string(string, encoding="ascii"):
26 | """Given a string object, regardless of type, returns a representation of
27 | that string in the native string type, encoding and decoding where
28 | necessary. This assumes ASCII unless told otherwise.
29 | """
30 | if isinstance(string, builtin_str):
31 | out = string
32 | else:
33 | out = string.decode(encoding)
34 |
35 | return out
36 |
37 |
38 | def unicode_is_ascii(u_string):
39 | """Determine if unicode string only contains ASCII characters.
40 |
41 | :param str u_string: unicode string to check. Must be unicode
42 | and not Python 2 `str`.
43 | :rtype: bool
44 | """
45 | assert isinstance(u_string, str)
46 | try:
47 | u_string.encode("ascii")
48 | return True
49 | except UnicodeEncodeError:
50 | return False
51 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/compat.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.compat
3 | ~~~~~~~~~~~~~~~
4 |
5 | This module previously handled import compatibility issues
6 | between Python 2 and Python 3. It remains for backwards
7 | compatibility until the next major version.
8 | """
9 |
10 | try:
11 | import chardet
12 | except ImportError:
13 | import charset_normalizer as chardet
14 |
15 | import sys
16 |
17 | # -------
18 | # Pythons
19 | # -------
20 |
21 | # Syntax sugar.
22 | _ver = sys.version_info
23 |
24 | #: Python 2.x?
25 | is_py2 = _ver[0] == 2
26 |
27 | #: Python 3.x?
28 | is_py3 = _ver[0] == 3
29 |
30 | # json/simplejson module import resolution
31 | has_simplejson = False
32 | try:
33 | import simplejson as json
34 |
35 | has_simplejson = True
36 | except ImportError:
37 | import json
38 |
39 | if has_simplejson:
40 | from simplejson import JSONDecodeError
41 | else:
42 | from json import JSONDecodeError
43 |
44 | # Keep OrderedDict for backwards compatibility.
45 | from collections import OrderedDict
46 | from collections.abc import Callable, Mapping, MutableMapping
47 | from http import cookiejar as cookielib
48 | from http.cookies import Morsel
49 | from io import StringIO
50 |
51 | # --------------
52 | # Legacy Imports
53 | # --------------
54 | from urllib.parse import (
55 | quote,
56 | quote_plus,
57 | unquote,
58 | unquote_plus,
59 | urldefrag,
60 | urlencode,
61 | urljoin,
62 | urlparse,
63 | urlsplit,
64 | urlunparse,
65 | )
66 | from urllib.request import (
67 | getproxies,
68 | getproxies_environment,
69 | parse_http_list,
70 | proxy_bypass,
71 | proxy_bypass_environment,
72 | )
73 |
74 | builtin_str = str
75 | str = str
76 | bytes = bytes
77 | basestring = (str, bytes)
78 | numeric_types = (int, float)
79 | integer_types = (int,)
80 |
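A brief sketch of why this shim still matters: downstream code can import these names from one place regardless of how they were resolved at import time (printed values are illustrative):

    # Sketch: names re-exported by requests.compat.
    from requests.compat import JSONDecodeError, urljoin, urlparse

    print(urljoin("https://example.com/Prod/", "hello"))      # https://example.com/Prod/hello
    print(urlparse("https://example.com/Prod/hello").netloc)  # example.com

    # JSONDecodeError is simplejson's class when simplejson is installed, else the
    # stdlib's; in the vendored requests 2.31.0, requests.exceptions.JSONDecodeError
    # subclasses whichever class was picked here.
    print(JSONDecodeError.__module__)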
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Transform: AWS::Serverless-2016-10-31
3 | Description: 'sam-hello-world
4 |
5 | Sample SAM Template for sam-hello-world
6 |
7 | '
8 | Globals:
9 | Function:
10 | Timeout: 3
11 | MemorySize: 128
12 | LoggingConfig:
13 | LogFormat: JSON
14 | Resources:
15 | HelloWorldFunction:
16 | Type: AWS::Serverless::Function
17 | Properties:
18 | CodeUri: HelloWorldFunction
19 | Handler: app.lambda_handler
20 | Runtime: python3.9
21 | Architectures:
22 | - x86_64
23 | Events:
24 | HelloWorld:
25 | Type: Api
26 | Properties:
27 | Path: /hello
28 | Method: get
29 | Layers:
30 | - Fn::GetAtt:
31 | - AwsSamAutoDependencyLayerNestedStack
32 | - Outputs.HelloWorldFunction19d43fc4DepLayer
33 | Metadata:
34 | SamResourceId: HelloWorldFunction
35 | AwsSamAutoDependencyLayerNestedStack:
36 | DeletionPolicy: Delete
37 | Metadata:
38 | CreatedBy: AWS SAM CLI sync command
39 | Properties:
40 | TemplateURL: /Users/home/Projects/aws/sam-hello-world/.aws-sam/auto-dependency-layer/adl_nested_template.yaml
41 | Type: AWS::CloudFormation::Stack
42 | Outputs:
43 | HelloWorldApi:
44 | Description: API Gateway endpoint URL for Prod stage for Hello World function
45 | Value:
46 | Fn::Sub: https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/
47 | HelloWorldFunction:
48 | Description: Hello World Lambda Function ARN
49 | Value:
50 | Fn::GetAtt:
51 | - HelloWorldFunction
52 | - Arn
53 | HelloWorldFunctionIamRole:
54 | Description: Implicit IAM Role created for Hello World function
55 | Value:
56 | Fn::GetAtt:
57 | - HelloWorldFunctionRole
58 | - Arn
59 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/compat.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.compat
3 | ~~~~~~~~~~~~~~~
4 |
5 | This module previously handled import compatibility issues
6 | between Python 2 and Python 3. It remains for backwards
7 | compatibility until the next major version.
8 | """
9 |
10 | try:
11 | import chardet
12 | except ImportError:
13 | import charset_normalizer as chardet
14 |
15 | import sys
16 |
17 | # -------
18 | # Pythons
19 | # -------
20 |
21 | # Syntax sugar.
22 | _ver = sys.version_info
23 |
24 | #: Python 2.x?
25 | is_py2 = _ver[0] == 2
26 |
27 | #: Python 3.x?
28 | is_py3 = _ver[0] == 3
29 |
30 | # json/simplejson module import resolution
31 | has_simplejson = False
32 | try:
33 | import simplejson as json
34 |
35 | has_simplejson = True
36 | except ImportError:
37 | import json
38 |
39 | if has_simplejson:
40 | from simplejson import JSONDecodeError
41 | else:
42 | from json import JSONDecodeError
43 |
44 | # Keep OrderedDict for backwards compatibility.
45 | from collections import OrderedDict
46 | from collections.abc import Callable, Mapping, MutableMapping
47 | from http import cookiejar as cookielib
48 | from http.cookies import Morsel
49 | from io import StringIO
50 |
51 | # --------------
52 | # Legacy Imports
53 | # --------------
54 | from urllib.parse import (
55 | quote,
56 | quote_plus,
57 | unquote,
58 | unquote_plus,
59 | urldefrag,
60 | urlencode,
61 | urljoin,
62 | urlparse,
63 | urlsplit,
64 | urlunparse,
65 | )
66 | from urllib.request import (
67 | getproxies,
68 | getproxies_environment,
69 | parse_http_list,
70 | proxy_bypass,
71 | proxy_bypass_environment,
72 | )
73 |
74 | builtin_str = str
75 | str = str
76 | bytes = bytes
77 | basestring = (str, bytes)
78 | numeric_types = (int, float)
79 | integer_types = (int,)
80 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests-2.31.0.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | requests/__init__.py,sha256=LvmKhjIz8mHaKXthC2Mv5ykZ1d92voyf3oJpd-VuAig,4963
2 | requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435
3 | requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
4 | requests/adapters.py,sha256=v_FmjU5KZ76k-YttShZYB5RprIzhhL8Y3zgW9p4eBQ8,19553
5 | requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449
6 | requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187
7 | requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429
8 | requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451
9 | requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560
10 | requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811
11 | requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875
12 | requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
13 | requests/models.py,sha256=-DlKi0or8gFAM6VzutobXvvBW_2wrJuOF5NfndTIddA,35223
14 | requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957
15 | requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373
16 | requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235
17 | requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
18 | requests/utils.py,sha256=6sx2X3cIVA8BgWOg8odxFy-_lbWDFETU8HI4fU4Rmqw,33448
19 | requests-2.31.0.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
20 | requests-2.31.0.dist-info/METADATA,sha256=eCPokOnbb0FROLrfl0R5EpDvdufsb9CaN4noJH__54I,4634
21 | requests-2.31.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
22 | requests-2.31.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
23 | requests-2.31.0.dist-info/RECORD,,
24 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests-2.31.0.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | requests/__init__.py,sha256=LvmKhjIz8mHaKXthC2Mv5ykZ1d92voyf3oJpd-VuAig,4963
2 | requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435
3 | requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
4 | requests/adapters.py,sha256=v_FmjU5KZ76k-YttShZYB5RprIzhhL8Y3zgW9p4eBQ8,19553
5 | requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449
6 | requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187
7 | requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429
8 | requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451
9 | requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560
10 | requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811
11 | requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875
12 | requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
13 | requests/models.py,sha256=-DlKi0or8gFAM6VzutobXvvBW_2wrJuOF5NfndTIddA,35223
14 | requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957
15 | requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373
16 | requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235
17 | requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
18 | requests/utils.py,sha256=6sx2X3cIVA8BgWOg8odxFy-_lbWDFETU8HI4fU4Rmqw,33448
19 | requests-2.31.0.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
20 | requests-2.31.0.dist-info/METADATA,sha256=eCPokOnbb0FROLrfl0R5EpDvdufsb9CaN4noJH__54I,4634
21 | requests-2.31.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
22 | requests-2.31.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
23 | requests-2.31.0.dist-info/RECORD,,
24 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer-3.3.2.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481
2 | charset_normalizer/md.cpython-39-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064
3 | charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577
4 | charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73
5 | charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560
6 | charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071
7 | charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624
8 | charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9 | charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097
10 | charset_normalizer/version.py,sha256=iHKUfHD3kDRSyrh_BN2ojh43TA5-UZQjvbVIEFfpHDs,79
11 | charset_normalizer/models.py,sha256=I5i0s4aKCCgLPY2tUY3pwkgFA-BUbbNxQ7hVkVTt62s,11624
12 | charset_normalizer/md__mypyc.cpython-39-x86_64-linux-gnu.so,sha256=v9J_AZUXJp9wRew3wN6S0ELM8ze9Dk4H-chVJzX5psk,268816
13 | charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894
14 | charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100
15 | charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744
16 | charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65
17 | charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
18 | charset_normalizer-3.3.2.dist-info/WHEEL,sha256=_BMdtp3IQ4NF7VFMKD4lD9Cik0H3WhEP1vtG22VwXhU,148
19 | charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550
20 | charset_normalizer-3.3.2.dist-info/RECORD,,
21 | charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
22 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer-3.3.2.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481
2 | charset_normalizer/md.cpython-39-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064
3 | charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577
4 | charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73
5 | charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560
6 | charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071
7 | charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624
8 | charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9 | charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097
10 | charset_normalizer/version.py,sha256=iHKUfHD3kDRSyrh_BN2ojh43TA5-UZQjvbVIEFfpHDs,79
11 | charset_normalizer/models.py,sha256=I5i0s4aKCCgLPY2tUY3pwkgFA-BUbbNxQ7hVkVTt62s,11624
12 | charset_normalizer/md__mypyc.cpython-39-x86_64-linux-gnu.so,sha256=v9J_AZUXJp9wRew3wN6S0ELM8ze9Dk4H-chVJzX5psk,268816
13 | charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894
14 | charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100
15 | charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744
16 | charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65
17 | charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
18 | charset_normalizer-3.3.2.dist-info/WHEEL,sha256=_BMdtp3IQ4NF7VFMKD4lD9Cik0H3WhEP1vtG22VwXhU,148
19 | charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550
20 | charset_normalizer-3.3.2.dist-info/RECORD,,
21 | charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
22 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna/intranges.py:
--------------------------------------------------------------------------------
1 | """
2 | Given a list of integers, made up of (hopefully) a small number of long runs
3 | of consecutive integers, compute a representation of the form
4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present
5 | in the original list?" in time O(log(# runs)).
6 | """
7 |
8 | import bisect
9 | from typing import List, Tuple
10 |
11 | def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
12 | """Represent a list of integers as a sequence of ranges:
13 | ((start_0, end_0), (start_1, end_1), ...), such that the original
14 | integers are exactly those x such that start_i <= x < end_i for some i.
15 |
16 | Ranges are encoded as single integers (start << 32 | end), not as tuples.
17 | """
18 |
19 | sorted_list = sorted(list_)
20 | ranges = []
21 | last_write = -1
22 | for i in range(len(sorted_list)):
23 | if i+1 < len(sorted_list):
24 | if sorted_list[i] == sorted_list[i+1]-1:
25 | continue
26 | current_range = sorted_list[last_write+1:i+1]
27 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
28 | last_write = i
29 |
30 | return tuple(ranges)
31 |
32 | def _encode_range(start: int, end: int) -> int:
33 | return (start << 32) | end
34 |
35 | def _decode_range(r: int) -> Tuple[int, int]:
36 | return (r >> 32), (r & ((1 << 32) - 1))
37 |
38 |
39 | def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
40 | """Determine if `int_` falls into one of the ranges in `ranges`."""
41 | tuple_ = _encode_range(int_, 0)
42 | pos = bisect.bisect_left(ranges, tuple_)
43 | # we could be immediately ahead of a tuple (start, end)
44 | # with start < int_ <= end
45 | if pos > 0:
46 | left, right = _decode_range(ranges[pos-1])
47 | if left <= int_ < right:
48 | return True
49 | # or we could be immediately behind a tuple (int_, end)
50 | if pos < len(ranges):
51 | left, _ = _decode_range(ranges[pos])
52 | if left == int_:
53 | return True
54 | return False
55 |
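A worked sketch of the (start << 32 | end) encoding described above, using a small made-up list of code points:

    # Sketch: packing runs of integers into single-integer ranges and querying them.
    from idna.intranges import intranges_contain, intranges_from_list

    codepoints = [1, 2, 3, 10, 11, 50]
    ranges = intranges_from_list(codepoints)
    # Three runs: [1, 4), [10, 12), [50, 51), each packed into one integer.
    print([(r >> 32, r & 0xFFFFFFFF) for r in ranges])  # [(1, 4), (10, 12), (50, 51)]

    assert intranges_contain(2, ranges)
    assert intranges_contain(11, ranges)
    assert not intranges_contain(4, ranges)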
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna/intranges.py:
--------------------------------------------------------------------------------
1 | """
2 | Given a list of integers, made up of (hopefully) a small number of long runs
3 | of consecutive integers, compute a representation of the form
4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present
5 | in the original list?" in time O(log(# runs)).
6 | """
7 |
8 | import bisect
9 | from typing import List, Tuple
10 |
11 | def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
12 | """Represent a list of integers as a sequence of ranges:
13 | ((start_0, end_0), (start_1, end_1), ...), such that the original
14 | integers are exactly those x such that start_i <= x < end_i for some i.
15 |
16 | Ranges are encoded as single integers (start << 32 | end), not as tuples.
17 | """
18 |
19 | sorted_list = sorted(list_)
20 | ranges = []
21 | last_write = -1
22 | for i in range(len(sorted_list)):
23 | if i+1 < len(sorted_list):
24 | if sorted_list[i] == sorted_list[i+1]-1:
25 | continue
26 | current_range = sorted_list[last_write+1:i+1]
27 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
28 | last_write = i
29 |
30 | return tuple(ranges)
31 |
32 | def _encode_range(start: int, end: int) -> int:
33 | return (start << 32) | end
34 |
35 | def _decode_range(r: int) -> Tuple[int, int]:
36 | return (r >> 32), (r & ((1 << 32) - 1))
37 |
38 |
39 | def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
40 | """Determine if `int_` falls into one of the ranges in `ranges`."""
41 | tuple_ = _encode_range(int_, 0)
42 | pos = bisect.bisect_left(ranges, tuple_)
43 | # we could be immediately ahead of a tuple (start, end)
44 | # with start < int_ <= end
45 | if pos > 0:
46 | left, right = _decode_range(ranges[pos-1])
47 | if left <= int_ < right:
48 | return True
49 | # or we could be immediately behind a tuple (int_, end)
50 | if pos < len(ranges):
51 | left, _ = _decode_range(ranges[pos])
52 | if left == int_:
53 | return True
54 | return False
55 |
--------------------------------------------------------------------------------
/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Transform: AWS::Serverless-2016-10-31
3 | Description: >
4 | sam-hello-world
5 |
6 | Sample SAM Template for sam-hello-world
7 |
8 | # More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
9 | Globals:
10 | Function:
11 | Timeout: 3
12 |     MemorySize: 128
13 |
14 | # You can add LoggingConfig parameters such as the Logformat, Log Group, and SystemLogLevel or ApplicationLogLevel. Learn more here https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html#sam-function-loggingconfig.
15 | LoggingConfig:
16 | LogFormat: JSON
17 | Resources:
18 | HelloWorldFunction:
19 | Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
20 | Properties:
21 | CodeUri: hello_world/
22 | Handler: app.lambda_handler
23 | Runtime: python3.9
24 | Architectures:
25 | - x86_64
26 | Events:
27 | HelloWorld:
28 | Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api
29 | Properties:
30 | Path: /hello
31 | Method: get
32 |
33 | Outputs:
34 | # ServerlessRestApi is an implicit API created out of Events key under Serverless::Function
35 | # Find out more about other implicit resources you can reference within SAM
36 | # https://github.com/awslabs/serverless-application-model/blob/master/docs/internals/generated_resources.rst#api
37 | HelloWorldApi:
38 | Description: API Gateway endpoint URL for Prod stage for Hello World function
39 | Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/"
40 | HelloWorldFunction:
41 | Description: Hello World Lambda Function ARN
42 | Value: !GetAtt HelloWorldFunction.Arn
43 | HelloWorldFunctionIamRole:
44 | Description: Implicit IAM Role created for Hello World function
45 | Value: !GetAtt HelloWorldFunctionRole.Arn
46 |
--------------------------------------------------------------------------------
/events/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "body": "{\"message\": \"hello world\"}",
3 | "resource": "/hello",
4 | "path": "/hello",
5 | "httpMethod": "GET",
6 | "isBase64Encoded": false,
7 | "queryStringParameters": {
8 | "foo": "bar"
9 | },
10 | "pathParameters": {
11 | "proxy": "/path/to/resource"
12 | },
13 | "stageVariables": {
14 | "baz": "qux"
15 | },
16 | "headers": {
17 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
18 | "Accept-Encoding": "gzip, deflate, sdch",
19 | "Accept-Language": "en-US,en;q=0.8",
20 | "Cache-Control": "max-age=0",
21 | "CloudFront-Forwarded-Proto": "https",
22 | "CloudFront-Is-Desktop-Viewer": "true",
23 | "CloudFront-Is-Mobile-Viewer": "false",
24 | "CloudFront-Is-SmartTV-Viewer": "false",
25 | "CloudFront-Is-Tablet-Viewer": "false",
26 | "CloudFront-Viewer-Country": "US",
27 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com",
28 | "Upgrade-Insecure-Requests": "1",
29 | "User-Agent": "Custom User Agent String",
30 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
31 | "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==",
32 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2",
33 | "X-Forwarded-Port": "443",
34 | "X-Forwarded-Proto": "https"
35 | },
36 | "requestContext": {
37 | "accountId": "123456789012",
38 | "resourceId": "123456",
39 | "stage": "prod",
40 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
41 | "requestTime": "09/Apr/2015:12:34:56 +0000",
42 | "requestTimeEpoch": 1428582896000,
43 | "identity": {
44 | "cognitoIdentityPoolId": null,
45 | "accountId": null,
46 | "cognitoIdentityId": null,
47 | "caller": null,
48 | "accessKey": null,
49 | "sourceIp": "127.0.0.1",
50 | "cognitoAuthenticationType": null,
51 | "cognitoAuthenticationProvider": null,
52 | "userArn": null,
53 | "userAgent": "Custom User Agent String",
54 | "user": null
55 | },
56 | "path": "/prod/hello",
57 | "resourcePath": "/hello",
58 | "httpMethod": "POST",
59 | "apiId": "1234567890",
60 | "protocol": "HTTP/1.1"
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/charset_normalizer/legacy.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Optional, Union
2 | from warnings import warn
3 |
4 | from .api import from_bytes
5 | from .constant import CHARDET_CORRESPONDENCE
6 |
7 |
8 | def detect(
9 | byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
10 | ) -> Dict[str, Optional[Union[str, float]]]:
11 | """
12 | chardet legacy method
13 | Detect the encoding of the given byte string. It should be mostly backward-compatible.
14 | Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
15 | This function is deprecated and should be used to migrate your project easily, consult the documentation for
16 | further information. Not planned for removal.
17 |
18 | :param byte_str: The byte sequence to examine.
19 | :param should_rename_legacy: Should we rename legacy encodings
20 | to their more modern equivalents?
21 | """
22 | if len(kwargs):
23 | warn(
24 | f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
25 | )
26 |
27 | if not isinstance(byte_str, (bytearray, bytes)):
28 | raise TypeError( # pragma: nocover
29 | "Expected object of type bytes or bytearray, got: "
30 | "{0}".format(type(byte_str))
31 | )
32 |
33 | if isinstance(byte_str, bytearray):
34 | byte_str = bytes(byte_str)
35 |
36 | r = from_bytes(byte_str).best()
37 |
38 | encoding = r.encoding if r is not None else None
39 | language = r.language if r is not None and r.language != "Unknown" else ""
40 | confidence = 1.0 - r.chaos if r is not None else None
41 |
42 | # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
43 | # but chardet does return 'utf-8-sig' and it is a valid codec name.
44 | if r is not None and encoding == "utf_8" and r.bom:
45 | encoding += "_sig"
46 |
47 | if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE:
48 | encoding = CHARDET_CORRESPONDENCE[encoding]
49 |
50 | return {
51 | "encoding": encoding,
52 | "language": language,
53 | "confidence": confidence,
54 | }
55 |
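A minimal sketch of the chardet-compatible entry point above; the detected name and confidence shown in the comment are illustrative, not guaranteed:

    # Sketch: legacy detect() on a UTF-8 encoded sample.
    from charset_normalizer import detect

    result = detect("Bсеки човек има право на образование.".encode("utf_8"))
    print(result["encoding"], result["confidence"])  # e.g. utf-8 and a confidence close to 1.0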
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/charset_normalizer/legacy.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Optional, Union
2 | from warnings import warn
3 |
4 | from .api import from_bytes
5 | from .constant import CHARDET_CORRESPONDENCE
6 |
7 |
8 | def detect(
9 | byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
10 | ) -> Dict[str, Optional[Union[str, float]]]:
11 | """
12 | chardet legacy method
13 | Detect the encoding of the given byte string. It should be mostly backward-compatible.
14 | Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
15 | This function is deprecated and should be used to migrate your project easily, consult the documentation for
16 | further information. Not planned for removal.
17 |
18 | :param byte_str: The byte sequence to examine.
19 | :param should_rename_legacy: Should we rename legacy encodings
20 | to their more modern equivalents?
21 | """
22 | if len(kwargs):
23 | warn(
24 | f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
25 | )
26 |
27 | if not isinstance(byte_str, (bytearray, bytes)):
28 | raise TypeError( # pragma: nocover
29 | "Expected object of type bytes or bytearray, got: "
30 | "{0}".format(type(byte_str))
31 | )
32 |
33 | if isinstance(byte_str, bytearray):
34 | byte_str = bytes(byte_str)
35 |
36 | r = from_bytes(byte_str).best()
37 |
38 | encoding = r.encoding if r is not None else None
39 | language = r.language if r is not None and r.language != "Unknown" else ""
40 | confidence = 1.0 - r.chaos if r is not None else None
41 |
42 | # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
43 | # but chardet does return 'utf-8-sig' and it is a valid codec name.
44 | if r is not None and encoding == "utf_8" and r.bom:
45 | encoding += "_sig"
46 |
47 | if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE:
48 | encoding = CHARDET_CORRESPONDENCE[encoding]
49 |
50 | return {
51 | "encoding": encoding,
52 | "language": language,
53 | "confidence": confidence,
54 | }
55 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi-2024.2.2.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: certifi
3 | Version: 2024.2.2
4 | Summary: Python package for providing Mozilla's CA Bundle.
5 | Home-page: https://github.com/certifi/python-certifi
6 | Author: Kenneth Reitz
7 | Author-email: me@kennethreitz.com
8 | License: MPL-2.0
9 | Project-URL: Source, https://github.com/certifi/python-certifi
10 | Classifier: Development Status :: 5 - Production/Stable
11 | Classifier: Intended Audience :: Developers
12 | Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
13 | Classifier: Natural Language :: English
14 | Classifier: Programming Language :: Python
15 | Classifier: Programming Language :: Python :: 3
16 | Classifier: Programming Language :: Python :: 3 :: Only
17 | Classifier: Programming Language :: Python :: 3.6
18 | Classifier: Programming Language :: Python :: 3.7
19 | Classifier: Programming Language :: Python :: 3.8
20 | Classifier: Programming Language :: Python :: 3.9
21 | Classifier: Programming Language :: Python :: 3.10
22 | Classifier: Programming Language :: Python :: 3.11
23 | Requires-Python: >=3.6
24 | License-File: LICENSE
25 |
26 | Certifi: Python SSL Certificates
27 | ================================
28 |
29 | Certifi provides Mozilla's carefully curated collection of Root Certificates for
30 | validating the trustworthiness of SSL certificates while verifying the identity
31 | of TLS hosts. It has been extracted from the `Requests`_ project.
32 |
33 | Installation
34 | ------------
35 |
36 | ``certifi`` is available on PyPI. Simply install it with ``pip``::
37 |
38 | $ pip install certifi
39 |
40 | Usage
41 | -----
42 |
43 | To reference the installed certificate authority (CA) bundle, you can use the
44 | built-in function::
45 |
46 | >>> import certifi
47 |
48 | >>> certifi.where()
49 | '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
50 |
51 | Or from the command line::
52 |
53 | $ python -m certifi
54 | /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
55 |
56 | Enjoy!
57 |
58 | .. _`Requests`: https://requests.readthedocs.io/en/master/
59 |
60 | Addition/Removal of Certificates
61 | --------------------------------
62 |
63 | Certifi does not support any addition/removal or other modification of the
64 | CA trust store content. This project is intended to provide a reliable and
65 | highly portable root of trust to python deployments. Look to upstream projects
66 | for methods to use alternate trust.
67 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi-2024.2.2.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: certifi
3 | Version: 2024.2.2
4 | Summary: Python package for providing Mozilla's CA Bundle.
5 | Home-page: https://github.com/certifi/python-certifi
6 | Author: Kenneth Reitz
7 | Author-email: me@kennethreitz.com
8 | License: MPL-2.0
9 | Project-URL: Source, https://github.com/certifi/python-certifi
10 | Classifier: Development Status :: 5 - Production/Stable
11 | Classifier: Intended Audience :: Developers
12 | Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
13 | Classifier: Natural Language :: English
14 | Classifier: Programming Language :: Python
15 | Classifier: Programming Language :: Python :: 3
16 | Classifier: Programming Language :: Python :: 3 :: Only
17 | Classifier: Programming Language :: Python :: 3.6
18 | Classifier: Programming Language :: Python :: 3.7
19 | Classifier: Programming Language :: Python :: 3.8
20 | Classifier: Programming Language :: Python :: 3.9
21 | Classifier: Programming Language :: Python :: 3.10
22 | Classifier: Programming Language :: Python :: 3.11
23 | Requires-Python: >=3.6
24 | License-File: LICENSE
25 |
26 | Certifi: Python SSL Certificates
27 | ================================
28 |
29 | Certifi provides Mozilla's carefully curated collection of Root Certificates for
30 | validating the trustworthiness of SSL certificates while verifying the identity
31 | of TLS hosts. It has been extracted from the `Requests`_ project.
32 |
33 | Installation
34 | ------------
35 |
36 | ``certifi`` is available on PyPI. Simply install it with ``pip``::
37 |
38 | $ pip install certifi
39 |
40 | Usage
41 | -----
42 |
43 | To reference the installed certificate authority (CA) bundle, you can use the
44 | built-in function::
45 |
46 | >>> import certifi
47 |
48 | >>> certifi.where()
49 | '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
50 |
51 | Or from the command line::
52 |
53 | $ python -m certifi
54 | /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
55 |
56 | Enjoy!
57 |
58 | .. _`Requests`: https://requests.readthedocs.io/en/master/
59 |
60 | Addition/Removal of Certificates
61 | --------------------------------
62 |
63 | Certifi does not support any addition/removal or other modification of the
64 | CA trust store content. This project is intended to provide a reliable and
65 | highly portable root of trust to python deployments. Look to upstream projects
66 | for methods to use alternate trust.
67 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/filepost.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import binascii
4 | import codecs
5 | import os
6 | import typing
7 | from io import BytesIO
8 |
9 | from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField
10 |
11 | writer = codecs.lookup("utf-8")[3]
12 |
13 | _TYPE_FIELDS_SEQUENCE = typing.Sequence[
14 | typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField]
15 | ]
16 | _TYPE_FIELDS = typing.Union[
17 | _TYPE_FIELDS_SEQUENCE,
18 | typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE],
19 | ]
20 |
21 |
22 | def choose_boundary() -> str:
23 | """
24 | Our embarrassingly-simple replacement for mimetools.choose_boundary.
25 | """
26 | return binascii.hexlify(os.urandom(16)).decode()
27 |
28 |
29 | def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]:
30 | """
31 | Iterate over fields.
32 |
33 | Supports list of (k, v) tuples and dicts, and lists of
34 | :class:`~urllib3.fields.RequestField`.
35 |
36 | """
37 | iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]]
38 |
39 | if isinstance(fields, typing.Mapping):
40 | iterable = fields.items()
41 | else:
42 | iterable = fields
43 |
44 | for field in iterable:
45 | if isinstance(field, RequestField):
46 | yield field
47 | else:
48 | yield RequestField.from_tuples(*field)
49 |
50 |
51 | def encode_multipart_formdata(
52 | fields: _TYPE_FIELDS, boundary: str | None = None
53 | ) -> tuple[bytes, str]:
54 | """
55 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
56 |
57 | :param fields:
58 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
59 | Values are processed by :func:`urllib3.fields.RequestField.from_tuples`.
60 |
61 | :param boundary:
62 | If not specified, then a random boundary will be generated using
63 | :func:`urllib3.filepost.choose_boundary`.
64 | """
65 | body = BytesIO()
66 | if boundary is None:
67 | boundary = choose_boundary()
68 |
69 | for field in iter_field_objects(fields):
70 | body.write(f"--{boundary}\r\n".encode("latin-1"))
71 |
72 | writer(body).write(field.render_headers())
73 | data = field.data
74 |
75 | if isinstance(data, int):
76 | data = str(data) # Backwards compatibility
77 |
78 | if isinstance(data, str):
79 | writer(body).write(data)
80 | else:
81 | body.write(data)
82 |
83 | body.write(b"\r\n")
84 |
85 | body.write(f"--{boundary}--\r\n".encode("latin-1"))
86 |
87 | content_type = f"multipart/form-data; boundary={boundary}"
88 |
89 | return body.getvalue(), content_type
90 |
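A short sketch of encode_multipart_formdata with a fixed boundary so the output is deterministic; the field names and values are made up:

    # Sketch: building a multipart/form-data body by hand.
    from urllib3.filepost import encode_multipart_formdata

    fields = {
        "name": "sam-hello-world",
        "readme": ("README.md", b"hello", "text/markdown"),  # (filename, data, content type)
    }
    body, content_type = encode_multipart_formdata(fields, boundary="boundary123")
    print(content_type)          # multipart/form-data; boundary=boundary123
    print(body.decode("utf-8"))  # two parts, each delimited by --boundary123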
--------------------------------------------------------------------------------
/tests/unit/test_handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | import pytest
4 |
5 | from hello_world import app
6 |
7 |
8 | @pytest.fixture()
9 | def apigw_event():
10 | """ Generates API GW Event"""
11 |
12 | return {
13 | "body": '{ "test": "body"}',
14 | "resource": "/{proxy+}",
15 | "requestContext": {
16 | "resourceId": "123456",
17 | "apiId": "1234567890",
18 | "resourcePath": "/{proxy+}",
19 | "httpMethod": "POST",
20 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
21 | "accountId": "123456789012",
22 | "identity": {
23 | "apiKey": "",
24 | "userArn": "",
25 | "cognitoAuthenticationType": "",
26 | "caller": "",
27 | "userAgent": "Custom User Agent String",
28 | "user": "",
29 | "cognitoIdentityPoolId": "",
30 | "cognitoIdentityId": "",
31 | "cognitoAuthenticationProvider": "",
32 | "sourceIp": "127.0.0.1",
33 | "accountId": "",
34 | },
35 | "stage": "prod",
36 | },
37 | "queryStringParameters": {"foo": "bar"},
38 | "headers": {
39 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
40 | "Accept-Language": "en-US,en;q=0.8",
41 | "CloudFront-Is-Desktop-Viewer": "true",
42 | "CloudFront-Is-SmartTV-Viewer": "false",
43 | "CloudFront-Is-Mobile-Viewer": "false",
44 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2",
45 | "CloudFront-Viewer-Country": "US",
46 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
47 | "Upgrade-Insecure-Requests": "1",
48 | "X-Forwarded-Port": "443",
49 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com",
50 | "X-Forwarded-Proto": "https",
51 | "X-Amz-Cf-Id": "aaaaaaaaaae3VYQb9jd-nvCd-de396Uhbp027Y2JvkCPNLmGJHqlaA==",
52 | "CloudFront-Is-Tablet-Viewer": "false",
53 | "Cache-Control": "max-age=0",
54 | "User-Agent": "Custom User Agent String",
55 | "CloudFront-Forwarded-Proto": "https",
56 | "Accept-Encoding": "gzip, deflate, sdch",
57 | },
58 | "pathParameters": {"proxy": "/examplepath"},
59 | "httpMethod": "POST",
60 | "stageVariables": {"baz": "qux"},
61 | "path": "/examplepath",
62 | }
63 |
64 |
65 | def test_lambda_handler(apigw_event):
66 |
67 | ret = app.lambda_handler(apigw_event, "")
68 | data = json.loads(ret["body"])
69 |
70 | assert ret["statusCode"] == 200
71 | assert "message" in ret["body"]
72 | assert data["message"] == "hello world"
73 |
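Note: the unit test above only depends on lambda_handler returning a 200 status code and a JSON body whose "message" field equals "hello world". A minimal handler satisfying that contract would look roughly like the sketch below; this is the interface the test assumes, not necessarily the exact contents of hello_world/app.py.

    import json


    def lambda_handler(event, context):
        """Return an API Gateway proxy-style response with a JSON body."""
        return {
            "statusCode": 200,
            "body": json.dumps({"message": "hello world"}),
        }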
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/filepost.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import binascii
4 | import codecs
5 | import os
6 | import typing
7 | from io import BytesIO
8 |
9 | from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField
10 |
11 | writer = codecs.lookup("utf-8")[3]
12 |
13 | _TYPE_FIELDS_SEQUENCE = typing.Sequence[
14 | typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField]
15 | ]
16 | _TYPE_FIELDS = typing.Union[
17 | _TYPE_FIELDS_SEQUENCE,
18 | typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE],
19 | ]
20 |
21 |
22 | def choose_boundary() -> str:
23 | """
24 | Our embarrassingly-simple replacement for mimetools.choose_boundary.
25 | """
26 | return binascii.hexlify(os.urandom(16)).decode()
27 |
28 |
29 | def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]:
30 | """
31 | Iterate over fields.
32 |
33 | Supports list of (k, v) tuples and dicts, and lists of
34 | :class:`~urllib3.fields.RequestField`.
35 |
36 | """
37 | iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]]
38 |
39 | if isinstance(fields, typing.Mapping):
40 | iterable = fields.items()
41 | else:
42 | iterable = fields
43 |
44 | for field in iterable:
45 | if isinstance(field, RequestField):
46 | yield field
47 | else:
48 | yield RequestField.from_tuples(*field)
49 |
50 |
51 | def encode_multipart_formdata(
52 | fields: _TYPE_FIELDS, boundary: str | None = None
53 | ) -> tuple[bytes, str]:
54 | """
55 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
56 |
57 | :param fields:
58 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
59 | Values are processed by :func:`urllib3.fields.RequestField.from_tuples`.
60 |
61 | :param boundary:
62 | If not specified, then a random boundary will be generated using
63 | :func:`urllib3.filepost.choose_boundary`.
64 | """
65 | body = BytesIO()
66 | if boundary is None:
67 | boundary = choose_boundary()
68 |
69 | for field in iter_field_objects(fields):
70 | body.write(f"--{boundary}\r\n".encode("latin-1"))
71 |
72 | writer(body).write(field.render_headers())
73 | data = field.data
74 |
75 | if isinstance(data, int):
76 | data = str(data) # Backwards compatibility
77 |
78 | if isinstance(data, str):
79 | writer(body).write(data)
80 | else:
81 | body.write(data)
82 |
83 | body.write(b"\r\n")
84 |
85 | body.write(f"--{boundary}--\r\n".encode("latin-1"))
86 |
87 | content_type = f"multipart/form-data; boundary={boundary}"
88 |
89 | return body.getvalue(), content_type
90 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3-2.2.1.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | urllib3/__init__.py,sha256=JMo1tg1nIV1AeJ2vENC_Txfl0e5h6Gzl9DGVk1rWRbo,6979
2 | urllib3/_base_connection.py,sha256=p-DOG_Me7-sJXO1R9VgDpNmdVU_kIS8VtaC7ptEllA0,5640
3 | urllib3/_collections.py,sha256=vzKA-7X-9resOamEWq52uV1nHshChjbYDvz47H0mMjw,17400
4 | urllib3/_request_methods.py,sha256=ucEpHQyQf06b9o1RxKLkCpzGH0ct-v7X2xGpU6rmmlo,9984
5 | urllib3/_version.py,sha256=12idLAcGmrAURPX52rGioBo33oQ__-ENJEdeqHvUUZg,98
6 | urllib3/connection.py,sha256=zFgaaoqrICsl7-kBp-_4va9m82sYhioAuy4-4iDpK0I,34704
7 | urllib3/connectionpool.py,sha256=XjTfYowLwN5ZzRMO41_OTbGNX4ANifgYVpWsVMRuC00,43556
8 | urllib3/exceptions.py,sha256=RDaiudtR7rqbVKTKpLSgZBBtwaIqV7eZtervZV_mZag,9393
9 | urllib3/fields.py,sha256=8vi0PeRo_pE5chPmJA07LZtMkVls4UrBS1k2xM506jM,10843
10 | urllib3/filepost.py,sha256=-9qJT11cNGjO9dqnI20-oErZuTvNaM18xZZPCjZSbOE,2395
11 | urllib3/http2.py,sha256=4QQcjTM9UYOQZe0r8KnA8anU9ST4p_s3SB3gRTueyPc,7480
12 | urllib3/poolmanager.py,sha256=fcC3OwjFKxha06NsOORwbZOzrVt1pyY-bNCbKiqC0l8,22935
13 | urllib3/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93
14 | urllib3/response.py,sha256=lmvseToQbkLXuFyA3jcSyCPjTgSfa6YPA4xUhVqq8QI,43874
15 | urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16 | urllib3/contrib/pyopenssl.py,sha256=X31eCYGwB09EkAHX8RhDKC0X0Ki7d0cCVWoMJZUM5bQ,19161
17 | urllib3/contrib/socks.py,sha256=gFS2-zOw4_vLGpUvExOf3fNVT8liz6vhM2t6lBPn3CY,7572
18 | urllib3/contrib/emscripten/__init__.py,sha256=u6KNgzjlFZbuAAXa_ybCR7gQ71VJESnF-IIdDA73brw,733
19 | urllib3/contrib/emscripten/connection.py,sha256=kaBe2tWt7Yy9vNUFRBV7CSyDnfhCYILGxju9KTZj8Sw,8755
20 | urllib3/contrib/emscripten/emscripten_fetch_worker.js,sha256=CDfYF_9CDobtx2lGidyJ1zjDEvwNT5F-dchmVWXDh0E,3655
21 | urllib3/contrib/emscripten/fetch.py,sha256=ymwJlHBBuw6WTpKgPHpdmmrNBxlsr75HqoD4Rn27YXk,14131
22 | urllib3/contrib/emscripten/request.py,sha256=mL28szy1KvE3NJhWor5jNmarp8gwplDU-7gwGZY5g0Q,566
23 | urllib3/contrib/emscripten/response.py,sha256=wIDmdJ4doFWqLl5s86l9n0V70gFjQ2HWaPgz69jM52E,9546
24 | urllib3/util/__init__.py,sha256=-qeS0QceivazvBEKDNFCAI-6ACcdDOE4TMvo7SLNlAQ,1001
25 | urllib3/util/connection.py,sha256=QeUUEuNmhznpuKNPL-B0IVOkMdMCu8oJX62OC0Vpzug,4462
26 | urllib3/util/proxy.py,sha256=seP8-Q5B6bB0dMtwPj-YcZZQ30vHuLqRu-tI0JZ2fzs,1148
27 | urllib3/util/request.py,sha256=PQnBmKUHMQ0hQQ41uhbLNAeA24ke60m6zeiwfwocpGo,8102
28 | urllib3/util/response.py,sha256=vQE639uoEhj1vpjEdxu5lNIhJCSUZkd7pqllUI0BZOA,3374
29 | urllib3/util/retry.py,sha256=WB-7x1m7fQH_-Qqtrk2OGvz93GvBTxc-pRn8Vf3p4mg,18384
30 | urllib3/util/ssl_.py,sha256=FeymdS68RggEROwMB9VLGSqLHq2hRUKnIbQC_bCpGJI,19109
31 | urllib3/util/ssl_match_hostname.py,sha256=gaWqixoYtQ_GKO8fcRGFj3VXeMoqyxQQuUTPgWeiL_M,5812
32 | urllib3/util/ssltransport.py,sha256=SF__JQXVcHBQniFJZp3P9q-UeHM310WVwcBwqT9dCLE,9034
33 | urllib3/util/timeout.py,sha256=4eT1FVeZZU7h7mYD1Jq2OXNe4fxekdNvhoWUkZusRpA,10346
34 | urllib3/util/url.py,sha256=wHORhp80RAXyTlAIkTqLFzSrkU7J34ZDxX-tN65MBZk,15213
35 | urllib3/util/util.py,sha256=j3lbZK1jPyiwD34T8IgJzdWEZVT-4E-0vYIJi9UjeNA,1146
36 | urllib3/util/wait.py,sha256=_ph8IrUR3sqPqi0OopQgJUlH4wzkGeM5CiyA7XGGtmI,4423
37 | urllib3-2.2.1.dist-info/METADATA,sha256=uROmjQwfAbwRYjV9PMdc5JF5NA3kRkpoKafPkNzybfc,6434
38 | urllib3-2.2.1.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
39 | urllib3-2.2.1.dist-info/licenses/LICENSE.txt,sha256=Ew46ZNX91dCWp1JpRjSn2d8oRGnehuVzIQAmgEHj1oY,1093
40 | urllib3-2.2.1.dist-info/RECORD,,
41 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3-2.2.1.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | urllib3/__init__.py,sha256=JMo1tg1nIV1AeJ2vENC_Txfl0e5h6Gzl9DGVk1rWRbo,6979
2 | urllib3/_base_connection.py,sha256=p-DOG_Me7-sJXO1R9VgDpNmdVU_kIS8VtaC7ptEllA0,5640
3 | urllib3/_collections.py,sha256=vzKA-7X-9resOamEWq52uV1nHshChjbYDvz47H0mMjw,17400
4 | urllib3/_request_methods.py,sha256=ucEpHQyQf06b9o1RxKLkCpzGH0ct-v7X2xGpU6rmmlo,9984
5 | urllib3/_version.py,sha256=12idLAcGmrAURPX52rGioBo33oQ__-ENJEdeqHvUUZg,98
6 | urllib3/connection.py,sha256=zFgaaoqrICsl7-kBp-_4va9m82sYhioAuy4-4iDpK0I,34704
7 | urllib3/connectionpool.py,sha256=XjTfYowLwN5ZzRMO41_OTbGNX4ANifgYVpWsVMRuC00,43556
8 | urllib3/exceptions.py,sha256=RDaiudtR7rqbVKTKpLSgZBBtwaIqV7eZtervZV_mZag,9393
9 | urllib3/fields.py,sha256=8vi0PeRo_pE5chPmJA07LZtMkVls4UrBS1k2xM506jM,10843
10 | urllib3/filepost.py,sha256=-9qJT11cNGjO9dqnI20-oErZuTvNaM18xZZPCjZSbOE,2395
11 | urllib3/http2.py,sha256=4QQcjTM9UYOQZe0r8KnA8anU9ST4p_s3SB3gRTueyPc,7480
12 | urllib3/poolmanager.py,sha256=fcC3OwjFKxha06NsOORwbZOzrVt1pyY-bNCbKiqC0l8,22935
13 | urllib3/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93
14 | urllib3/response.py,sha256=lmvseToQbkLXuFyA3jcSyCPjTgSfa6YPA4xUhVqq8QI,43874
15 | urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16 | urllib3/contrib/pyopenssl.py,sha256=X31eCYGwB09EkAHX8RhDKC0X0Ki7d0cCVWoMJZUM5bQ,19161
17 | urllib3/contrib/socks.py,sha256=gFS2-zOw4_vLGpUvExOf3fNVT8liz6vhM2t6lBPn3CY,7572
18 | urllib3/contrib/emscripten/__init__.py,sha256=u6KNgzjlFZbuAAXa_ybCR7gQ71VJESnF-IIdDA73brw,733
19 | urllib3/contrib/emscripten/connection.py,sha256=kaBe2tWt7Yy9vNUFRBV7CSyDnfhCYILGxju9KTZj8Sw,8755
20 | urllib3/contrib/emscripten/emscripten_fetch_worker.js,sha256=CDfYF_9CDobtx2lGidyJ1zjDEvwNT5F-dchmVWXDh0E,3655
21 | urllib3/contrib/emscripten/fetch.py,sha256=ymwJlHBBuw6WTpKgPHpdmmrNBxlsr75HqoD4Rn27YXk,14131
22 | urllib3/contrib/emscripten/request.py,sha256=mL28szy1KvE3NJhWor5jNmarp8gwplDU-7gwGZY5g0Q,566
23 | urllib3/contrib/emscripten/response.py,sha256=wIDmdJ4doFWqLl5s86l9n0V70gFjQ2HWaPgz69jM52E,9546
24 | urllib3/util/__init__.py,sha256=-qeS0QceivazvBEKDNFCAI-6ACcdDOE4TMvo7SLNlAQ,1001
25 | urllib3/util/connection.py,sha256=QeUUEuNmhznpuKNPL-B0IVOkMdMCu8oJX62OC0Vpzug,4462
26 | urllib3/util/proxy.py,sha256=seP8-Q5B6bB0dMtwPj-YcZZQ30vHuLqRu-tI0JZ2fzs,1148
27 | urllib3/util/request.py,sha256=PQnBmKUHMQ0hQQ41uhbLNAeA24ke60m6zeiwfwocpGo,8102
28 | urllib3/util/response.py,sha256=vQE639uoEhj1vpjEdxu5lNIhJCSUZkd7pqllUI0BZOA,3374
29 | urllib3/util/retry.py,sha256=WB-7x1m7fQH_-Qqtrk2OGvz93GvBTxc-pRn8Vf3p4mg,18384
30 | urllib3/util/ssl_.py,sha256=FeymdS68RggEROwMB9VLGSqLHq2hRUKnIbQC_bCpGJI,19109
31 | urllib3/util/ssl_match_hostname.py,sha256=gaWqixoYtQ_GKO8fcRGFj3VXeMoqyxQQuUTPgWeiL_M,5812
32 | urllib3/util/ssltransport.py,sha256=SF__JQXVcHBQniFJZp3P9q-UeHM310WVwcBwqT9dCLE,9034
33 | urllib3/util/timeout.py,sha256=4eT1FVeZZU7h7mYD1Jq2OXNe4fxekdNvhoWUkZusRpA,10346
34 | urllib3/util/url.py,sha256=wHORhp80RAXyTlAIkTqLFzSrkU7J34ZDxX-tN65MBZk,15213
35 | urllib3/util/util.py,sha256=j3lbZK1jPyiwD34T8IgJzdWEZVT-4E-0vYIJi9UjeNA,1146
36 | urllib3/util/wait.py,sha256=_ph8IrUR3sqPqi0OopQgJUlH4wzkGeM5CiyA7XGGtmI,4423
37 | urllib3-2.2.1.dist-info/METADATA,sha256=uROmjQwfAbwRYjV9PMdc5JF5NA3kRkpoKafPkNzybfc,6434
38 | urllib3-2.2.1.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
39 | urllib3-2.2.1.dist-info/licenses/LICENSE.txt,sha256=Ew46ZNX91dCWp1JpRjSn2d8oRGnehuVzIQAmgEHj1oY,1093
40 | urllib3-2.2.1.dist-info/RECORD,,
41 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/structures.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.structures
3 | ~~~~~~~~~~~~~~~~~~~
4 |
5 | Data structures that power Requests.
6 | """
7 |
8 | from collections import OrderedDict
9 |
10 | from .compat import Mapping, MutableMapping
11 |
12 |
13 | class CaseInsensitiveDict(MutableMapping):
14 | """A case-insensitive ``dict``-like object.
15 |
16 | Implements all methods and operations of
17 | ``MutableMapping`` as well as dict's ``copy``. Also
18 | provides ``lower_items``.
19 |
20 | All keys are expected to be strings. The structure remembers the
21 | case of the last key to be set, and ``iter(instance)``,
22 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
23 | will contain case-sensitive keys. However, querying and contains
24 | testing is case insensitive::
25 |
26 | cid = CaseInsensitiveDict()
27 | cid['Accept'] = 'application/json'
28 | cid['aCCEPT'] == 'application/json' # True
29 | list(cid) == ['Accept'] # True
30 |
31 | For example, ``headers['content-encoding']`` will return the
32 | value of a ``'Content-Encoding'`` response header, regardless
33 | of how the header name was originally stored.
34 |
35 | If the constructor, ``.update``, or equality comparison
36 | operations are given keys that have equal ``.lower()``s, the
37 | behavior is undefined.
38 | """
39 |
40 | def __init__(self, data=None, **kwargs):
41 | self._store = OrderedDict()
42 | if data is None:
43 | data = {}
44 | self.update(data, **kwargs)
45 |
46 | def __setitem__(self, key, value):
47 | # Use the lowercased key for lookups, but store the actual
48 | # key alongside the value.
49 | self._store[key.lower()] = (key, value)
50 |
51 | def __getitem__(self, key):
52 | return self._store[key.lower()][1]
53 |
54 | def __delitem__(self, key):
55 | del self._store[key.lower()]
56 |
57 | def __iter__(self):
58 | return (casedkey for casedkey, mappedvalue in self._store.values())
59 |
60 | def __len__(self):
61 | return len(self._store)
62 |
63 | def lower_items(self):
64 | """Like iteritems(), but with all lowercase keys."""
65 | return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
66 |
67 | def __eq__(self, other):
68 | if isinstance(other, Mapping):
69 | other = CaseInsensitiveDict(other)
70 | else:
71 | return NotImplemented
72 | # Compare insensitively
73 | return dict(self.lower_items()) == dict(other.lower_items())
74 |
75 | # Copy is required
76 | def copy(self):
77 | return CaseInsensitiveDict(self._store.values())
78 |
79 | def __repr__(self):
80 | return str(dict(self.items()))
81 |
82 |
83 | class LookupDict(dict):
84 | """Dictionary lookup object."""
85 |
86 | def __init__(self, name=None):
87 | self.name = name
88 | super().__init__()
89 |
90 | def __repr__(self):
91 |         return f"<lookup '{self.name}'>"
92 |
93 | def __getitem__(self, key):
94 | # We allow fall-through here, so values default to None
95 |
96 | return self.__dict__.get(key, None)
97 |
98 | def get(self, key, default=None):
99 | return self.__dict__.get(key, default)
100 |
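Note: a short illustration of how the two structures above behave (values are illustrative). CaseInsensitiveDict ignores key case on lookup while preserving the last-set casing; LookupDict stores entries as attributes and falls back to None for unknown keys.

    from requests.structures import CaseInsensitiveDict, LookupDict

    headers = CaseInsensitiveDict()
    headers["Content-Type"] = "application/json"
    assert headers["content-type"] == "application/json"  # lookup is case-insensitive
    assert list(headers) == ["Content-Type"]               # original casing preserved

    codes = LookupDict(name="demo")
    codes.ok = 200                   # entries live in the instance __dict__
    assert codes["ok"] == 200        # __getitem__ falls through to __dict__
    assert codes["missing"] is None  # unknown keys default to None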
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/structures.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.structures
3 | ~~~~~~~~~~~~~~~~~~~
4 |
5 | Data structures that power Requests.
6 | """
7 |
8 | from collections import OrderedDict
9 |
10 | from .compat import Mapping, MutableMapping
11 |
12 |
13 | class CaseInsensitiveDict(MutableMapping):
14 | """A case-insensitive ``dict``-like object.
15 |
16 | Implements all methods and operations of
17 | ``MutableMapping`` as well as dict's ``copy``. Also
18 | provides ``lower_items``.
19 |
20 | All keys are expected to be strings. The structure remembers the
21 | case of the last key to be set, and ``iter(instance)``,
22 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
23 | will contain case-sensitive keys. However, querying and contains
24 | testing is case insensitive::
25 |
26 | cid = CaseInsensitiveDict()
27 | cid['Accept'] = 'application/json'
28 | cid['aCCEPT'] == 'application/json' # True
29 | list(cid) == ['Accept'] # True
30 |
31 | For example, ``headers['content-encoding']`` will return the
32 | value of a ``'Content-Encoding'`` response header, regardless
33 | of how the header name was originally stored.
34 |
35 | If the constructor, ``.update``, or equality comparison
36 | operations are given keys that have equal ``.lower()``s, the
37 | behavior is undefined.
38 | """
39 |
40 | def __init__(self, data=None, **kwargs):
41 | self._store = OrderedDict()
42 | if data is None:
43 | data = {}
44 | self.update(data, **kwargs)
45 |
46 | def __setitem__(self, key, value):
47 | # Use the lowercased key for lookups, but store the actual
48 | # key alongside the value.
49 | self._store[key.lower()] = (key, value)
50 |
51 | def __getitem__(self, key):
52 | return self._store[key.lower()][1]
53 |
54 | def __delitem__(self, key):
55 | del self._store[key.lower()]
56 |
57 | def __iter__(self):
58 | return (casedkey for casedkey, mappedvalue in self._store.values())
59 |
60 | def __len__(self):
61 | return len(self._store)
62 |
63 | def lower_items(self):
64 | """Like iteritems(), but with all lowercase keys."""
65 | return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
66 |
67 | def __eq__(self, other):
68 | if isinstance(other, Mapping):
69 | other = CaseInsensitiveDict(other)
70 | else:
71 | return NotImplemented
72 | # Compare insensitively
73 | return dict(self.lower_items()) == dict(other.lower_items())
74 |
75 | # Copy is required
76 | def copy(self):
77 | return CaseInsensitiveDict(self._store.values())
78 |
79 | def __repr__(self):
80 | return str(dict(self.items()))
81 |
82 |
83 | class LookupDict(dict):
84 | """Dictionary lookup object."""
85 |
86 | def __init__(self, name=None):
87 | self.name = name
88 | super().__init__()
89 |
90 | def __repr__(self):
91 | return f""
92 |
93 | def __getitem__(self, key):
94 | # We allow fall-through here, so values default to None
95 |
96 | return self.__dict__.get(key, None)
97 |
98 | def get(self, key, default=None):
99 | return self.__dict__.get(key, default)
100 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/util/response.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import http.client as httplib
4 | from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
5 |
6 | from ..exceptions import HeaderParsingError
7 |
8 |
9 | def is_fp_closed(obj: object) -> bool:
10 | """
11 | Checks whether a given file-like object is closed.
12 |
13 | :param obj:
14 | The file-like object to check.
15 | """
16 |
17 | try:
18 | # Check `isclosed()` first, in case Python3 doesn't set `closed`.
19 | # GH Issue #928
20 | return obj.isclosed() # type: ignore[no-any-return, attr-defined]
21 | except AttributeError:
22 | pass
23 |
24 | try:
25 | # Check via the official file-like-object way.
26 | return obj.closed # type: ignore[no-any-return, attr-defined]
27 | except AttributeError:
28 | pass
29 |
30 | try:
31 | # Check if the object is a container for another file-like object that
32 | # gets released on exhaustion (e.g. HTTPResponse).
33 | return obj.fp is None # type: ignore[attr-defined]
34 | except AttributeError:
35 | pass
36 |
37 | raise ValueError("Unable to determine whether fp is closed.")
38 |
39 |
40 | def assert_header_parsing(headers: httplib.HTTPMessage) -> None:
41 | """
42 | Asserts whether all headers have been successfully parsed.
43 | Extracts encountered errors from the result of parsing headers.
44 |
45 | Only works on Python 3.
46 |
47 | :param http.client.HTTPMessage headers: Headers to verify.
48 |
49 | :raises urllib3.exceptions.HeaderParsingError:
50 | If parsing errors are found.
51 | """
52 |
53 | # This will fail silently if we pass in the wrong kind of parameter.
54 | # To make debugging easier add an explicit check.
55 | if not isinstance(headers, httplib.HTTPMessage):
56 | raise TypeError(f"expected httplib.Message, got {type(headers)}.")
57 |
58 | unparsed_data = None
59 |
60 | # get_payload is actually email.message.Message.get_payload;
61 | # we're only interested in the result if it's not a multipart message
62 | if not headers.is_multipart():
63 | payload = headers.get_payload()
64 |
65 | if isinstance(payload, (bytes, str)):
66 | unparsed_data = payload
67 |
68 | # httplib is assuming a response body is available
69 | # when parsing headers even when httplib only sends
70 | # header data to parse_headers() This results in
71 | # defects on multipart responses in particular.
72 | # See: https://github.com/urllib3/urllib3/issues/800
73 |
74 | # So we ignore the following defects:
75 | # - StartBoundaryNotFoundDefect:
76 | # The claimed start boundary was never found.
77 | # - MultipartInvariantViolationDefect:
78 | # A message claimed to be a multipart but no subparts were found.
79 | defects = [
80 | defect
81 | for defect in headers.defects
82 | if not isinstance(
83 | defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
84 | )
85 | ]
86 |
87 | if defects or unparsed_data:
88 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
89 |
90 |
91 | def is_response_to_head(response: httplib.HTTPResponse) -> bool:
92 | """
93 | Checks whether the request of a response has been a HEAD-request.
94 |
95 | :param http.client.HTTPResponse response:
96 | Response to check if the originating request
97 | used 'HEAD' as a method.
98 | """
99 | # FIXME: Can we do this somehow without accessing private httplib _method?
100 | method_str = response._method # type: str # type: ignore[attr-defined]
101 | return method_str.upper() == "HEAD"
102 |
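Note: a tiny illustration of the file-closed probing in is_fp_closed above, using an in-memory buffer; real callers pass the fp of an HTTP response.

    import io

    from urllib3.util.response import is_fp_closed

    buf = io.BytesIO(b"payload")
    assert is_fp_closed(buf) is False  # the .closed attribute is consulted
    buf.close()
    assert is_fp_closed(buf) is True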
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/util/response.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import http.client as httplib
4 | from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
5 |
6 | from ..exceptions import HeaderParsingError
7 |
8 |
9 | def is_fp_closed(obj: object) -> bool:
10 | """
11 | Checks whether a given file-like object is closed.
12 |
13 | :param obj:
14 | The file-like object to check.
15 | """
16 |
17 | try:
18 | # Check `isclosed()` first, in case Python3 doesn't set `closed`.
19 | # GH Issue #928
20 | return obj.isclosed() # type: ignore[no-any-return, attr-defined]
21 | except AttributeError:
22 | pass
23 |
24 | try:
25 | # Check via the official file-like-object way.
26 | return obj.closed # type: ignore[no-any-return, attr-defined]
27 | except AttributeError:
28 | pass
29 |
30 | try:
31 | # Check if the object is a container for another file-like object that
32 | # gets released on exhaustion (e.g. HTTPResponse).
33 | return obj.fp is None # type: ignore[attr-defined]
34 | except AttributeError:
35 | pass
36 |
37 | raise ValueError("Unable to determine whether fp is closed.")
38 |
39 |
40 | def assert_header_parsing(headers: httplib.HTTPMessage) -> None:
41 | """
42 | Asserts whether all headers have been successfully parsed.
43 | Extracts encountered errors from the result of parsing headers.
44 |
45 | Only works on Python 3.
46 |
47 | :param http.client.HTTPMessage headers: Headers to verify.
48 |
49 | :raises urllib3.exceptions.HeaderParsingError:
50 | If parsing errors are found.
51 | """
52 |
53 | # This will fail silently if we pass in the wrong kind of parameter.
54 | # To make debugging easier add an explicit check.
55 | if not isinstance(headers, httplib.HTTPMessage):
56 | raise TypeError(f"expected httplib.Message, got {type(headers)}.")
57 |
58 | unparsed_data = None
59 |
60 | # get_payload is actually email.message.Message.get_payload;
61 | # we're only interested in the result if it's not a multipart message
62 | if not headers.is_multipart():
63 | payload = headers.get_payload()
64 |
65 | if isinstance(payload, (bytes, str)):
66 | unparsed_data = payload
67 |
68 | # httplib is assuming a response body is available
69 | # when parsing headers even when httplib only sends
70 | # header data to parse_headers() This results in
71 | # defects on multipart responses in particular.
72 | # See: https://github.com/urllib3/urllib3/issues/800
73 |
74 | # So we ignore the following defects:
75 | # - StartBoundaryNotFoundDefect:
76 | # The claimed start boundary was never found.
77 | # - MultipartInvariantViolationDefect:
78 | # A message claimed to be a multipart but no subparts were found.
79 | defects = [
80 | defect
81 | for defect in headers.defects
82 | if not isinstance(
83 | defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
84 | )
85 | ]
86 |
87 | if defects or unparsed_data:
88 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
89 |
90 |
91 | def is_response_to_head(response: httplib.HTTPResponse) -> bool:
92 | """
93 | Checks whether the request of a response has been a HEAD-request.
94 |
95 | :param http.client.HTTPResponse response:
96 | Response to check if the originating request
97 | used 'HEAD' as a method.
98 | """
99 | # FIXME: Can we do this somehow without accessing private httplib _method?
100 | method_str = response._method # type: str # type: ignore[attr-defined]
101 | return method_str.upper() == "HEAD"
102 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/idna/codec.py:
--------------------------------------------------------------------------------
1 | from .core import encode, decode, alabel, ulabel, IDNAError
2 | import codecs
3 | import re
4 | from typing import Any, Tuple, Optional
5 |
6 | _unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
7 |
8 | class Codec(codecs.Codec):
9 |
10 | def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]:
11 | if errors != 'strict':
12 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
13 |
14 | if not data:
15 | return b"", 0
16 |
17 | return encode(data), len(data)
18 |
19 | def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]:
20 | if errors != 'strict':
21 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
22 |
23 | if not data:
24 | return '', 0
25 |
26 | return decode(data), len(data)
27 |
28 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
29 | def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
30 | if errors != 'strict':
31 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
32 |
33 | if not data:
34 | return b'', 0
35 |
36 | labels = _unicode_dots_re.split(data)
37 | trailing_dot = b''
38 | if labels:
39 | if not labels[-1]:
40 | trailing_dot = b'.'
41 | del labels[-1]
42 | elif not final:
43 | # Keep potentially unfinished label until the next call
44 | del labels[-1]
45 | if labels:
46 | trailing_dot = b'.'
47 |
48 | result = []
49 | size = 0
50 | for label in labels:
51 | result.append(alabel(label))
52 | if size:
53 | size += 1
54 | size += len(label)
55 |
56 | # Join with U+002E
57 | result_bytes = b'.'.join(result) + trailing_dot
58 | size += len(trailing_dot)
59 | return result_bytes, size
60 |
61 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
62 | def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
63 | if errors != 'strict':
64 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
65 |
66 | if not data:
67 | return ('', 0)
68 |
69 | if not isinstance(data, str):
70 | data = str(data, 'ascii')
71 |
72 | labels = _unicode_dots_re.split(data)
73 | trailing_dot = ''
74 | if labels:
75 | if not labels[-1]:
76 | trailing_dot = '.'
77 | del labels[-1]
78 | elif not final:
79 | # Keep potentially unfinished label until the next call
80 | del labels[-1]
81 | if labels:
82 | trailing_dot = '.'
83 |
84 | result = []
85 | size = 0
86 | for label in labels:
87 | result.append(ulabel(label))
88 | if size:
89 | size += 1
90 | size += len(label)
91 |
92 | result_str = '.'.join(result) + trailing_dot
93 | size += len(trailing_dot)
94 | return (result_str, size)
95 |
96 |
97 | class StreamWriter(Codec, codecs.StreamWriter):
98 | pass
99 |
100 |
101 | class StreamReader(Codec, codecs.StreamReader):
102 | pass
103 |
104 |
105 | def search_function(name: str) -> Optional[codecs.CodecInfo]:
106 | if name != 'idna2008':
107 | return None
108 | return codecs.CodecInfo(
109 | name=name,
110 | encode=Codec().encode,
111 | decode=Codec().decode,
112 | incrementalencoder=IncrementalEncoder,
113 | incrementaldecoder=IncrementalDecoder,
114 | streamwriter=StreamWriter,
115 | streamreader=StreamReader,
116 | )
117 |
118 | codecs.register(search_function)
119 |
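Note: importing this module registers an 'idna2008' codec via the codecs.register call above, so internationalized domain names can be converted through the ordinary str/bytes codec interface. A small sketch, assuming the idna package is on the path:

    import idna.codec  # noqa: F401  (the import itself registers the codec)

    # Encode a Unicode domain to its ASCII-compatible form and back.
    ascii_name = "bücher.example".encode("idna2008")
    assert ascii_name == b"xn--bcher-kva.example"
    assert ascii_name.decode("idna2008") == "bücher.example"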
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/idna/codec.py:
--------------------------------------------------------------------------------
1 | from .core import encode, decode, alabel, ulabel, IDNAError
2 | import codecs
3 | import re
4 | from typing import Any, Tuple, Optional
5 |
6 | _unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
7 |
8 | class Codec(codecs.Codec):
9 |
10 | def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]:
11 | if errors != 'strict':
12 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
13 |
14 | if not data:
15 | return b"", 0
16 |
17 | return encode(data), len(data)
18 |
19 | def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]:
20 | if errors != 'strict':
21 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
22 |
23 | if not data:
24 | return '', 0
25 |
26 | return decode(data), len(data)
27 |
28 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
29 | def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
30 | if errors != 'strict':
31 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
32 |
33 | if not data:
34 | return b'', 0
35 |
36 | labels = _unicode_dots_re.split(data)
37 | trailing_dot = b''
38 | if labels:
39 | if not labels[-1]:
40 | trailing_dot = b'.'
41 | del labels[-1]
42 | elif not final:
43 | # Keep potentially unfinished label until the next call
44 | del labels[-1]
45 | if labels:
46 | trailing_dot = b'.'
47 |
48 | result = []
49 | size = 0
50 | for label in labels:
51 | result.append(alabel(label))
52 | if size:
53 | size += 1
54 | size += len(label)
55 |
56 | # Join with U+002E
57 | result_bytes = b'.'.join(result) + trailing_dot
58 | size += len(trailing_dot)
59 | return result_bytes, size
60 |
61 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
62 | def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
63 | if errors != 'strict':
64 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
65 |
66 | if not data:
67 | return ('', 0)
68 |
69 | if not isinstance(data, str):
70 | data = str(data, 'ascii')
71 |
72 | labels = _unicode_dots_re.split(data)
73 | trailing_dot = ''
74 | if labels:
75 | if not labels[-1]:
76 | trailing_dot = '.'
77 | del labels[-1]
78 | elif not final:
79 | # Keep potentially unfinished label until the next call
80 | del labels[-1]
81 | if labels:
82 | trailing_dot = '.'
83 |
84 | result = []
85 | size = 0
86 | for label in labels:
87 | result.append(ulabel(label))
88 | if size:
89 | size += 1
90 | size += len(label)
91 |
92 | result_str = '.'.join(result) + trailing_dot
93 | size += len(trailing_dot)
94 | return (result_str, size)
95 |
96 |
97 | class StreamWriter(Codec, codecs.StreamWriter):
98 | pass
99 |
100 |
101 | class StreamReader(Codec, codecs.StreamReader):
102 | pass
103 |
104 |
105 | def search_function(name: str) -> Optional[codecs.CodecInfo]:
106 | if name != 'idna2008':
107 | return None
108 | return codecs.CodecInfo(
109 | name=name,
110 | encode=Codec().encode,
111 | decode=Codec().decode,
112 | incrementalencoder=IncrementalEncoder,
113 | incrementaldecoder=IncrementalDecoder,
114 | streamwriter=StreamWriter,
115 | streamreader=StreamReader,
116 | )
117 |
118 | codecs.register(search_function)
119 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/contrib/emscripten/emscripten_fetch_worker.js:
--------------------------------------------------------------------------------
1 | let Status = {
2 | SUCCESS_HEADER: -1,
3 | SUCCESS_EOF: -2,
4 | ERROR_TIMEOUT: -3,
5 | ERROR_EXCEPTION: -4,
6 | };
7 |
8 | let connections = {};
9 | let nextConnectionID = 1;
10 | const encoder = new TextEncoder();
11 |
12 | self.addEventListener("message", async function (event) {
13 | if (event.data.close) {
14 | let connectionID = event.data.close;
15 | delete connections[connectionID];
16 | return;
17 | } else if (event.data.getMore) {
18 | let connectionID = event.data.getMore;
19 | let { curOffset, value, reader, intBuffer, byteBuffer } =
20 | connections[connectionID];
21 | // if we still have some in buffer, then just send it back straight away
22 | if (!value || curOffset >= value.length) {
23 | // read another buffer if required
24 | try {
25 | let readResponse = await reader.read();
26 |
27 | if (readResponse.done) {
28 | // read everything - clear connection and return
29 | delete connections[connectionID];
30 | Atomics.store(intBuffer, 0, Status.SUCCESS_EOF);
31 | Atomics.notify(intBuffer, 0);
32 | // finished reading successfully
33 | // return from event handler
34 | return;
35 | }
36 | curOffset = 0;
37 | connections[connectionID].value = readResponse.value;
38 | value = readResponse.value;
39 | } catch (error) {
40 | console.log("Request exception:", error);
41 | let errorBytes = encoder.encode(error.message);
42 | let written = errorBytes.length;
43 | byteBuffer.set(errorBytes);
44 | intBuffer[1] = written;
45 | Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
46 | Atomics.notify(intBuffer, 0);
47 | }
48 | }
49 |
50 | // send as much buffer as we can
51 | let curLen = value.length - curOffset;
52 | if (curLen > byteBuffer.length) {
53 | curLen = byteBuffer.length;
54 | }
55 | byteBuffer.set(value.subarray(curOffset, curOffset + curLen), 0);
56 |
57 | Atomics.store(intBuffer, 0, curLen); // store current length in bytes
58 | Atomics.notify(intBuffer, 0);
59 | curOffset += curLen;
60 | connections[connectionID].curOffset = curOffset;
61 |
62 | return;
63 | } else {
64 | // start fetch
65 | let connectionID = nextConnectionID;
66 | nextConnectionID += 1;
67 | const intBuffer = new Int32Array(event.data.buffer);
68 | const byteBuffer = new Uint8Array(event.data.buffer, 8);
69 | try {
70 | const response = await fetch(event.data.url, event.data.fetchParams);
71 | // return the headers first via textencoder
72 | var headers = [];
73 | for (const pair of response.headers.entries()) {
74 | headers.push([pair[0], pair[1]]);
75 | }
76 | let headerObj = {
77 | headers: headers,
78 | status: response.status,
79 | connectionID,
80 | };
81 | const headerText = JSON.stringify(headerObj);
82 | let headerBytes = encoder.encode(headerText);
83 | let written = headerBytes.length;
84 | byteBuffer.set(headerBytes);
85 | intBuffer[1] = written;
86 | // make a connection
87 | connections[connectionID] = {
88 | reader: response.body.getReader(),
89 | intBuffer: intBuffer,
90 | byteBuffer: byteBuffer,
91 | value: undefined,
92 | curOffset: 0,
93 | };
94 | // set header ready
95 | Atomics.store(intBuffer, 0, Status.SUCCESS_HEADER);
96 | Atomics.notify(intBuffer, 0);
97 | // all fetching after this goes through a new postmessage call with getMore
98 | // this allows for parallel requests
99 | } catch (error) {
100 | console.log("Request exception:", error);
101 | let errorBytes = encoder.encode(error.message);
102 | let written = errorBytes.length;
103 | byteBuffer.set(errorBytes);
104 | intBuffer[1] = written;
105 | Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
106 | Atomics.notify(intBuffer, 0);
107 | }
108 | }
109 | });
110 | self.postMessage({ inited: true });
111 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/contrib/emscripten/emscripten_fetch_worker.js:
--------------------------------------------------------------------------------
1 | let Status = {
2 | SUCCESS_HEADER: -1,
3 | SUCCESS_EOF: -2,
4 | ERROR_TIMEOUT: -3,
5 | ERROR_EXCEPTION: -4,
6 | };
7 |
8 | let connections = {};
9 | let nextConnectionID = 1;
10 | const encoder = new TextEncoder();
11 |
12 | self.addEventListener("message", async function (event) {
13 | if (event.data.close) {
14 | let connectionID = event.data.close;
15 | delete connections[connectionID];
16 | return;
17 | } else if (event.data.getMore) {
18 | let connectionID = event.data.getMore;
19 | let { curOffset, value, reader, intBuffer, byteBuffer } =
20 | connections[connectionID];
21 | // if we still have some in buffer, then just send it back straight away
22 | if (!value || curOffset >= value.length) {
23 | // read another buffer if required
24 | try {
25 | let readResponse = await reader.read();
26 |
27 | if (readResponse.done) {
28 | // read everything - clear connection and return
29 | delete connections[connectionID];
30 | Atomics.store(intBuffer, 0, Status.SUCCESS_EOF);
31 | Atomics.notify(intBuffer, 0);
32 | // finished reading successfully
33 | // return from event handler
34 | return;
35 | }
36 | curOffset = 0;
37 | connections[connectionID].value = readResponse.value;
38 | value = readResponse.value;
39 | } catch (error) {
40 | console.log("Request exception:", error);
41 | let errorBytes = encoder.encode(error.message);
42 | let written = errorBytes.length;
43 | byteBuffer.set(errorBytes);
44 | intBuffer[1] = written;
45 | Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
46 | Atomics.notify(intBuffer, 0);
47 | }
48 | }
49 |
50 | // send as much buffer as we can
51 | let curLen = value.length - curOffset;
52 | if (curLen > byteBuffer.length) {
53 | curLen = byteBuffer.length;
54 | }
55 | byteBuffer.set(value.subarray(curOffset, curOffset + curLen), 0);
56 |
57 | Atomics.store(intBuffer, 0, curLen); // store current length in bytes
58 | Atomics.notify(intBuffer, 0);
59 | curOffset += curLen;
60 | connections[connectionID].curOffset = curOffset;
61 |
62 | return;
63 | } else {
64 | // start fetch
65 | let connectionID = nextConnectionID;
66 | nextConnectionID += 1;
67 | const intBuffer = new Int32Array(event.data.buffer);
68 | const byteBuffer = new Uint8Array(event.data.buffer, 8);
69 | try {
70 | const response = await fetch(event.data.url, event.data.fetchParams);
71 | // return the headers first via textencoder
72 | var headers = [];
73 | for (const pair of response.headers.entries()) {
74 | headers.push([pair[0], pair[1]]);
75 | }
76 | let headerObj = {
77 | headers: headers,
78 | status: response.status,
79 | connectionID,
80 | };
81 | const headerText = JSON.stringify(headerObj);
82 | let headerBytes = encoder.encode(headerText);
83 | let written = headerBytes.length;
84 | byteBuffer.set(headerBytes);
85 | intBuffer[1] = written;
86 | // make a connection
87 | connections[connectionID] = {
88 | reader: response.body.getReader(),
89 | intBuffer: intBuffer,
90 | byteBuffer: byteBuffer,
91 | value: undefined,
92 | curOffset: 0,
93 | };
94 | // set header ready
95 | Atomics.store(intBuffer, 0, Status.SUCCESS_HEADER);
96 | Atomics.notify(intBuffer, 0);
97 | // all fetching after this goes through a new postmessage call with getMore
98 | // this allows for parallel requests
99 | } catch (error) {
100 | console.log("Request exception:", error);
101 | let errorBytes = encoder.encode(error.message);
102 | let written = errorBytes.length;
103 | byteBuffer.set(errorBytes);
104 | intBuffer[1] = written;
105 | Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
106 | Atomics.notify(intBuffer, 0);
107 | }
108 | }
109 | });
110 | self.postMessage({ inited: true });
111 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/exceptions.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.exceptions
3 | ~~~~~~~~~~~~~~~~~~~
4 |
5 | This module contains the set of Requests' exceptions.
6 | """
7 | from urllib3.exceptions import HTTPError as BaseHTTPError
8 |
9 | from .compat import JSONDecodeError as CompatJSONDecodeError
10 |
11 |
12 | class RequestException(IOError):
13 | """There was an ambiguous exception that occurred while handling your
14 | request.
15 | """
16 |
17 | def __init__(self, *args, **kwargs):
18 | """Initialize RequestException with `request` and `response` objects."""
19 | response = kwargs.pop("response", None)
20 | self.response = response
21 | self.request = kwargs.pop("request", None)
22 | if response is not None and not self.request and hasattr(response, "request"):
23 | self.request = self.response.request
24 | super().__init__(*args, **kwargs)
25 |
26 |
27 | class InvalidJSONError(RequestException):
28 | """A JSON error occurred."""
29 |
30 |
31 | class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
32 | """Couldn't decode the text into json"""
33 |
34 | def __init__(self, *args, **kwargs):
35 | """
36 | Construct the JSONDecodeError instance first with all
37 | args. Then use it's args to construct the IOError so that
38 | the json specific args aren't used as IOError specific args
39 | and the error message from JSONDecodeError is preserved.
40 | """
41 | CompatJSONDecodeError.__init__(self, *args)
42 | InvalidJSONError.__init__(self, *self.args, **kwargs)
43 |
44 |
45 | class HTTPError(RequestException):
46 | """An HTTP error occurred."""
47 |
48 |
49 | class ConnectionError(RequestException):
50 | """A Connection error occurred."""
51 |
52 |
53 | class ProxyError(ConnectionError):
54 | """A proxy error occurred."""
55 |
56 |
57 | class SSLError(ConnectionError):
58 | """An SSL error occurred."""
59 |
60 |
61 | class Timeout(RequestException):
62 | """The request timed out.
63 |
64 | Catching this error will catch both
65 | :exc:`~requests.exceptions.ConnectTimeout` and
66 | :exc:`~requests.exceptions.ReadTimeout` errors.
67 | """
68 |
69 |
70 | class ConnectTimeout(ConnectionError, Timeout):
71 | """The request timed out while trying to connect to the remote server.
72 |
73 | Requests that produced this error are safe to retry.
74 | """
75 |
76 |
77 | class ReadTimeout(Timeout):
78 | """The server did not send any data in the allotted amount of time."""
79 |
80 |
81 | class URLRequired(RequestException):
82 | """A valid URL is required to make a request."""
83 |
84 |
85 | class TooManyRedirects(RequestException):
86 | """Too many redirects."""
87 |
88 |
89 | class MissingSchema(RequestException, ValueError):
90 | """The URL scheme (e.g. http or https) is missing."""
91 |
92 |
93 | class InvalidSchema(RequestException, ValueError):
94 | """The URL scheme provided is either invalid or unsupported."""
95 |
96 |
97 | class InvalidURL(RequestException, ValueError):
98 | """The URL provided was somehow invalid."""
99 |
100 |
101 | class InvalidHeader(RequestException, ValueError):
102 | """The header value provided was somehow invalid."""
103 |
104 |
105 | class InvalidProxyURL(InvalidURL):
106 | """The proxy URL provided is invalid."""
107 |
108 |
109 | class ChunkedEncodingError(RequestException):
110 | """The server declared chunked encoding but sent an invalid chunk."""
111 |
112 |
113 | class ContentDecodingError(RequestException, BaseHTTPError):
114 | """Failed to decode response content."""
115 |
116 |
117 | class StreamConsumedError(RequestException, TypeError):
118 | """The content for this response was already consumed."""
119 |
120 |
121 | class RetryError(RequestException):
122 | """Custom retries logic failed"""
123 |
124 |
125 | class UnrewindableBodyError(RequestException):
126 | """Requests encountered an error when trying to rewind a body."""
127 |
128 |
129 | # Warnings
130 |
131 |
132 | class RequestsWarning(Warning):
133 | """Base warning for Requests."""
134 |
135 |
136 | class FileModeWarning(RequestsWarning, DeprecationWarning):
137 | """A file was opened in text mode, but Requests determined its binary length."""
138 |
139 |
140 | class RequestsDependencyWarning(RequestsWarning):
141 | """An imported dependency doesn't match the expected version range."""
142 |
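Note: every class above ultimately derives from RequestException (itself an IOError), so callers can catch a specific failure or the whole family. A brief sketch; the URL passed in would be a placeholder chosen by the caller.

    import requests
    from requests.exceptions import ConnectionError, HTTPError, Timeout


    def fetch(url: str) -> str:
        try:
            resp = requests.get(url, timeout=5)
            resp.raise_for_status()      # raises HTTPError for 4xx/5xx responses
            return resp.text
        except Timeout:
            return "request timed out"   # covers ConnectTimeout and ReadTimeout
        except ConnectionError:
            return "network problem"     # DNS failure, refused connection, proxy error
        except HTTPError as exc:
            return f"server returned {exc.response.status_code}"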
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/help.py:
--------------------------------------------------------------------------------
1 | """Module containing bug report helper(s)."""
2 |
3 | import json
4 | import platform
5 | import ssl
6 | import sys
7 |
8 | import idna
9 | import urllib3
10 |
11 | from . import __version__ as requests_version
12 |
13 | try:
14 | import charset_normalizer
15 | except ImportError:
16 | charset_normalizer = None
17 |
18 | try:
19 | import chardet
20 | except ImportError:
21 | chardet = None
22 |
23 | try:
24 | from urllib3.contrib import pyopenssl
25 | except ImportError:
26 | pyopenssl = None
27 | OpenSSL = None
28 | cryptography = None
29 | else:
30 | import cryptography
31 | import OpenSSL
32 |
33 |
34 | def _implementation():
35 | """Return a dict with the Python implementation and version.
36 |
37 | Provide both the name and the version of the Python implementation
38 | currently running. For example, on CPython 3.10.3 it will return
39 | {'name': 'CPython', 'version': '3.10.3'}.
40 |
41 | This function works best on CPython and PyPy: in particular, it probably
42 | doesn't work for Jython or IronPython. Future investigation should be done
43 | to work out the correct shape of the code for those platforms.
44 | """
45 | implementation = platform.python_implementation()
46 |
47 | if implementation == "CPython":
48 | implementation_version = platform.python_version()
49 | elif implementation == "PyPy":
50 | implementation_version = "{}.{}.{}".format(
51 | sys.pypy_version_info.major,
52 | sys.pypy_version_info.minor,
53 | sys.pypy_version_info.micro,
54 | )
55 | if sys.pypy_version_info.releaselevel != "final":
56 | implementation_version = "".join(
57 | [implementation_version, sys.pypy_version_info.releaselevel]
58 | )
59 | elif implementation == "Jython":
60 | implementation_version = platform.python_version() # Complete Guess
61 | elif implementation == "IronPython":
62 | implementation_version = platform.python_version() # Complete Guess
63 | else:
64 | implementation_version = "Unknown"
65 |
66 | return {"name": implementation, "version": implementation_version}
67 |
68 |
69 | def info():
70 | """Generate information for a bug report."""
71 | try:
72 | platform_info = {
73 | "system": platform.system(),
74 | "release": platform.release(),
75 | }
76 | except OSError:
77 | platform_info = {
78 | "system": "Unknown",
79 | "release": "Unknown",
80 | }
81 |
82 | implementation_info = _implementation()
83 | urllib3_info = {"version": urllib3.__version__}
84 | charset_normalizer_info = {"version": None}
85 | chardet_info = {"version": None}
86 | if charset_normalizer:
87 | charset_normalizer_info = {"version": charset_normalizer.__version__}
88 | if chardet:
89 | chardet_info = {"version": chardet.__version__}
90 |
91 | pyopenssl_info = {
92 | "version": None,
93 | "openssl_version": "",
94 | }
95 | if OpenSSL:
96 | pyopenssl_info = {
97 | "version": OpenSSL.__version__,
98 | "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
99 | }
100 | cryptography_info = {
101 | "version": getattr(cryptography, "__version__", ""),
102 | }
103 | idna_info = {
104 | "version": getattr(idna, "__version__", ""),
105 | }
106 |
107 | system_ssl = ssl.OPENSSL_VERSION_NUMBER
108 | system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}
109 |
110 | return {
111 | "platform": platform_info,
112 | "implementation": implementation_info,
113 | "system_ssl": system_ssl_info,
114 | "using_pyopenssl": pyopenssl is not None,
115 | "using_charset_normalizer": chardet is None,
116 | "pyOpenSSL": pyopenssl_info,
117 | "urllib3": urllib3_info,
118 | "chardet": chardet_info,
119 | "charset_normalizer": charset_normalizer_info,
120 | "cryptography": cryptography_info,
121 | "idna": idna_info,
122 | "requests": {
123 | "version": requests_version,
124 | },
125 | }
126 |
127 |
128 | def main():
129 | """Pretty-print the bug information as JSON."""
130 | print(json.dumps(info(), sort_keys=True, indent=2))
131 |
132 |
133 | if __name__ == "__main__":
134 | main()
135 |
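Note: because main() above is guarded by an __name__ check, the same report can be produced from a shell with "python -m requests.help"; programmatically, the dict returned by info() can be inspected directly, as sketched here.

    from requests import help as requests_help

    report = requests_help.info()
    print(report["requests"]["version"])   # 2.31.0 for the vendored copy in this tree
    print(report["urllib3"]["version"])    # 2.2.1 per the dist-info above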
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/exceptions.py:
--------------------------------------------------------------------------------
1 | """
2 | requests.exceptions
3 | ~~~~~~~~~~~~~~~~~~~
4 |
5 | This module contains the set of Requests' exceptions.
6 | """
7 | from urllib3.exceptions import HTTPError as BaseHTTPError
8 |
9 | from .compat import JSONDecodeError as CompatJSONDecodeError
10 |
11 |
12 | class RequestException(IOError):
13 | """There was an ambiguous exception that occurred while handling your
14 | request.
15 | """
16 |
17 | def __init__(self, *args, **kwargs):
18 | """Initialize RequestException with `request` and `response` objects."""
19 | response = kwargs.pop("response", None)
20 | self.response = response
21 | self.request = kwargs.pop("request", None)
22 | if response is not None and not self.request and hasattr(response, "request"):
23 | self.request = self.response.request
24 | super().__init__(*args, **kwargs)
25 |
26 |
27 | class InvalidJSONError(RequestException):
28 | """A JSON error occurred."""
29 |
30 |
31 | class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
32 | """Couldn't decode the text into json"""
33 |
34 | def __init__(self, *args, **kwargs):
35 | """
36 | Construct the JSONDecodeError instance first with all
37 | args. Then use it's args to construct the IOError so that
38 | the json specific args aren't used as IOError specific args
39 | and the error message from JSONDecodeError is preserved.
40 | """
41 | CompatJSONDecodeError.__init__(self, *args)
42 | InvalidJSONError.__init__(self, *self.args, **kwargs)
43 |
44 |
45 | class HTTPError(RequestException):
46 | """An HTTP error occurred."""
47 |
48 |
49 | class ConnectionError(RequestException):
50 | """A Connection error occurred."""
51 |
52 |
53 | class ProxyError(ConnectionError):
54 | """A proxy error occurred."""
55 |
56 |
57 | class SSLError(ConnectionError):
58 | """An SSL error occurred."""
59 |
60 |
61 | class Timeout(RequestException):
62 | """The request timed out.
63 |
64 | Catching this error will catch both
65 | :exc:`~requests.exceptions.ConnectTimeout` and
66 | :exc:`~requests.exceptions.ReadTimeout` errors.
67 | """
68 |
69 |
70 | class ConnectTimeout(ConnectionError, Timeout):
71 | """The request timed out while trying to connect to the remote server.
72 |
73 | Requests that produced this error are safe to retry.
74 | """
75 |
76 |
77 | class ReadTimeout(Timeout):
78 | """The server did not send any data in the allotted amount of time."""
79 |
80 |
81 | class URLRequired(RequestException):
82 | """A valid URL is required to make a request."""
83 |
84 |
85 | class TooManyRedirects(RequestException):
86 | """Too many redirects."""
87 |
88 |
89 | class MissingSchema(RequestException, ValueError):
90 | """The URL scheme (e.g. http or https) is missing."""
91 |
92 |
93 | class InvalidSchema(RequestException, ValueError):
94 | """The URL scheme provided is either invalid or unsupported."""
95 |
96 |
97 | class InvalidURL(RequestException, ValueError):
98 | """The URL provided was somehow invalid."""
99 |
100 |
101 | class InvalidHeader(RequestException, ValueError):
102 | """The header value provided was somehow invalid."""
103 |
104 |
105 | class InvalidProxyURL(InvalidURL):
106 | """The proxy URL provided is invalid."""
107 |
108 |
109 | class ChunkedEncodingError(RequestException):
110 | """The server declared chunked encoding but sent an invalid chunk."""
111 |
112 |
113 | class ContentDecodingError(RequestException, BaseHTTPError):
114 | """Failed to decode response content."""
115 |
116 |
117 | class StreamConsumedError(RequestException, TypeError):
118 | """The content for this response was already consumed."""
119 |
120 |
121 | class RetryError(RequestException):
122 | """Custom retries logic failed"""
123 |
124 |
125 | class UnrewindableBodyError(RequestException):
126 | """Requests encountered an error when trying to rewind a body."""
127 |
128 |
129 | # Warnings
130 |
131 |
132 | class RequestsWarning(Warning):
133 | """Base warning for Requests."""
134 |
135 |
136 | class FileModeWarning(RequestsWarning, DeprecationWarning):
137 | """A file was opened in text mode, but Requests determined its binary length."""
138 |
139 |
140 | class RequestsDependencyWarning(RequestsWarning):
141 | """An imported dependency doesn't match the expected version range."""
142 |
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/help.py:
--------------------------------------------------------------------------------
1 | """Module containing bug report helper(s)."""
2 |
3 | import json
4 | import platform
5 | import ssl
6 | import sys
7 |
8 | import idna
9 | import urllib3
10 |
11 | from . import __version__ as requests_version
12 |
13 | try:
14 | import charset_normalizer
15 | except ImportError:
16 | charset_normalizer = None
17 |
18 | try:
19 | import chardet
20 | except ImportError:
21 | chardet = None
22 |
23 | try:
24 | from urllib3.contrib import pyopenssl
25 | except ImportError:
26 | pyopenssl = None
27 | OpenSSL = None
28 | cryptography = None
29 | else:
30 | import cryptography
31 | import OpenSSL
32 |
33 |
34 | def _implementation():
35 | """Return a dict with the Python implementation and version.
36 |
37 | Provide both the name and the version of the Python implementation
38 | currently running. For example, on CPython 3.10.3 it will return
39 | {'name': 'CPython', 'version': '3.10.3'}.
40 |
41 | This function works best on CPython and PyPy: in particular, it probably
42 | doesn't work for Jython or IronPython. Future investigation should be done
43 | to work out the correct shape of the code for those platforms.
44 | """
45 | implementation = platform.python_implementation()
46 |
47 | if implementation == "CPython":
48 | implementation_version = platform.python_version()
49 | elif implementation == "PyPy":
50 | implementation_version = "{}.{}.{}".format(
51 | sys.pypy_version_info.major,
52 | sys.pypy_version_info.minor,
53 | sys.pypy_version_info.micro,
54 | )
55 | if sys.pypy_version_info.releaselevel != "final":
56 | implementation_version = "".join(
57 | [implementation_version, sys.pypy_version_info.releaselevel]
58 | )
59 | elif implementation == "Jython":
60 | implementation_version = platform.python_version() # Complete Guess
61 | elif implementation == "IronPython":
62 | implementation_version = platform.python_version() # Complete Guess
63 | else:
64 | implementation_version = "Unknown"
65 |
66 | return {"name": implementation, "version": implementation_version}
67 |
68 |
69 | def info():
70 | """Generate information for a bug report."""
71 | try:
72 | platform_info = {
73 | "system": platform.system(),
74 | "release": platform.release(),
75 | }
76 | except OSError:
77 | platform_info = {
78 | "system": "Unknown",
79 | "release": "Unknown",
80 | }
81 |
82 | implementation_info = _implementation()
83 | urllib3_info = {"version": urllib3.__version__}
84 | charset_normalizer_info = {"version": None}
85 | chardet_info = {"version": None}
86 | if charset_normalizer:
87 | charset_normalizer_info = {"version": charset_normalizer.__version__}
88 | if chardet:
89 | chardet_info = {"version": chardet.__version__}
90 |
91 | pyopenssl_info = {
92 | "version": None,
93 | "openssl_version": "",
94 | }
95 | if OpenSSL:
96 | pyopenssl_info = {
97 | "version": OpenSSL.__version__,
98 | "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
99 | }
100 | cryptography_info = {
101 | "version": getattr(cryptography, "__version__", ""),
102 | }
103 | idna_info = {
104 | "version": getattr(idna, "__version__", ""),
105 | }
106 |
107 | system_ssl = ssl.OPENSSL_VERSION_NUMBER
108 | system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}
109 |
110 | return {
111 | "platform": platform_info,
112 | "implementation": implementation_info,
113 | "system_ssl": system_ssl_info,
114 | "using_pyopenssl": pyopenssl is not None,
115 | "using_charset_normalizer": chardet is None,
116 | "pyOpenSSL": pyopenssl_info,
117 | "urllib3": urllib3_info,
118 | "chardet": chardet_info,
119 | "charset_normalizer": charset_normalizer_info,
120 | "cryptography": cryptography_info,
121 | "idna": idna_info,
122 | "requests": {
123 | "version": requests_version,
124 | },
125 | }
126 |
127 |
128 | def main():
129 | """Pretty-print the bug information as JSON."""
130 | print(json.dumps(info(), sort_keys=True, indent=2))
131 |
132 |
133 | if __name__ == "__main__":
134 | main()
135 |
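Because `main()` is guarded by the `__name__ == "__main__"` check, the same diagnostics can be produced either by running the module directly (`python -m requests.help`) or by calling `info()` from code. A short sketch of the programmatic path, assuming this vendored copy of `requests` is on the import path:

```python
import json

from requests.help import info

# Collect the environment report as a plain dict and pretty-print it,
# mirroring what `python -m requests.help` prints on the command line.
report = info()
print(json.dumps(report, sort_keys=True, indent=2))

# Individual fields are ordinary dict entries.
print(report["implementation"]["name"], report["urllib3"]["version"])
```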
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests/status_codes.py:
--------------------------------------------------------------------------------
1 | r"""
2 | The ``codes`` object defines a mapping from common names for HTTP statuses
3 | to their numerical codes, accessible either as attributes or as dictionary
4 | items.
5 |
6 | Example::
7 |
8 | >>> import requests
9 | >>> requests.codes['temporary_redirect']
10 | 307
11 | >>> requests.codes.teapot
12 | 418
13 | >>> requests.codes['\o/']
14 | 200
15 |
16 | Some codes have multiple names, and both upper- and lower-case versions of
17 | the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
18 | ``codes.okay`` all correspond to the HTTP status code 200.
19 | """
20 |
21 | from .structures import LookupDict
22 |
23 | _codes = {
24 | # Informational.
25 | 100: ("continue",),
26 | 101: ("switching_protocols",),
27 | 102: ("processing",),
28 | 103: ("checkpoint",),
29 | 122: ("uri_too_long", "request_uri_too_long"),
30 | 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"),
31 | 201: ("created",),
32 | 202: ("accepted",),
33 | 203: ("non_authoritative_info", "non_authoritative_information"),
34 | 204: ("no_content",),
35 | 205: ("reset_content", "reset"),
36 | 206: ("partial_content", "partial"),
37 | 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
38 | 208: ("already_reported",),
39 | 226: ("im_used",),
40 | # Redirection.
41 | 300: ("multiple_choices",),
42 | 301: ("moved_permanently", "moved", "\\o-"),
43 | 302: ("found",),
44 | 303: ("see_other", "other"),
45 | 304: ("not_modified",),
46 | 305: ("use_proxy",),
47 | 306: ("switch_proxy",),
48 | 307: ("temporary_redirect", "temporary_moved", "temporary"),
49 | 308: (
50 | "permanent_redirect",
51 | "resume_incomplete",
52 | "resume",
53 | ), # "resume" and "resume_incomplete" to be removed in 3.0
54 | # Client Error.
55 | 400: ("bad_request", "bad"),
56 | 401: ("unauthorized",),
57 | 402: ("payment_required", "payment"),
58 | 403: ("forbidden",),
59 | 404: ("not_found", "-o-"),
60 | 405: ("method_not_allowed", "not_allowed"),
61 | 406: ("not_acceptable",),
62 | 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
63 | 408: ("request_timeout", "timeout"),
64 | 409: ("conflict",),
65 | 410: ("gone",),
66 | 411: ("length_required",),
67 | 412: ("precondition_failed", "precondition"),
68 | 413: ("request_entity_too_large",),
69 | 414: ("request_uri_too_large",),
70 | 415: ("unsupported_media_type", "unsupported_media", "media_type"),
71 | 416: (
72 | "requested_range_not_satisfiable",
73 | "requested_range",
74 | "range_not_satisfiable",
75 | ),
76 | 417: ("expectation_failed",),
77 | 418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
78 | 421: ("misdirected_request",),
79 | 422: ("unprocessable_entity", "unprocessable"),
80 | 423: ("locked",),
81 | 424: ("failed_dependency", "dependency"),
82 | 425: ("unordered_collection", "unordered"),
83 | 426: ("upgrade_required", "upgrade"),
84 | 428: ("precondition_required", "precondition"),
85 | 429: ("too_many_requests", "too_many"),
86 | 431: ("header_fields_too_large", "fields_too_large"),
87 | 444: ("no_response", "none"),
88 | 449: ("retry_with", "retry"),
89 | 450: ("blocked_by_windows_parental_controls", "parental_controls"),
90 | 451: ("unavailable_for_legal_reasons", "legal_reasons"),
91 | 499: ("client_closed_request",),
92 | # Server Error.
93 | 500: ("internal_server_error", "server_error", "/o\\", "✗"),
94 | 501: ("not_implemented",),
95 | 502: ("bad_gateway",),
96 | 503: ("service_unavailable", "unavailable"),
97 | 504: ("gateway_timeout",),
98 | 505: ("http_version_not_supported", "http_version"),
99 | 506: ("variant_also_negotiates",),
100 | 507: ("insufficient_storage",),
101 | 509: ("bandwidth_limit_exceeded", "bandwidth"),
102 | 510: ("not_extended",),
103 | 511: ("network_authentication_required", "network_auth", "network_authentication"),
104 | }
105 |
106 | codes = LookupDict(name="status_codes")
107 |
108 |
109 | def _init():
110 | for code, titles in _codes.items():
111 | for title in titles:
112 | setattr(codes, title, code)
113 | if not title.startswith(("\\", "/")):
114 | setattr(codes, title.upper(), code)
115 |
116 | def doc(code):
117 | names = ", ".join(f"``{n}``" for n in _codes[code])
118 | return "* %d: %s" % (code, names)
119 |
120 | global __doc__
121 | __doc__ = (
122 | __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
123 | if __doc__ is not None
124 | else None
125 | )
126 |
127 |
128 | _init()
129 |
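Since `_init()` registers every alias both as an attribute and (via `LookupDict`) as a dictionary key on `codes`, lookups work in either style, matching the examples in the module docstring. A brief sketch, assuming `requests` is importable; the httpbin URL is only a convenient illustration:

```python
import requests

# Attribute access, the upper-case alias, and item access all resolve
# to the same integer status code.
assert requests.codes.not_found == 404
assert requests.codes.NOT_FOUND == 404
assert requests.codes["temporary_redirect"] == 307

# Typical use: compare a response's status_code against a named constant
# (requires network access).
resp = requests.get("https://httpbin.org/status/418")
if resp.status_code == requests.codes.im_a_teapot:
    print("short and stout")
```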
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests/status_codes.py:
--------------------------------------------------------------------------------
1 | r"""
2 | The ``codes`` object defines a mapping from common names for HTTP statuses
3 | to their numerical codes, accessible either as attributes or as dictionary
4 | items.
5 |
6 | Example::
7 |
8 | >>> import requests
9 | >>> requests.codes['temporary_redirect']
10 | 307
11 | >>> requests.codes.teapot
12 | 418
13 | >>> requests.codes['\o/']
14 | 200
15 |
16 | Some codes have multiple names, and both upper- and lower-case versions of
17 | the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
18 | ``codes.okay`` all correspond to the HTTP status code 200.
19 | """
20 |
21 | from .structures import LookupDict
22 |
23 | _codes = {
24 | # Informational.
25 | 100: ("continue",),
26 | 101: ("switching_protocols",),
27 | 102: ("processing",),
28 | 103: ("checkpoint",),
29 | 122: ("uri_too_long", "request_uri_too_long"),
30 | 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"),
31 | 201: ("created",),
32 | 202: ("accepted",),
33 | 203: ("non_authoritative_info", "non_authoritative_information"),
34 | 204: ("no_content",),
35 | 205: ("reset_content", "reset"),
36 | 206: ("partial_content", "partial"),
37 | 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
38 | 208: ("already_reported",),
39 | 226: ("im_used",),
40 | # Redirection.
41 | 300: ("multiple_choices",),
42 | 301: ("moved_permanently", "moved", "\\o-"),
43 | 302: ("found",),
44 | 303: ("see_other", "other"),
45 | 304: ("not_modified",),
46 | 305: ("use_proxy",),
47 | 306: ("switch_proxy",),
48 | 307: ("temporary_redirect", "temporary_moved", "temporary"),
49 | 308: (
50 | "permanent_redirect",
51 | "resume_incomplete",
52 | "resume",
53 | ), # "resume" and "resume_incomplete" to be removed in 3.0
54 | # Client Error.
55 | 400: ("bad_request", "bad"),
56 | 401: ("unauthorized",),
57 | 402: ("payment_required", "payment"),
58 | 403: ("forbidden",),
59 | 404: ("not_found", "-o-"),
60 | 405: ("method_not_allowed", "not_allowed"),
61 | 406: ("not_acceptable",),
62 | 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
63 | 408: ("request_timeout", "timeout"),
64 | 409: ("conflict",),
65 | 410: ("gone",),
66 | 411: ("length_required",),
67 | 412: ("precondition_failed", "precondition"),
68 | 413: ("request_entity_too_large",),
69 | 414: ("request_uri_too_large",),
70 | 415: ("unsupported_media_type", "unsupported_media", "media_type"),
71 | 416: (
72 | "requested_range_not_satisfiable",
73 | "requested_range",
74 | "range_not_satisfiable",
75 | ),
76 | 417: ("expectation_failed",),
77 | 418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
78 | 421: ("misdirected_request",),
79 | 422: ("unprocessable_entity", "unprocessable"),
80 | 423: ("locked",),
81 | 424: ("failed_dependency", "dependency"),
82 | 425: ("unordered_collection", "unordered"),
83 | 426: ("upgrade_required", "upgrade"),
84 | 428: ("precondition_required", "precondition"),
85 | 429: ("too_many_requests", "too_many"),
86 | 431: ("header_fields_too_large", "fields_too_large"),
87 | 444: ("no_response", "none"),
88 | 449: ("retry_with", "retry"),
89 | 450: ("blocked_by_windows_parental_controls", "parental_controls"),
90 | 451: ("unavailable_for_legal_reasons", "legal_reasons"),
91 | 499: ("client_closed_request",),
92 | # Server Error.
93 | 500: ("internal_server_error", "server_error", "/o\\", "✗"),
94 | 501: ("not_implemented",),
95 | 502: ("bad_gateway",),
96 | 503: ("service_unavailable", "unavailable"),
97 | 504: ("gateway_timeout",),
98 | 505: ("http_version_not_supported", "http_version"),
99 | 506: ("variant_also_negotiates",),
100 | 507: ("insufficient_storage",),
101 | 509: ("bandwidth_limit_exceeded", "bandwidth"),
102 | 510: ("not_extended",),
103 | 511: ("network_authentication_required", "network_auth", "network_authentication"),
104 | }
105 |
106 | codes = LookupDict(name="status_codes")
107 |
108 |
109 | def _init():
110 | for code, titles in _codes.items():
111 | for title in titles:
112 | setattr(codes, title, code)
113 | if not title.startswith(("\\", "/")):
114 | setattr(codes, title.upper(), code)
115 |
116 | def doc(code):
117 | names = ", ".join(f"``{n}``" for n in _codes[code])
118 | return "* %d: %s" % (code, names)
119 |
120 | global __doc__
121 | __doc__ = (
122 | __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
123 | if __doc__ is not None
124 | else None
125 | )
126 |
127 |
128 | _init()
129 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/certifi/core.py:
--------------------------------------------------------------------------------
1 | """
2 | certifi.py
3 | ~~~~~~~~~~
4 |
5 | This module returns the installation location of cacert.pem or its contents.
6 | """
7 | import sys
8 | import atexit
9 |
10 | def exit_cacert_ctx() -> None:
11 | _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
12 |
13 |
14 | if sys.version_info >= (3, 11):
15 |
16 | from importlib.resources import as_file, files
17 |
18 | _CACERT_CTX = None
19 | _CACERT_PATH = None
20 |
21 | def where() -> str:
22 | # This is slightly terrible, but we want to delay extracting the file
23 | # in cases where we're inside of a zipimport situation until someone
24 | # actually calls where(), but we don't want to re-extract the file
25 | # on every call of where(), so we'll do it once then store it in a
26 | # global variable.
27 | global _CACERT_CTX
28 | global _CACERT_PATH
29 | if _CACERT_PATH is None:
30 | # This is slightly janky, the importlib.resources API wants you to
31 | # manage the cleanup of this file, so it doesn't actually return a
32 | # path, it returns a context manager that will give you the path
33 | # when you enter it and will do any cleanup when you leave it. In
34 | # the common case of not needing a temporary file, it will just
35 | # return the file system location and the __exit__() is a no-op.
36 | #
37 | # We also have to hold onto the actual context manager, because
38 | # it will do the cleanup whenever it gets garbage collected, so
39 | # we will also store that at the global level as well.
40 | _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
41 | _CACERT_PATH = str(_CACERT_CTX.__enter__())
42 | atexit.register(exit_cacert_ctx)
43 |
44 | return _CACERT_PATH
45 |
46 | def contents() -> str:
47 | return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
48 |
49 | elif sys.version_info >= (3, 7):
50 |
51 | from importlib.resources import path as get_path, read_text
52 |
53 | _CACERT_CTX = None
54 | _CACERT_PATH = None
55 |
56 | def where() -> str:
57 | # This is slightly terrible, but we want to delay extracting the
58 | # file in cases where we're inside of a zipimport situation until
59 | # someone actually calls where(), but we don't want to re-extract
60 | # the file on every call of where(), so we'll do it once then store
61 | # it in a global variable.
62 | global _CACERT_CTX
63 | global _CACERT_PATH
64 | if _CACERT_PATH is None:
65 | # This is slightly janky, the importlib.resources API wants you
66 | # to manage the cleanup of this file, so it doesn't actually
67 | # return a path, it returns a context manager that will give
68 | # you the path when you enter it and will do any cleanup when
69 | # you leave it. In the common case of not needing a temporary
70 | # file, it will just return the file system location and the
71 | # __exit__() is a no-op.
72 | #
73 | # We also have to hold onto the actual context manager, because
74 | # it will do the cleanup whenever it gets garbage collected, so
75 | # we will also store that at the global level as well.
76 | _CACERT_CTX = get_path("certifi", "cacert.pem")
77 | _CACERT_PATH = str(_CACERT_CTX.__enter__())
78 | atexit.register(exit_cacert_ctx)
79 |
80 | return _CACERT_PATH
81 |
82 | def contents() -> str:
83 | return read_text("certifi", "cacert.pem", encoding="ascii")
84 |
85 | else:
86 | import os
87 | import types
88 | from typing import Union
89 |
90 | Package = Union[types.ModuleType, str]
91 | Resource = Union[str, "os.PathLike"]
92 |
93 | # This fallback will work for Python versions prior to 3.7 that lack the
94 | # importlib.resources module but relies on the existing `where` function
95 | # so won't address issues with environments like PyOxidizer that don't set
96 | # __file__ on modules.
97 | def read_text(
98 | package: Package,
99 | resource: Resource,
100 | encoding: str = 'utf-8',
101 | errors: str = 'strict'
102 | ) -> str:
103 | with open(where(), encoding=encoding) as data:
104 | return data.read()
105 |
106 | # If we don't have importlib.resources, then we will just do the old logic
107 | # of assuming we're on the filesystem and munge the path directly.
108 | def where() -> str:
109 | f = os.path.dirname(__file__)
110 |
111 | return os.path.join(f, "cacert.pem")
112 |
113 | def contents() -> str:
114 | return read_text("certifi", "cacert.pem", encoding="ascii")
115 |
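Whichever branch is selected at import time, the two public entry points behave the same way: `where()` returns a filesystem path to the bundled `cacert.pem`, and `contents()` returns the PEM text itself. A minimal sketch, assuming `certifi` is importable:

```python
import ssl

import certifi

# Path to the bundled CA file; extracted lazily on first call when running
# from a zip import, then cached in module-level globals for reuse.
ca_path = certifi.where()
print(ca_path)

# The same bundle as text, for APIs that want PEM data directly.
pem_text = certifi.contents()
print(pem_text[:60], "...")

# Typical use: build an SSL context that trusts certifi's bundle.
ctx = ssl.create_default_context(cafile=ca_path)
```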
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/certifi/core.py:
--------------------------------------------------------------------------------
1 | """
2 | certifi.py
3 | ~~~~~~~~~~
4 |
5 | This module returns the installation location of cacert.pem or its contents.
6 | """
7 | import sys
8 | import atexit
9 |
10 | def exit_cacert_ctx() -> None:
11 | _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
12 |
13 |
14 | if sys.version_info >= (3, 11):
15 |
16 | from importlib.resources import as_file, files
17 |
18 | _CACERT_CTX = None
19 | _CACERT_PATH = None
20 |
21 | def where() -> str:
22 | # This is slightly terrible, but we want to delay extracting the file
23 | # in cases where we're inside of a zipimport situation until someone
24 | # actually calls where(), but we don't want to re-extract the file
25 | # on every call of where(), so we'll do it once then store it in a
26 | # global variable.
27 | global _CACERT_CTX
28 | global _CACERT_PATH
29 | if _CACERT_PATH is None:
30 | # This is slightly janky, the importlib.resources API wants you to
31 | # manage the cleanup of this file, so it doesn't actually return a
32 | # path, it returns a context manager that will give you the path
33 | # when you enter it and will do any cleanup when you leave it. In
34 | # the common case of not needing a temporary file, it will just
35 | # return the file system location and the __exit__() is a no-op.
36 | #
37 | # We also have to hold onto the actual context manager, because
38 | # it will do the cleanup whenever it gets garbage collected, so
39 | # we will also store that at the global level as well.
40 | _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
41 | _CACERT_PATH = str(_CACERT_CTX.__enter__())
42 | atexit.register(exit_cacert_ctx)
43 |
44 | return _CACERT_PATH
45 |
46 | def contents() -> str:
47 | return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
48 |
49 | elif sys.version_info >= (3, 7):
50 |
51 | from importlib.resources import path as get_path, read_text
52 |
53 | _CACERT_CTX = None
54 | _CACERT_PATH = None
55 |
56 | def where() -> str:
57 | # This is slightly terrible, but we want to delay extracting the
58 | # file in cases where we're inside of a zipimport situation until
59 | # someone actually calls where(), but we don't want to re-extract
60 | # the file on every call of where(), so we'll do it once then store
61 | # it in a global variable.
62 | global _CACERT_CTX
63 | global _CACERT_PATH
64 | if _CACERT_PATH is None:
65 | # This is slightly janky, the importlib.resources API wants you
66 | # to manage the cleanup of this file, so it doesn't actually
67 | # return a path, it returns a context manager that will give
68 | # you the path when you enter it and will do any cleanup when
69 | # you leave it. In the common case of not needing a temporary
70 | # file, it will just return the file system location and the
71 | # __exit__() is a no-op.
72 | #
73 | # We also have to hold onto the actual context manager, because
74 | # it will do the cleanup whenever it gets garbage collected, so
75 | # we will also store that at the global level as well.
76 | _CACERT_CTX = get_path("certifi", "cacert.pem")
77 | _CACERT_PATH = str(_CACERT_CTX.__enter__())
78 | atexit.register(exit_cacert_ctx)
79 |
80 | return _CACERT_PATH
81 |
82 | def contents() -> str:
83 | return read_text("certifi", "cacert.pem", encoding="ascii")
84 |
85 | else:
86 | import os
87 | import types
88 | from typing import Union
89 |
90 | Package = Union[types.ModuleType, str]
91 | Resource = Union[str, "os.PathLike"]
92 |
93 | # This fallback will work for Python versions prior to 3.7 that lack the
94 | # importlib.resources module but relies on the existing `where` function
95 | # so won't address issues with environments like PyOxidizer that don't set
96 | # __file__ on modules.
97 | def read_text(
98 | package: Package,
99 | resource: Resource,
100 | encoding: str = 'utf-8',
101 | errors: str = 'strict'
102 | ) -> str:
103 | with open(where(), encoding=encoding) as data:
104 | return data.read()
105 |
106 | # If we don't have importlib.resources, then we will just do the old logic
107 | # of assuming we're on the filesystem and munge the path directly.
108 | def where() -> str:
109 | f = os.path.dirname(__file__)
110 |
111 | return os.path.join(f, "cacert.pem")
112 |
113 | def contents() -> str:
114 | return read_text("certifi", "cacert.pem", encoding="ascii")
115 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/util/wait.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import select
4 | import socket
5 | from functools import partial
6 |
7 | __all__ = ["wait_for_read", "wait_for_write"]
8 |
9 |
10 | # How should we wait on sockets?
11 | #
12 | # There are two types of APIs you can use for waiting on sockets: the fancy
13 | # modern stateful APIs like epoll/kqueue, and the older stateless APIs like
14 | # select/poll. The stateful APIs are more efficient when you have lots of
15 | # sockets to keep track of, because you can set them up once and then use them
16 | # lots of times. But we only ever want to wait on a single socket at a time
17 | # and don't want to keep track of state, so the stateless APIs are actually
18 | # more efficient. So we want to use select() or poll().
19 | #
20 | # Now, how do we choose between select() and poll()? On traditional Unixes,
21 | # select() has a strange calling convention that makes it slow, or fail
22 | # altogether, for high-numbered file descriptors. The point of poll() is to fix
23 | # that, so on Unixes, we prefer poll().
24 | #
25 | # On Windows, there is no poll() (or at least Python doesn't provide a wrapper
26 | # for it), but that's OK, because on Windows, select() doesn't have this
27 | # strange calling convention; plain select() works fine.
28 | #
29 | # So: on Windows we use select(), and everywhere else we use poll(). We also
30 | # fall back to select() in case poll() is somehow broken or missing.
31 |
32 |
33 | def select_wait_for_socket(
34 | sock: socket.socket,
35 | read: bool = False,
36 | write: bool = False,
37 | timeout: float | None = None,
38 | ) -> bool:
39 | if not read and not write:
40 | raise RuntimeError("must specify at least one of read=True, write=True")
41 | rcheck = []
42 | wcheck = []
43 | if read:
44 | rcheck.append(sock)
45 | if write:
46 | wcheck.append(sock)
47 | # When doing a non-blocking connect, most systems signal success by
48 | # marking the socket writable. Windows, though, signals success by marking
49 | # it as "exceptional". We paper over the difference by checking the write
50 | # sockets for both conditions. (The stdlib selectors module does the same
51 | # thing.)
52 | fn = partial(select.select, rcheck, wcheck, wcheck)
53 | rready, wready, xready = fn(timeout)
54 | return bool(rready or wready or xready)
55 |
56 |
57 | def poll_wait_for_socket(
58 | sock: socket.socket,
59 | read: bool = False,
60 | write: bool = False,
61 | timeout: float | None = None,
62 | ) -> bool:
63 | if not read and not write:
64 | raise RuntimeError("must specify at least one of read=True, write=True")
65 | mask = 0
66 | if read:
67 | mask |= select.POLLIN
68 | if write:
69 | mask |= select.POLLOUT
70 | poll_obj = select.poll()
71 | poll_obj.register(sock, mask)
72 |
73 | # For some reason, poll() takes timeout in milliseconds
74 | def do_poll(t: float | None) -> list[tuple[int, int]]:
75 | if t is not None:
76 | t *= 1000
77 | return poll_obj.poll(t)
78 |
79 | return bool(do_poll(timeout))
80 |
81 |
82 | def _have_working_poll() -> bool:
83 | # Apparently some systems have a select.poll that fails as soon as you try
84 | # to use it, either due to strange configuration or broken monkeypatching
85 | # from libraries like eventlet/greenlet.
86 | try:
87 | poll_obj = select.poll()
88 | poll_obj.poll(0)
89 | except (AttributeError, OSError):
90 | return False
91 | else:
92 | return True
93 |
94 |
95 | def wait_for_socket(
96 | sock: socket.socket,
97 | read: bool = False,
98 | write: bool = False,
99 | timeout: float | None = None,
100 | ) -> bool:
101 | # We delay choosing which implementation to use until the first time we're
102 | # called. We could do it at import time, but then we might make the wrong
103 | # decision if someone goes wild with monkeypatching select.poll after
104 | # we're imported.
105 | global wait_for_socket
106 | if _have_working_poll():
107 | wait_for_socket = poll_wait_for_socket
108 | elif hasattr(select, "select"):
109 | wait_for_socket = select_wait_for_socket
110 | return wait_for_socket(sock, read, write, timeout)
111 |
112 |
113 | def wait_for_read(sock: socket.socket, timeout: float | None = None) -> bool:
114 | """Waits for reading to be available on a given socket.
115 | Returns True if the socket is readable, or False if the timeout expired.
116 | """
117 | return wait_for_socket(sock, read=True, timeout=timeout)
118 |
119 |
120 | def wait_for_write(sock: socket.socket, timeout: float | None = None) -> bool:
121 | """Waits for writing to be available on a given socket.
122 | Returns True if the socket is writable, or False if the timeout expired.
123 | """
124 | return wait_for_socket(sock, write=True, timeout=timeout)
125 |
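The two public helpers are thin wrappers over whichever backend `wait_for_socket` selects on its first call. A small self-contained sketch using a local socket pair (the pair and timeouts below are only for illustration):

```python
import socket

from urllib3.util.wait import wait_for_read, wait_for_write

a, b = socket.socketpair()
try:
    # Nothing has been sent yet, so a short read wait times out.
    print(wait_for_read(a, timeout=0.1))   # False

    # A freshly created socket normally has buffer space, so it is writable.
    print(wait_for_write(a, timeout=0.1))  # True

    # After the peer sends data, the read side becomes ready.
    b.sendall(b"ping")
    print(wait_for_read(a, timeout=1.0))   # True
finally:
    a.close()
    b.close()
```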
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
3 |
4 | ### Linux ###
5 | *~
6 |
7 | # temporary files which can be created if a process still has a handle open of a deleted file
8 | .fuse_hidden*
9 |
10 | # KDE directory preferences
11 | .directory
12 |
13 | # Linux trash folder which might appear on any partition or disk
14 | .Trash-*
15 |
16 | # .nfs files are created when an open file is removed but is still being accessed
17 | .nfs*
18 |
19 | ### OSX ###
20 | *.DS_Store
21 | .AppleDouble
22 | .LSOverride
23 |
24 | # Icon must end with two \r
25 | Icon
26 |
27 | # Thumbnails
28 | ._*
29 |
30 | # Files that might appear in the root of a volume
31 | .DocumentRevisions-V100
32 | .fseventsd
33 | .Spotlight-V100
34 | .TemporaryItems
35 | .Trashes
36 | .VolumeIcon.icns
37 | .com.apple.timemachine.donotpresent
38 |
39 | # Directories potentially created on remote AFP share
40 | .AppleDB
41 | .AppleDesktop
42 | Network Trash Folder
43 | Temporary Items
44 | .apdisk
45 |
46 | ### PyCharm ###
47 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
48 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
49 |
50 | # User-specific stuff:
51 | .idea/**/workspace.xml
52 | .idea/**/tasks.xml
53 | .idea/dictionaries
54 | .idea/vcs.xml
55 | .idea/sam-hello-world.iml
56 |
57 | # Sensitive or high-churn files:
58 | .idea/**/dataSources/
59 | .idea/**/dataSources.ids
60 | .idea/**/dataSources.xml
61 | .idea/**/dataSources.local.xml
62 | .idea/**/sqlDataSources.xml
63 | .idea/**/dynamic.xml
64 | .idea/**/uiDesigner.xml
65 |
66 | # Gradle:
67 | .idea/**/gradle.xml
68 | .idea/**/libraries
69 |
70 | # CMake
71 | cmake-build-debug/
72 |
73 | # Mongo Explorer plugin:
74 | .idea/**/mongoSettings.xml
75 |
76 | ## File-based project format:
77 | *.iws
78 |
79 | ## Plugin-specific files:
80 |
81 | # IntelliJ
82 | /out/
83 |
84 | # mpeltonen/sbt-idea plugin
85 | .idea_modules/
86 |
87 | # JIRA plugin
88 | atlassian-ide-plugin.xml
89 |
90 | # Cursive Clojure plugin
91 | .idea/replstate.xml
92 |
93 | # Ruby plugin and RubyMine
94 | /.rakeTasks
95 |
96 | # Crashlytics plugin (for Android Studio and IntelliJ)
97 | com_crashlytics_export_strings.xml
98 | crashlytics.properties
99 | crashlytics-build.properties
100 | fabric.properties
101 |
102 | ### PyCharm Patch ###
103 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
104 |
105 | # *.iml
106 | # modules.xml
107 | # .idea/misc.xml
108 | # *.ipr
109 |
110 | # Sonarlint plugin
111 | .idea/sonarlint
112 |
113 | ### Python ###
114 | # Byte-compiled / optimized / DLL files
115 | __pycache__/
116 | *.py[cod]
117 | *$py.class
118 |
119 | # C extensions
120 | *.so
121 |
122 | # Distribution / packaging
123 | .Python
124 | build/
125 | develop-eggs/
126 | dist/
127 | downloads/
128 | eggs/
129 | .eggs/
130 | lib/
131 | lib64/
132 | parts/
133 | sdist/
134 | var/
135 | wheels/
136 | *.egg-info/
137 | .installed.cfg
138 | *.egg
139 |
140 | # PyInstaller
141 | # Usually these files are written by a python script from a template
142 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
143 | *.manifest
144 | *.spec
145 |
146 | # Installer logs
147 | pip-log.txt
148 | pip-delete-this-directory.txt
149 |
150 | # Unit test / coverage reports
151 | htmlcov/
152 | .tox/
153 | .coverage
154 | .coverage.*
155 | .cache
156 | .pytest_cache/
157 | nosetests.xml
158 | coverage.xml
159 | *.cover
160 | .hypothesis/
161 |
162 | # Translations
163 | *.mo
164 | *.pot
165 |
166 | # Flask stuff:
167 | instance/
168 | .webassets-cache
169 |
170 | # Scrapy stuff:
171 | .scrapy
172 |
173 | # Sphinx documentation
174 | docs/_build/
175 |
176 | # PyBuilder
177 | target/
178 |
179 | # Jupyter Notebook
180 | .ipynb_checkpoints
181 |
182 | # pyenv
183 | .python-version
184 |
185 | # celery beat schedule file
186 | celerybeat-schedule.*
187 |
188 | # SageMath parsed files
189 | *.sage.py
190 |
191 | # Environments
192 | .env
193 | .venv
194 | env/
195 | venv/
196 | ENV/
197 | env.bak/
198 | venv.bak/
199 |
200 | # Spyder project settings
201 | .spyderproject
202 | .spyproject
203 |
204 | # Rope project settings
205 | .ropeproject
206 |
207 | # mkdocs documentation
208 | /site
209 |
210 | # mypy
211 | .mypy_cache/
212 |
213 | ### VisualStudioCode ###
214 | .vscode/*
215 | !.vscode/settings.json
216 | !.vscode/tasks.json
217 | !.vscode/launch.json
218 | !.vscode/extensions.json
219 | .history
220 |
221 | ### Windows ###
222 | # Windows thumbnail cache files
223 | Thumbs.db
224 | ehthumbs.db
225 | ehthumbs_vista.db
226 |
227 | # Folder config file
228 | Desktop.ini
229 |
230 | # Recycle Bin used on file shares
231 | $RECYCLE.BIN/
232 |
233 | # Windows Installer files
234 | *.cab
235 | *.msi
236 | *.msm
237 | *.msp
238 |
239 | # Windows shortcuts
240 | *.lnk
241 |
242 | # Build folder
243 |
244 | */build/*
245 |
246 | # End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/util/wait.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import select
4 | import socket
5 | from functools import partial
6 |
7 | __all__ = ["wait_for_read", "wait_for_write"]
8 |
9 |
10 | # How should we wait on sockets?
11 | #
12 | # There are two types of APIs you can use for waiting on sockets: the fancy
13 | # modern stateful APIs like epoll/kqueue, and the older stateless APIs like
14 | # select/poll. The stateful APIs are more efficient when you have lots of
15 | # sockets to keep track of, because you can set them up once and then use them
16 | # lots of times. But we only ever want to wait on a single socket at a time
17 | # and don't want to keep track of state, so the stateless APIs are actually
18 | # more efficient. So we want to use select() or poll().
19 | #
20 | # Now, how do we choose between select() and poll()? On traditional Unixes,
21 | # select() has a strange calling convention that makes it slow, or fail
22 | # altogether, for high-numbered file descriptors. The point of poll() is to fix
23 | # that, so on Unixes, we prefer poll().
24 | #
25 | # On Windows, there is no poll() (or at least Python doesn't provide a wrapper
26 | # for it), but that's OK, because on Windows, select() doesn't have this
27 | # strange calling convention; plain select() works fine.
28 | #
29 | # So: on Windows we use select(), and everywhere else we use poll(). We also
30 | # fall back to select() in case poll() is somehow broken or missing.
31 |
32 |
33 | def select_wait_for_socket(
34 | sock: socket.socket,
35 | read: bool = False,
36 | write: bool = False,
37 | timeout: float | None = None,
38 | ) -> bool:
39 | if not read and not write:
40 | raise RuntimeError("must specify at least one of read=True, write=True")
41 | rcheck = []
42 | wcheck = []
43 | if read:
44 | rcheck.append(sock)
45 | if write:
46 | wcheck.append(sock)
47 | # When doing a non-blocking connect, most systems signal success by
48 | # marking the socket writable. Windows, though, signals success by marking
49 | # it as "exceptional". We paper over the difference by checking the write
50 | # sockets for both conditions. (The stdlib selectors module does the same
51 | # thing.)
52 | fn = partial(select.select, rcheck, wcheck, wcheck)
53 | rready, wready, xready = fn(timeout)
54 | return bool(rready or wready or xready)
55 |
56 |
57 | def poll_wait_for_socket(
58 | sock: socket.socket,
59 | read: bool = False,
60 | write: bool = False,
61 | timeout: float | None = None,
62 | ) -> bool:
63 | if not read and not write:
64 | raise RuntimeError("must specify at least one of read=True, write=True")
65 | mask = 0
66 | if read:
67 | mask |= select.POLLIN
68 | if write:
69 | mask |= select.POLLOUT
70 | poll_obj = select.poll()
71 | poll_obj.register(sock, mask)
72 |
73 | # For some reason, poll() takes timeout in milliseconds
74 | def do_poll(t: float | None) -> list[tuple[int, int]]:
75 | if t is not None:
76 | t *= 1000
77 | return poll_obj.poll(t)
78 |
79 | return bool(do_poll(timeout))
80 |
81 |
82 | def _have_working_poll() -> bool:
83 | # Apparently some systems have a select.poll that fails as soon as you try
84 | # to use it, either due to strange configuration or broken monkeypatching
85 | # from libraries like eventlet/greenlet.
86 | try:
87 | poll_obj = select.poll()
88 | poll_obj.poll(0)
89 | except (AttributeError, OSError):
90 | return False
91 | else:
92 | return True
93 |
94 |
95 | def wait_for_socket(
96 | sock: socket.socket,
97 | read: bool = False,
98 | write: bool = False,
99 | timeout: float | None = None,
100 | ) -> bool:
101 | # We delay choosing which implementation to use until the first time we're
102 | # called. We could do it at import time, but then we might make the wrong
103 | # decision if someone goes wild with monkeypatching select.poll after
104 | # we're imported.
105 | global wait_for_socket
106 | if _have_working_poll():
107 | wait_for_socket = poll_wait_for_socket
108 | elif hasattr(select, "select"):
109 | wait_for_socket = select_wait_for_socket
110 | return wait_for_socket(sock, read, write, timeout)
111 |
112 |
113 | def wait_for_read(sock: socket.socket, timeout: float | None = None) -> bool:
114 | """Waits for reading to be available on a given socket.
115 | Returns True if the socket is readable, or False if the timeout expired.
116 | """
117 | return wait_for_socket(sock, read=True, timeout=timeout)
118 |
119 |
120 | def wait_for_write(sock: socket.socket, timeout: float | None = None) -> bool:
121 | """Waits for writing to be available on a given socket.
122 | Returns True if the socket is writable, or False if the timeout expired.
123 | """
124 | return wait_for_socket(sock, write=True, timeout=timeout)
125 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/urllib3/util/connection.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import socket
4 | import typing
5 |
6 | from ..exceptions import LocationParseError
7 | from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
8 |
9 | _TYPE_SOCKET_OPTIONS = typing.Sequence[typing.Tuple[int, int, typing.Union[int, bytes]]]
10 |
11 | if typing.TYPE_CHECKING:
12 | from .._base_connection import BaseHTTPConnection
13 |
14 |
15 | def is_connection_dropped(conn: BaseHTTPConnection) -> bool: # Platform-specific
16 | """
17 | Returns True if the connection is dropped and should be closed.
18 | :param conn: :class:`urllib3.connection.HTTPConnection` object.
19 | """
20 | return not conn.is_connected
21 |
22 |
23 | # This function is copied from socket.py in the Python 2.7 standard
24 | # library test suite. Added to its signature is only `socket_options`.
25 | # One additional modification is that we avoid binding to IPv6 servers
26 | # discovered in DNS if the system doesn't have IPv6 functionality.
27 | def create_connection(
28 | address: tuple[str, int],
29 | timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
30 | source_address: tuple[str, int] | None = None,
31 | socket_options: _TYPE_SOCKET_OPTIONS | None = None,
32 | ) -> socket.socket:
33 | """Connect to *address* and return the socket object.
34 |
35 | Convenience function. Connect to *address* (a 2-tuple ``(host,
36 | port)``) and return the socket object. Passing the optional
37 | *timeout* parameter will set the timeout on the socket instance
38 | before attempting to connect. If no *timeout* is supplied, the
39 | global default timeout setting returned by :func:`socket.getdefaulttimeout`
40 | is used. If *source_address* is set it must be a tuple of (host, port)
41 | for the socket to bind as a source address before making the connection.
42 | A host of '' or port 0 tells the OS to use the default.
43 | """
44 |
45 | host, port = address
46 | if host.startswith("["):
47 | host = host.strip("[]")
48 | err = None
49 |
50 | # Using the value from allowed_gai_family() in the context of getaddrinfo lets
51 | # us select whether to work with IPv4 DNS records, IPv6 records, or both.
52 | # The original create_connection function always returns all records.
53 | family = allowed_gai_family()
54 |
55 | try:
56 | host.encode("idna")
57 | except UnicodeError:
58 | raise LocationParseError(f"'{host}', label empty or too long") from None
59 |
60 | for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
61 | af, socktype, proto, canonname, sa = res
62 | sock = None
63 | try:
64 | sock = socket.socket(af, socktype, proto)
65 |
66 | # If provided, set socket level options before connecting.
67 | _set_socket_options(sock, socket_options)
68 |
69 | if timeout is not _DEFAULT_TIMEOUT:
70 | sock.settimeout(timeout)
71 | if source_address:
72 | sock.bind(source_address)
73 | sock.connect(sa)
74 | # Break explicitly a reference cycle
75 | err = None
76 | return sock
77 |
78 | except OSError as _:
79 | err = _
80 | if sock is not None:
81 | sock.close()
82 |
83 | if err is not None:
84 | try:
85 | raise err
86 | finally:
87 | # Break explicitly a reference cycle
88 | err = None
89 | else:
90 | raise OSError("getaddrinfo returns an empty list")
91 |
92 |
93 | def _set_socket_options(
94 | sock: socket.socket, options: _TYPE_SOCKET_OPTIONS | None
95 | ) -> None:
96 | if options is None:
97 | return
98 |
99 | for opt in options:
100 | sock.setsockopt(*opt)
101 |
102 |
103 | def allowed_gai_family() -> socket.AddressFamily:
104 | """This function is designed to work in the context of
105 | getaddrinfo, where family=socket.AF_UNSPEC is the default and
106 | will perform a DNS search for both IPv6 and IPv4 records."""
107 |
108 | family = socket.AF_INET
109 | if HAS_IPV6:
110 | family = socket.AF_UNSPEC
111 | return family
112 |
113 |
114 | def _has_ipv6(host: str) -> bool:
115 | """Returns True if the system can bind an IPv6 address."""
116 | sock = None
117 | has_ipv6 = False
118 |
119 | if socket.has_ipv6:
120 | # has_ipv6 returns true if cPython was compiled with IPv6 support.
121 | # It does not tell us if the system has IPv6 support enabled. To
122 | # determine that we must bind to an IPv6 address.
123 | # https://github.com/urllib3/urllib3/pull/611
124 | # https://bugs.python.org/issue658327
125 | try:
126 | sock = socket.socket(socket.AF_INET6)
127 | sock.bind((host, 0))
128 | has_ipv6 = True
129 | except Exception:
130 | pass
131 |
132 | if sock:
133 | sock.close()
134 | return has_ipv6
135 |
136 |
137 | HAS_IPV6 = _has_ipv6("::1")
138 |
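`create_connection()` is essentially `socket.create_connection()` plus IDNA validation, optional socket options, and the IPv4/IPv6 family selection from `allowed_gai_family()`. A hedged sketch of calling it directly; the host, port, timeout, and `TCP_NODELAY` option are illustrative choices, not anything this module mandates:

```python
import socket

from urllib3.util.connection import create_connection

# Open a TCP connection with a 5 second timeout and disable Nagle's
# algorithm through the socket_options hook (requires network access).
sock = create_connection(
    ("example.com", 80),
    timeout=5.0,
    socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
)
try:
    sock.sendall(b"HEAD / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
    print(sock.recv(128))
finally:
    sock.close()
```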
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/urllib3/util/connection.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import socket
4 | import typing
5 |
6 | from ..exceptions import LocationParseError
7 | from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
8 |
9 | _TYPE_SOCKET_OPTIONS = typing.Sequence[typing.Tuple[int, int, typing.Union[int, bytes]]]
10 |
11 | if typing.TYPE_CHECKING:
12 | from .._base_connection import BaseHTTPConnection
13 |
14 |
15 | def is_connection_dropped(conn: BaseHTTPConnection) -> bool: # Platform-specific
16 | """
17 | Returns True if the connection is dropped and should be closed.
18 | :param conn: :class:`urllib3.connection.HTTPConnection` object.
19 | """
20 | return not conn.is_connected
21 |
22 |
23 | # This function is copied from socket.py in the Python 2.7 standard
24 | # library test suite. Added to its signature is only `socket_options`.
25 | # One additional modification is that we avoid binding to IPv6 servers
26 | # discovered in DNS if the system doesn't have IPv6 functionality.
27 | def create_connection(
28 | address: tuple[str, int],
29 | timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
30 | source_address: tuple[str, int] | None = None,
31 | socket_options: _TYPE_SOCKET_OPTIONS | None = None,
32 | ) -> socket.socket:
33 | """Connect to *address* and return the socket object.
34 |
35 | Convenience function. Connect to *address* (a 2-tuple ``(host,
36 | port)``) and return the socket object. Passing the optional
37 | *timeout* parameter will set the timeout on the socket instance
38 | before attempting to connect. If no *timeout* is supplied, the
39 | global default timeout setting returned by :func:`socket.getdefaulttimeout`
40 | is used. If *source_address* is set it must be a tuple of (host, port)
41 | for the socket to bind as a source address before making the connection.
42 | A host of '' or port 0 tells the OS to use the default.
43 | """
44 |
45 | host, port = address
46 | if host.startswith("["):
47 | host = host.strip("[]")
48 | err = None
49 |
50 | # Using the value from allowed_gai_family() in the context of getaddrinfo lets
51 | # us select whether to work with IPv4 DNS records, IPv6 records, or both.
52 | # The original create_connection function always returns all records.
53 | family = allowed_gai_family()
54 |
55 | try:
56 | host.encode("idna")
57 | except UnicodeError:
58 | raise LocationParseError(f"'{host}', label empty or too long") from None
59 |
60 | for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
61 | af, socktype, proto, canonname, sa = res
62 | sock = None
63 | try:
64 | sock = socket.socket(af, socktype, proto)
65 |
66 | # If provided, set socket level options before connecting.
67 | _set_socket_options(sock, socket_options)
68 |
69 | if timeout is not _DEFAULT_TIMEOUT:
70 | sock.settimeout(timeout)
71 | if source_address:
72 | sock.bind(source_address)
73 | sock.connect(sa)
74 | # Break explicitly a reference cycle
75 | err = None
76 | return sock
77 |
78 | except OSError as _:
79 | err = _
80 | if sock is not None:
81 | sock.close()
82 |
83 | if err is not None:
84 | try:
85 | raise err
86 | finally:
87 | # Break explicitly a reference cycle
88 | err = None
89 | else:
90 | raise OSError("getaddrinfo returns an empty list")
91 |
92 |
93 | def _set_socket_options(
94 | sock: socket.socket, options: _TYPE_SOCKET_OPTIONS | None
95 | ) -> None:
96 | if options is None:
97 | return
98 |
99 | for opt in options:
100 | sock.setsockopt(*opt)
101 |
102 |
103 | def allowed_gai_family() -> socket.AddressFamily:
104 | """This function is designed to work in the context of
105 | getaddrinfo, where family=socket.AF_UNSPEC is the default and
106 | will perform a DNS search for both IPv6 and IPv4 records."""
107 |
108 | family = socket.AF_INET
109 | if HAS_IPV6:
110 | family = socket.AF_UNSPEC
111 | return family
112 |
113 |
114 | def _has_ipv6(host: str) -> bool:
115 | """Returns True if the system can bind an IPv6 address."""
116 | sock = None
117 | has_ipv6 = False
118 |
119 | if socket.has_ipv6:
120 | # has_ipv6 returns true if cPython was compiled with IPv6 support.
121 | # It does not tell us if the system has IPv6 support enabled. To
122 | # determine that we must bind to an IPv6 address.
123 | # https://github.com/urllib3/urllib3/pull/611
124 | # https://bugs.python.org/issue658327
125 | try:
126 | sock = socket.socket(socket.AF_INET6)
127 | sock.bind((host, 0))
128 | has_ipv6 = True
129 | except Exception:
130 | pass
131 |
132 | if sock:
133 | sock.close()
134 | return has_ipv6
135 |
136 |
137 | HAS_IPV6 = _has_ipv6("::1")
138 |
--------------------------------------------------------------------------------
/.aws-sam/deps/8a950f40-65f4-4cd9-ba01-4b0ad3fb63e4/requests-2.31.0.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: requests
3 | Version: 2.31.0
4 | Summary: Python HTTP for Humans.
5 | Home-page: https://requests.readthedocs.io
6 | Author: Kenneth Reitz
7 | Author-email: me@kennethreitz.org
8 | License: Apache 2.0
9 | Project-URL: Documentation, https://requests.readthedocs.io
10 | Project-URL: Source, https://github.com/psf/requests
11 | Platform: UNKNOWN
12 | Classifier: Development Status :: 5 - Production/Stable
13 | Classifier: Environment :: Web Environment
14 | Classifier: Intended Audience :: Developers
15 | Classifier: License :: OSI Approved :: Apache Software License
16 | Classifier: Natural Language :: English
17 | Classifier: Operating System :: OS Independent
18 | Classifier: Programming Language :: Python
19 | Classifier: Programming Language :: Python :: 3
20 | Classifier: Programming Language :: Python :: 3.7
21 | Classifier: Programming Language :: Python :: 3.8
22 | Classifier: Programming Language :: Python :: 3.9
23 | Classifier: Programming Language :: Python :: 3.10
24 | Classifier: Programming Language :: Python :: 3.11
25 | Classifier: Programming Language :: Python :: 3 :: Only
26 | Classifier: Programming Language :: Python :: Implementation :: CPython
27 | Classifier: Programming Language :: Python :: Implementation :: PyPy
28 | Classifier: Topic :: Internet :: WWW/HTTP
29 | Classifier: Topic :: Software Development :: Libraries
30 | Requires-Python: >=3.7
31 | Description-Content-Type: text/markdown
32 | License-File: LICENSE
33 | Requires-Dist: charset-normalizer (<4,>=2)
34 | Requires-Dist: idna (<4,>=2.5)
35 | Requires-Dist: urllib3 (<3,>=1.21.1)
36 | Requires-Dist: certifi (>=2017.4.17)
37 | Provides-Extra: security
38 | Provides-Extra: socks
39 | Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
40 | Provides-Extra: use_chardet_on_py3
41 | Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3'
42 |
43 | # Requests
44 |
45 | **Requests** is a simple, yet elegant, HTTP library.
46 |
47 | ```python
48 | >>> import requests
49 | >>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
50 | >>> r.status_code
51 | 200
52 | >>> r.headers['content-type']
53 | 'application/json; charset=utf8'
54 | >>> r.encoding
55 | 'utf-8'
56 | >>> r.text
57 | '{"authenticated": true, ...'
58 | >>> r.json()
59 | {'authenticated': True, ...}
60 | ```
61 |
62 | Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
63 |
64 | Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
65 |
66 | [](https://pepy.tech/project/requests)
67 | [](https://pypi.org/project/requests)
68 | [](https://github.com/psf/requests/graphs/contributors)
69 |
70 | ## Installing Requests and Supported Versions
71 |
72 | Requests is available on PyPI:
73 |
74 | ```console
75 | $ python -m pip install requests
76 | ```
77 |
78 | Requests officially supports Python 3.7+.
79 |
80 | ## Supported Features & Best–Practices
81 |
82 | Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
83 |
84 | - Keep-Alive & Connection Pooling
85 | - International Domains and URLs
86 | - Sessions with Cookie Persistence
87 | - Browser-style TLS/SSL Verification
88 | - Basic & Digest Authentication
89 | - Familiar `dict`–like Cookies
90 | - Automatic Content Decompression and Decoding
91 | - Multi-part File Uploads
92 | - SOCKS Proxy Support
93 | - Connection Timeouts
94 | - Streaming Downloads
95 | - Automatic honoring of `.netrc`
96 | - Chunked HTTP Requests
97 |
98 | ## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
99 |
100 | [](https://requests.readthedocs.io)
101 |
102 | ## Cloning the repository
103 |
104 | When cloning the Requests repository, you may need to add the `-c
105 | fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
106 | [this issue](https://github.com/psf/requests/issues/2690) for more background):
107 |
108 | ```shell
109 | git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
110 | ```
111 |
112 | You can also apply this setting to your global Git config:
113 |
114 | ```shell
115 | git config --global fetch.fsck.badTimezone ignore
116 | ```
117 |
118 | ---
119 |
120 | [](https://kennethreitz.org) [](https://www.python.org/psf)
121 |
122 |
123 |
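The README's point about skipping manual form-encoding is the `json=` keyword: Requests serializes the payload and sets the `Content-Type` header for you. A brief sketch; the httpbin URL is just a convenient echo service, not something the package depends on:

```python
import requests

# POST a JSON body without manual encoding; Requests sets
# Content-Type: application/json automatically (requires network access).
resp = requests.post(
    "https://httpbin.org/post",
    json={"user": "demo", "active": True},
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["json"])  # {'user': 'demo', 'active': True}
```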
--------------------------------------------------------------------------------
/.aws-sam/auto-dependency-layer/HelloWorldFunction19d43fc4DepLayer/python/requests-2.31.0.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: requests
3 | Version: 2.31.0
4 | Summary: Python HTTP for Humans.
5 | Home-page: https://requests.readthedocs.io
6 | Author: Kenneth Reitz
7 | Author-email: me@kennethreitz.org
8 | License: Apache 2.0
9 | Project-URL: Documentation, https://requests.readthedocs.io
10 | Project-URL: Source, https://github.com/psf/requests
11 | Platform: UNKNOWN
12 | Classifier: Development Status :: 5 - Production/Stable
13 | Classifier: Environment :: Web Environment
14 | Classifier: Intended Audience :: Developers
15 | Classifier: License :: OSI Approved :: Apache Software License
16 | Classifier: Natural Language :: English
17 | Classifier: Operating System :: OS Independent
18 | Classifier: Programming Language :: Python
19 | Classifier: Programming Language :: Python :: 3
20 | Classifier: Programming Language :: Python :: 3.7
21 | Classifier: Programming Language :: Python :: 3.8
22 | Classifier: Programming Language :: Python :: 3.9
23 | Classifier: Programming Language :: Python :: 3.10
24 | Classifier: Programming Language :: Python :: 3.11
25 | Classifier: Programming Language :: Python :: 3 :: Only
26 | Classifier: Programming Language :: Python :: Implementation :: CPython
27 | Classifier: Programming Language :: Python :: Implementation :: PyPy
28 | Classifier: Topic :: Internet :: WWW/HTTP
29 | Classifier: Topic :: Software Development :: Libraries
30 | Requires-Python: >=3.7
31 | Description-Content-Type: text/markdown
32 | License-File: LICENSE
33 | Requires-Dist: charset-normalizer (<4,>=2)
34 | Requires-Dist: idna (<4,>=2.5)
35 | Requires-Dist: urllib3 (<3,>=1.21.1)
36 | Requires-Dist: certifi (>=2017.4.17)
37 | Provides-Extra: security
38 | Provides-Extra: socks
39 | Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
40 | Provides-Extra: use_chardet_on_py3
41 | Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3'
42 |
43 | # Requests
44 |
45 | **Requests** is a simple, yet elegant, HTTP library.
46 |
47 | ```python
48 | >>> import requests
49 | >>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
50 | >>> r.status_code
51 | 200
52 | >>> r.headers['content-type']
53 | 'application/json; charset=utf8'
54 | >>> r.encoding
55 | 'utf-8'
56 | >>> r.text
57 | '{"authenticated": true, ...'
58 | >>> r.json()
59 | {'authenticated': True, ...}
60 | ```
61 |
62 | Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
63 |
64 | Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
65 |
66 | [](https://pepy.tech/project/requests)
67 | [](https://pypi.org/project/requests)
68 | [](https://github.com/psf/requests/graphs/contributors)
69 |
70 | ## Installing Requests and Supported Versions
71 |
72 | Requests is available on PyPI:
73 |
74 | ```console
75 | $ python -m pip install requests
76 | ```
77 |
78 | Requests officially supports Python 3.7+.
79 |
80 | ## Supported Features & Best–Practices
81 |
82 | Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
83 |
84 | - Keep-Alive & Connection Pooling
85 | - International Domains and URLs
86 | - Sessions with Cookie Persistence
87 | - Browser-style TLS/SSL Verification
88 | - Basic & Digest Authentication
89 | - Familiar `dict`–like Cookies
90 | - Automatic Content Decompression and Decoding
91 | - Multi-part File Uploads
92 | - SOCKS Proxy Support
93 | - Connection Timeouts
94 | - Streaming Downloads
95 | - Automatic honoring of `.netrc`
96 | - Chunked HTTP Requests
97 |
98 | ## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
99 |
100 | [](https://requests.readthedocs.io)
101 |
102 | ## Cloning the repository
103 |
104 | When cloning the Requests repository, you may need to add the `-c
105 | fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
106 | [this issue](https://github.com/psf/requests/issues/2690) for more background):
107 |
108 | ```shell
109 | git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
110 | ```
111 |
112 | You can also apply this setting to your global Git config:
113 |
114 | ```shell
115 | git config --global fetch.fsck.badTimezone ignore
116 | ```
117 |
118 | ---
119 |
120 | [](https://kennethreitz.org) [](https://www.python.org/psf)
121 |
122 |
123 |
--------------------------------------------------------------------------------