├── lib ├── idna │ ├── py.typed │ ├── package_data.py │ ├── compat.py │ ├── __init__.py │ ├── intranges.py │ └── codec.py ├── certifi │ ├── py.typed │ ├── __init__.py │ ├── __main__.py │ └── core.py ├── flox │ ├── version │ ├── settings.py │ ├── clipboard.py │ ├── browser.py │ ├── utils.py │ ├── launcher.py │ └── string_matcher.py ├── charset_normalizer │ ├── py.typed │ ├── cli │ │ └── __init__.py │ ├── version.py │ ├── __init__.py │ └── legacy.py ├── urllib3 │ ├── contrib │ │ ├── __init__.py │ │ ├── _securetransport │ │ │ └── __init__.py │ │ ├── _appengine_environ.py │ │ ├── ntlmpool.py │ │ └── socks.py │ ├── packages │ │ ├── __init__.py │ │ └── backports │ │ │ ├── __init__.py │ │ │ └── makefile.py │ ├── _version.py │ ├── util │ │ ├── queue.py │ │ ├── __init__.py │ │ ├── proxy.py │ │ ├── response.py │ │ ├── request.py │ │ ├── connection.py │ │ ├── wait.py │ │ ├── ssl_match_hostname.py │ │ └── ssltransport.py │ ├── filepost.py │ ├── __init__.py │ └── request.py ├── Flox_lib-0.19.3.dist-info │ ├── REQUESTED │ ├── zip-safe │ ├── INSTALLER │ ├── top_level.txt │ ├── WHEEL │ ├── LICENSE.txt │ ├── RECORD │ └── METADATA ├── flowlauncher-0.2.0.dist-info │ ├── REQUESTED │ ├── INSTALLER │ ├── top_level.txt │ ├── WHEEL │ ├── LICENSE │ ├── RECORD │ └── METADATA ├── idna-3.4.dist-info │ ├── INSTALLER │ ├── WHEEL │ ├── RECORD │ └── LICENSE.md ├── pyperclip-1.8.2.dist-info │ ├── REQUESTED │ ├── INSTALLER │ ├── top_level.txt │ ├── WHEEL │ ├── RECORD │ ├── LICENSE.txt │ ├── AUTHORS.txt │ └── METADATA ├── requests-2.28.1.dist-info │ ├── REQUESTED │ ├── INSTALLER │ ├── top_level.txt │ ├── WHEEL │ ├── RECORD │ └── METADATA ├── urllib3-1.26.15.dist-info │ ├── INSTALLER │ ├── top_level.txt │ ├── WHEEL │ ├── LICENSE.txt │ └── RECORD ├── certifi-2022.12.7.dist-info │ ├── INSTALLER │ ├── top_level.txt │ ├── WHEEL │ ├── RECORD │ ├── LICENSE │ └── METADATA ├── charset_normalizer-2.1.1.dist-info │ ├── INSTALLER │ ├── top_level.txt │ ├── entry_points.txt │ ├── WHEEL │ ├── LICENSE │ └── RECORD ├── bin │ └── normalizer.exe ├── flowlauncher │ ├── __init__.py │ ├── _version.py │ ├── FlowLauncher.py │ └── FlowLauncherAPI.py ├── requests │ ├── __version__.py │ ├── certs.py │ ├── hooks.py │ ├── packages.py │ ├── _internal_utils.py │ ├── compat.py │ ├── structures.py │ ├── exceptions.py │ ├── help.py │ ├── status_codes.py │ ├── __init__.py │ └── api.py └── pyperclip │ └── __main__.py ├── Images └── app.png ├── requirements.txt ├── temp_text.txt ├── system_messages.csv ├── main.py ├── plugin.json ├── .github ├── stale.yaml └── workflows │ ├── stale.yml │ └── main.yml ├── LICENSE ├── SettingsTemplate.yaml ├── .gitignore └── README.md /lib/idna/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/certifi/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/flox/version: -------------------------------------------------------------------------------- 1 | 0.19.3 2 | -------------------------------------------------------------------------------- /lib/charset_normalizer/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/urllib3/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /lib/urllib3/packages/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/charset_normalizer/cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/REQUESTED: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/zip-safe: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/REQUESTED: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/idna-3.4.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/REQUESTED: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/requests-2.28.1.dist-info/REQUESTED: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/urllib3/packages/backports/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/requests-2.28.1.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/urllib3-1.26.15.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/urllib3/contrib/_securetransport/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | flox 2 | -------------------------------------------------------------------------------- /lib/certifi-2022.12.7.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/INSTALLER: 
-------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/idna/package_data.py: -------------------------------------------------------------------------------- 1 | __version__ = '3.4' 2 | 3 | -------------------------------------------------------------------------------- /lib/certifi-2022.12.7.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | certifi 2 | -------------------------------------------------------------------------------- /lib/charset_normalizer-2.1.1.dist-info/INSTALLER: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | pyperclip 2 | -------------------------------------------------------------------------------- /lib/requests-2.28.1.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | requests 2 | -------------------------------------------------------------------------------- /lib/urllib3-1.26.15.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | urllib3 2 | -------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | flowlauncher 2 | -------------------------------------------------------------------------------- /lib/charset_normalizer-2.1.1.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | charset_normalizer 2 | -------------------------------------------------------------------------------- /lib/urllib3/_version.py: -------------------------------------------------------------------------------- 1 | # This file is protected via CODEOWNERS 2 | __version__ = "1.26.15" 3 | -------------------------------------------------------------------------------- /Images/app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MichielvanBeers/Flow.Launcher.Plugin.ChatGPT/HEAD/Images/app.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MichielvanBeers/Flow.Launcher.Plugin.ChatGPT/HEAD/requirements.txt -------------------------------------------------------------------------------- /lib/bin/normalizer.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MichielvanBeers/Flow.Launcher.Plugin.ChatGPT/HEAD/lib/bin/normalizer.exe -------------------------------------------------------------------------------- /lib/idna-3.4.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: flit 3.7.1 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | -------------------------------------------------------------------------------- /lib/certifi/__init__.py: -------------------------------------------------------------------------------- 1 | from .core import contents, where 
2 | 3 | __all__ = ["contents", "where"] 4 | __version__ = "2022.12.07" 5 | -------------------------------------------------------------------------------- /lib/charset_normalizer/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Expose version 3 | """ 4 | 5 | __version__ = "2.1.1" 6 | VERSION = __version__.split(".") 7 | -------------------------------------------------------------------------------- /lib/charset_normalizer-2.1.1.dist-info/entry_points.txt: -------------------------------------------------------------------------------- 1 | [console_scripts] 2 | normalizer = charset_normalizer.cli.normalizer:cli_detect 3 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.38.4) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.38.4) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lib/requests-2.28.1.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.37.1) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lib/certifi-2022.12.7.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.37.0) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.38.2) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lib/charset_normalizer-2.1.1.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.37.1) 3 | Root-Is-Purelib: true 4 | Tag: py3-none-any 5 | 6 | -------------------------------------------------------------------------------- /lib/urllib3-1.26.15.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.38.4) 3 | Root-Is-Purelib: true 4 | Tag: py2-none-any 5 | Tag: py3-none-any 6 | 7 | -------------------------------------------------------------------------------- /temp_text.txt: -------------------------------------------------------------------------------- 1 | ## This file will be used as output for the prompts ## 2 | ## Note that every time you select Open in Notepad, ## 3 | ## the content of this file will be overwritten. 
## -------------------------------------------------------------------------------- /lib/certifi/__main__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from certifi import contents, where 4 | 5 | parser = argparse.ArgumentParser() 6 | parser.add_argument("-c", "--contents", action="store_true") 7 | args = parser.parse_args() 8 | 9 | if args.contents: 10 | print(contents()) 11 | else: 12 | print(where()) 13 | -------------------------------------------------------------------------------- /lib/flowlauncher/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from ._version import get_versions 4 | from .FlowLauncher import FlowLauncher # noqa 5 | from .FlowLauncherAPI import FlowLauncherAPI # noqa 6 | 7 | __version__ = get_versions()["version"] 8 | del get_versions 9 | 10 | __license__ = 'MIT' 11 | __short_description__ = 'Flow Launcher supports Python by JsonRPC.' 12 | -------------------------------------------------------------------------------- /system_messages.csv: -------------------------------------------------------------------------------- 1 | Key Word;System Message 2 | normal;You are an all-knowing AI bot. 3 | short;You are an all-knowing AI bot. All your answers are short, to the point, and don't give any additional context. 4 | long;You are an all-knowing AI bot. All your answers are in-depth and give both a step-by-step explanation how you came to that answer, as well as references to the resources you used. 5 | -------------------------------------------------------------------------------- /lib/idna/compat.py: -------------------------------------------------------------------------------- 1 | from .core import * 2 | from .codec import * 3 | from typing import Any, Union 4 | 5 | def ToASCII(label: str) -> bytes: 6 | return encode(label) 7 | 8 | def ToUnicode(label: Union[bytes, bytearray]) -> str: 9 | return decode(label) 10 | 11 | def nameprep(s: Any) -> None: 12 | raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') 13 | 14 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import sys 4 | import os 5 | 6 | parent_folder_path = os.path.abspath(os.path.dirname(__file__)) 7 | sys.path.append(parent_folder_path) 8 | sys.path.append(os.path.join(parent_folder_path, "lib")) 9 | sys.path.append(os.path.join(parent_folder_path, "plugin")) 10 | 11 | from plugin.main import ChatGPT 12 | 13 | 14 | if __name__ == "__main__": 15 | ChatGPT() 16 | -------------------------------------------------------------------------------- /lib/requests/__version__.py: -------------------------------------------------------------------------------- 1 | # .-. .-. .-. . . .-. .-. .-. .-. 2 | # |( |- |.| | | |- `-. | `-. 3 | # ' ' `-' `-`.`-' `-' `-' ' `-' 4 | 5 | __title__ = "requests" 6 | __description__ = "Python HTTP for Humans." 
7 | __url__ = "https://requests.readthedocs.io" 8 | __version__ = "2.28.1" 9 | __build__ = 0x022801 10 | __author__ = "Kenneth Reitz" 11 | __author_email__ = "me@kennethreitz.org" 12 | __license__ = "Apache 2.0" 13 | __copyright__ = "Copyright 2022 Kenneth Reitz" 14 | __cake__ = "\u2728 \U0001f370 \u2728" 15 | -------------------------------------------------------------------------------- /lib/requests/certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | requests.certs 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module returns the preferred default CA certificate bundle. There is 8 | only one — the one from the certifi package. 9 | 10 | If you are packaging Requests, e.g., for a Linux distribution or a managed 11 | environment, you can change the definition of where() to return a separately 12 | packaged CA bundle. 13 | """ 14 | from certifi import where 15 | 16 | if __name__ == "__main__": 17 | print(where()) 18 | -------------------------------------------------------------------------------- /lib/flowlauncher/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file was generated by 'versioneer.py' (0.27) from 3 | # revision-control system data, or from the parent directory name of an 4 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 5 | # of this file. 6 | 7 | import json 8 | 9 | version_json = ''' 10 | { 11 | "date": "2022-11-07T10:04:30+0800", 12 | "dirty": false, 13 | "error": null, 14 | "full-revisionid": "c4f12e915842b03ad720f8480f2c901b5fd91cae", 15 | "version": "0.2.0" 16 | } 17 | ''' # END VERSION_JSON 18 | 19 | 20 | def get_versions(): 21 | return json.loads(version_json) 22 | -------------------------------------------------------------------------------- /lib/urllib3/util/queue.py: -------------------------------------------------------------------------------- 1 | import collections 2 | 3 | from ..packages import six 4 | from ..packages.six.moves import queue 5 | 6 | if six.PY2: 7 | # Queue is imported for side effects on MS Windows. See issue #229. 
8 | import Queue as _unused_module_Queue # noqa: F401 9 | 10 | 11 | class LifoQueue(queue.Queue): 12 | def _init(self, _): 13 | self.queue = collections.deque() 14 | 15 | def _qsize(self, len=len): 16 | return len(self.queue) 17 | 18 | def _put(self, item): 19 | self.queue.append(item) 20 | 21 | def _get(self): 22 | return self.queue.pop() 23 | -------------------------------------------------------------------------------- /plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "ID": "df432fe0-9ff7-4ba1-b0fd-5ffd26efbf86", 3 | "ActionKeyword": "ai", 4 | "Name": "ChatGPT", 5 | "Description": "Plugin to use OpenAI's ChatGPT in Flow Launcher", 6 | "Author": "MichielvanBeers", 7 | "Version": "1.3.0", 8 | "Language": "python", 9 | "Website": "https://github.com/MichielvanBeers/Flow.Launcher.Plugin.ChatGPT", 10 | "IcoPath": "Images\\app.png", 11 | "ExecuteFileName": "main.py", 12 | "Settings": { 13 | "api_endpoint": { 14 | "type": "input", 15 | "label": "API Endpoint", 16 | "defaultValue": "https://api.openai.com/v1/chat/completions", 17 | "description": "Custom OpenAI API endpoint" 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.github/stale.yaml: -------------------------------------------------------------------------------- 1 | name: Close inactive issues 2 | on: 3 | schedule: 4 | - cron: "30 1 * * *" 5 | 6 | jobs: 7 | close-issues: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@v5 14 | with: 15 | days-before-issue-stale: 30 16 | days-before-issue-close: 14 17 | stale-issue-label: "stale" 18 | stale-issue-message: "This issue is stale because it has been open for 30 days with no activity." 19 | close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale." 20 | days-before-pr-stale: -1 21 | days-before-pr-close: -1 22 | repo-token: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /lib/pyperclip/__main__.py: -------------------------------------------------------------------------------- 1 | import pyperclip 2 | import sys 3 | 4 | if len(sys.argv) > 1 and sys.argv[1] in ('-c', '--copy'): 5 | if len(sys.argv) > 2: 6 | pyperclip.copy(sys.argv[2]) 7 | else: 8 | pyperclip.copy(sys.stdin.read()) 9 | elif len(sys.argv) > 1 and sys.argv[1] in ('-p', '--paste'): 10 | sys.stdout.write(pyperclip.paste()) 11 | else: 12 | print('Usage: python -m pyperclip [-c | --copy] [text_to_copy] | [-p | --paste]') 13 | print() 14 | print('If a text_to_copy argument is provided, it is copied to the') 15 | print('clipboard. Otherwise, the stdin stream is copied to the') 16 | print('clipboard. (If reading this in from the keyboard, press') 17 | print('CTRL-Z on Windows or CTRL-D on Linux/macOS to stop.') 18 | print('When pasting, the clipboard will be written to stdout.') -------------------------------------------------------------------------------- /lib/requests/hooks.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.hooks 3 | ~~~~~~~~~~~~~~ 4 | 5 | This module provides the capabilities for the Requests hooks system. 6 | 7 | Available hooks: 8 | 9 | ``response``: 10 | The response generated from a Request. 
11 | """ 12 | HOOKS = ["response"] 13 | 14 | 15 | def default_hooks(): 16 | return {event: [] for event in HOOKS} 17 | 18 | 19 | # TODO: response is the only one 20 | 21 | 22 | def dispatch_hook(key, hooks, hook_data, **kwargs): 23 | """Dispatches a hook dictionary on a given piece of data.""" 24 | hooks = hooks or {} 25 | hooks = hooks.get(key) 26 | if hooks: 27 | if hasattr(hooks, "__call__"): 28 | hooks = [hooks] 29 | for hook in hooks: 30 | _hook_data = hook(hook_data, **kwargs) 31 | if _hook_data is not None: 32 | hook_data = _hook_data 33 | return hook_data 34 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: 'Close stale issues and PRs' 2 | on: 3 | schedule: 4 | - cron: '30 1 * * *' 5 | 6 | jobs: 7 | stale: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/stale@v8 11 | with: 12 | stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 5 days.' 13 | stale-pr-message: 'This PR is stale because it has been open 45 days with no activity. Remove stale label or comment or this will be closed in 10 days.' 14 | close-issue-message: 'This issue was closed because it has been stalled for 5 days with no activity.' 15 | close-pr-message: 'This PR was closed because it has been stalled for 10 days with no activity.' 16 | days-before-issue-stale: 30 17 | days-before-pr-stale: 45 18 | days-before-issue-close: 5 19 | days-before-pr-close: 10 20 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | pyperclip-1.8.2.dist-info/AUTHORS.txt,sha256=TcQG60YcFnJw6-7oPv21RDXesk9QP_wfvg4dJ6mXESQ,1925 2 | pyperclip-1.8.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 3 | pyperclip-1.8.2.dist-info/LICENSE.txt,sha256=9Xk_TEsoHeHy-Szeso4x5X4tq4As6Wewu9EH7Vu8LjU,1514 4 | pyperclip-1.8.2.dist-info/METADATA,sha256=sGXZURtY3v5qi6u87Gu4oetNsroOGXlMwpqQ-_l_n9o,2374 5 | pyperclip-1.8.2.dist-info/RECORD,, 6 | pyperclip-1.8.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 7 | pyperclip-1.8.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 8 | pyperclip-1.8.2.dist-info/top_level.txt,sha256=leI5OPkUKAOaQl9ATsm3ggu-DA_33DH76xC_nLGPH-I,10 9 | pyperclip/__init__.py,sha256=IqvWttZrSEEZB2wqyRRMeoT7eoIdPbNwrMe3v0M-aJs,26043 10 | pyperclip/__main__.py,sha256=itIOl_l6GNHXx707JROTiNPcvZiU3xuhAk66dk3xlwY,765 11 | pyperclip/__pycache__/__init__.cpython-310.pyc,, 12 | pyperclip/__pycache__/__main__.cpython-310.pyc,, 13 | -------------------------------------------------------------------------------- /lib/idna/__init__.py: -------------------------------------------------------------------------------- 1 | from .package_data import __version__ 2 | from .core import ( 3 | IDNABidiError, 4 | IDNAError, 5 | InvalidCodepoint, 6 | InvalidCodepointContext, 7 | alabel, 8 | check_bidi, 9 | check_hyphen_ok, 10 | check_initial_combiner, 11 | check_label, 12 | check_nfc, 13 | decode, 14 | encode, 15 | ulabel, 16 | uts46_remap, 17 | valid_contextj, 18 | valid_contexto, 19 | valid_label_length, 20 | valid_string_length, 21 | ) 22 | from .intranges import intranges_contain 23 | 24 | __all__ = [ 25 | "IDNABidiError", 26 | "IDNAError", 27 | "InvalidCodepoint", 28 | 
"InvalidCodepointContext", 29 | "alabel", 30 | "check_bidi", 31 | "check_hyphen_ok", 32 | "check_initial_combiner", 33 | "check_label", 34 | "check_nfc", 35 | "decode", 36 | "encode", 37 | "intranges_contain", 38 | "ulabel", 39 | "uts46_remap", 40 | "valid_contextj", 41 | "valid_contexto", 42 | "valid_label_length", 43 | "valid_string_length", 44 | ] 45 | -------------------------------------------------------------------------------- /lib/certifi-2022.12.7.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | certifi-2022.12.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | certifi-2022.12.7.dist-info/LICENSE,sha256=oC9sY4-fuE0G93ZMOrCF2K9-2luTwWbaVDEkeQd8b7A,1052 3 | certifi-2022.12.7.dist-info/METADATA,sha256=chFpcxKhCPEQ3d8-Vz36zr2Micf1eQhKkFFk7_JvJNo,2911 4 | certifi-2022.12.7.dist-info/RECORD,, 5 | certifi-2022.12.7.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 6 | certifi-2022.12.7.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 7 | certifi/__init__.py,sha256=bK_nm9bLJzNvWZc2oZdiTwg2KWD4HSPBWGaM0zUDvMw,94 8 | certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 9 | certifi/__pycache__/__init__.cpython-310.pyc,, 10 | certifi/__pycache__/__main__.cpython-310.pyc,, 11 | certifi/__pycache__/core.cpython-310.pyc,, 12 | certifi/cacert.pem,sha256=LBHDzgj_xA05AxnHK8ENT5COnGNElNZe0svFUHMf1SQ,275233 13 | certifi/core.py,sha256=lhewz0zFb2b4ULsQurElmloYwQoecjWzPqY67P8T7iM,4219 14 | certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 15 | -------------------------------------------------------------------------------- /lib/requests/packages.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | try: 4 | import chardet 5 | except ImportError: 6 | import warnings 7 | 8 | import charset_normalizer as chardet 9 | 10 | warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") 11 | 12 | # This code exists for backwards compatibility reasons. 13 | # I don't like it either. Just look the other way. :) 14 | 15 | for package in ("urllib3", "idna"): 16 | locals()[package] = __import__(package) 17 | # This traversal is apparently necessary such that the identities are 18 | # preserved (requests.packages.urllib3.* is urllib3.*) 19 | for mod in list(sys.modules): 20 | if mod == package or mod.startswith(f"{package}."): 21 | sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] 22 | 23 | target = chardet.__name__ 24 | for mod in list(sys.modules): 25 | if mod == target or mod.startswith(f"{target}."): 26 | target = target.replace(target, "chardet") 27 | sys.modules[f"requests.packages.{target}"] = sys.modules[mod] 28 | # Kinda cool, though, right? 29 | -------------------------------------------------------------------------------- /lib/urllib3/contrib/_appengine_environ.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides means to detect the App Engine environment. 3 | """ 4 | 5 | import os 6 | 7 | 8 | def is_appengine(): 9 | return is_local_appengine() or is_prod_appengine() 10 | 11 | 12 | def is_appengine_sandbox(): 13 | """Reports if the app is running in the first generation sandbox. 14 | 15 | The second generation runtimes are technically still in a sandbox, but it 16 | is much less restrictive, so generally you shouldn't need to check for it. 
17 | see https://cloud.google.com/appengine/docs/standard/runtimes 18 | """ 19 | return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" 20 | 21 | 22 | def is_local_appengine(): 23 | return "APPENGINE_RUNTIME" in os.environ and os.environ.get( 24 | "SERVER_SOFTWARE", "" 25 | ).startswith("Development/") 26 | 27 | 28 | def is_prod_appengine(): 29 | return "APPENGINE_RUNTIME" in os.environ and os.environ.get( 30 | "SERVER_SOFTWARE", "" 31 | ).startswith("Google App Engine/") 32 | 33 | 34 | def is_prod_appengine_mvms(): 35 | """Deprecated.""" 36 | return False 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Michiel van Beers 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/certifi-2022.12.7.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | This package contains a modified version of ca-bundle.crt: 2 | 3 | ca-bundle.crt -- Bundle of CA Root Certificates 4 | 5 | Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# 6 | This is a bundle of X.509 certificates of public Certificate Authorities 7 | (CA). These were automatically extracted from Mozilla's root certificates 8 | file (certdata.txt). This file can be found in the mozilla source tree: 9 | https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt 10 | It contains the certificates in PEM format and therefore 11 | can be directly used with curl / libcurl / php_curl, or with 12 | an Apache+mod_ssl webserver for SSL client authentication. 13 | Just configure this file as the SSLCACertificateFile.# 14 | 15 | ***** BEGIN LICENSE BLOCK ***** 16 | This Source Code Form is subject to the terms of the Mozilla Public License, 17 | v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain 18 | one at http://mozilla.org/MPL/2.0/. 
19 | 20 | ***** END LICENSE BLOCK ***** 21 | @(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ 22 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [year] [fullname] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/charset_normalizer-2.1.1.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 TAHRI Ahmed R. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Flow-Launcher 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | flowlauncher-0.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | flowlauncher-0.2.0.dist-info/LICENSE,sha256=ghgUeUeZuF3zrJuPItKQfHORRmO9dfY_TFUUtkarxL4,1070 3 | flowlauncher-0.2.0.dist-info/METADATA,sha256=dm4FVJ6W7D4H_HB-M_0nLIuRcIRWeu6qTgxD0ozFiCs,2882 4 | flowlauncher-0.2.0.dist-info/RECORD,, 5 | flowlauncher-0.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 6 | flowlauncher-0.2.0.dist-info/WHEEL,sha256=ZL1lC_LiPDNRgDnOl2taCMc83aPEUZgHHv2h-LDgdiM,92 7 | flowlauncher-0.2.0.dist-info/top_level.txt,sha256=6w21mpB53RUVYxLbLn9bJusgNZKy0BBdP0HtBTDB6WM,13 8 | flowlauncher/FlowLauncher.py,sha256=_6uLoP7sJYKRvvJXTFSZ3QfIu-Xc_fCs2r0MkgEaOh4,1450 9 | flowlauncher/FlowLauncherAPI.py,sha256=Tq3AS_PyHGAgtS5avAocLKruPESoo70l3x8GO6-gxl4,2267 10 | flowlauncher/__init__.py,sha256=aIG4Da4cmfLfXwX99tc6BUX-yuYtropTV5XSEzCDwJo,307 11 | flowlauncher/__pycache__/FlowLauncher.cpython-310.pyc,, 12 | flowlauncher/__pycache__/FlowLauncherAPI.cpython-310.pyc,, 13 | flowlauncher/__pycache__/__init__.cpython-310.pyc,, 14 | flowlauncher/__pycache__/_version.cpython-310.pyc,, 15 | flowlauncher/_version.py,sha256=N1B1W-NeZ6MiRBIgcAjQ6l7RMePfWtsFi7Qs8ENzGSo,497 16 | -------------------------------------------------------------------------------- /lib/urllib3-1.26.15.dist-info/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following 
conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Publish Release 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ main ] 7 | paths-ignore: 8 | - .github/workflows/* 9 | 10 | permissions: 11 | contents: write 12 | 13 | jobs: 14 | publish: 15 | runs-on: ubuntu-latest 16 | env: 17 | python_ver: "3.10" 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: Set up Python ${{ env.python_ver }} 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: ${{ env.python_ver }} 24 | - name: get version 25 | id: version 26 | uses: notiz-dev/github-action-json-property@release 27 | with: 28 | path: 'plugin.json' 29 | prop_path: 'Version' 30 | - run: echo ${{steps.version.outputs.prop}} 31 | - name: Install dependencies 32 | run: | 33 | python -m pip install --upgrade pip 34 | pip install -r ./requirements.txt -t ./lib 35 | zip -r Flow.Launcher.Plugin.ChatGPT.zip . -x '*.git*' 36 | - name: Publish 37 | if: success() 38 | uses: softprops/action-gh-release@v1 39 | with: 40 | files: 'Flow.Launcher.Plugin.ChatGPT.zip' 41 | tag_name: "v${{steps.version.outputs.prop}}" 42 | env: 43 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 44 | -------------------------------------------------------------------------------- /lib/urllib3/util/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | # For backwards compatibility, provide imports that used to be here. 
4 | from .connection import is_connection_dropped 5 | from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers 6 | from .response import is_fp_closed 7 | from .retry import Retry 8 | from .ssl_ import ( 9 | ALPN_PROTOCOLS, 10 | HAS_SNI, 11 | IS_PYOPENSSL, 12 | IS_SECURETRANSPORT, 13 | PROTOCOL_TLS, 14 | SSLContext, 15 | assert_fingerprint, 16 | resolve_cert_reqs, 17 | resolve_ssl_version, 18 | ssl_wrap_socket, 19 | ) 20 | from .timeout import Timeout, current_time 21 | from .url import Url, get_host, parse_url, split_first 22 | from .wait import wait_for_read, wait_for_write 23 | 24 | __all__ = ( 25 | "HAS_SNI", 26 | "IS_PYOPENSSL", 27 | "IS_SECURETRANSPORT", 28 | "SSLContext", 29 | "PROTOCOL_TLS", 30 | "ALPN_PROTOCOLS", 31 | "Retry", 32 | "Timeout", 33 | "Url", 34 | "assert_fingerprint", 35 | "current_time", 36 | "is_connection_dropped", 37 | "is_fp_closed", 38 | "get_host", 39 | "parse_url", 40 | "make_headers", 41 | "resolve_cert_reqs", 42 | "resolve_ssl_version", 43 | "split_first", 44 | "ssl_wrap_socket", 45 | "wait_for_read", 46 | "wait_for_write", 47 | "SKIP_HEADER", 48 | "SKIPPABLE_HEADERS", 49 | ) 50 | -------------------------------------------------------------------------------- /lib/idna-3.4.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | idna-3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | idna-3.4.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523 3 | idna-3.4.dist-info/METADATA,sha256=8aLSf9MFS7oB26pZh2hprg7eJp0UJSc-3rpf_evp4DA,9830 4 | idna-3.4.dist-info/RECORD,, 5 | idna-3.4.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 6 | idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 7 | idna/__pycache__/__init__.cpython-310.pyc,, 8 | idna/__pycache__/codec.cpython-310.pyc,, 9 | idna/__pycache__/compat.cpython-310.pyc,, 10 | idna/__pycache__/core.cpython-310.pyc,, 11 | idna/__pycache__/idnadata.cpython-310.pyc,, 12 | idna/__pycache__/intranges.cpython-310.pyc,, 13 | idna/__pycache__/package_data.cpython-310.pyc,, 14 | idna/__pycache__/uts46data.cpython-310.pyc,, 15 | idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 16 | idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 17 | idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950 18 | idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375 19 | idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 20 | idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21 21 | idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 22 | idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539 23 | -------------------------------------------------------------------------------- /lib/flox/settings.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import json 3 | 4 | class Settings(dict): 5 | 6 | def __init__(self, filepath): 7 | super(Settings, self).__init__() 8 | self._filepath = filepath 9 | self._save = True 10 | if Path(self._filepath).exists(): 11 | self._load() 12 | else: 13 | data = {} 14 | self.update(data) 15 | self.save() 16 | 17 | 18 | def _load(self): 19 | data = {} 20 | with open(self._filepath, 'r') as f: 21 | try: 22 | data.update(json.load(f)) 23 | except json.decoder.JSONDecodeError: 24 | pass 25 | 26 | self._save = False 27 | self.update(data) 28 | self._save 
= True 29 | 30 | def save(self): 31 | if self._save: 32 | data = {} 33 | data.update(self) 34 | with open(self._filepath, 'w') as f: 35 | json.dump(data, f, sort_keys=True, indent=4) 36 | return 37 | 38 | def __setitem__(self, key, value): 39 | super(Settings, self).__setitem__(key, value) 40 | self.save() 41 | 42 | def __delitem__(self, key): 43 | super(Settings, self).__delitem__(key) 44 | self.save() 45 | 46 | def update(self, *args, **kwargs): 47 | super(Settings, self).update(*args, **kwargs) 48 | self.save() 49 | 50 | def setdefault(self, key, value=None): 51 | ret = super(Settings, self).setdefault(key, value) 52 | self.save() 53 | return ret 54 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014, Al Sweigart 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | * Neither the name of the {organization} nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
28 | -------------------------------------------------------------------------------- /lib/requests/_internal_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests._internal_utils 3 | ~~~~~~~~~~~~~~ 4 | 5 | Provides utility functions that are consumed internally by Requests 6 | which depend on extremely few external helpers (such as compat) 7 | """ 8 | import re 9 | 10 | from .compat import builtin_str 11 | 12 | _VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") 13 | _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") 14 | _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") 15 | _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") 16 | 17 | HEADER_VALIDATORS = { 18 | bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE), 19 | str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR), 20 | } 21 | 22 | 23 | def to_native_string(string, encoding="ascii"): 24 | """Given a string object, regardless of type, returns a representation of 25 | that string in the native string type, encoding and decoding where 26 | necessary. This assumes ASCII unless told otherwise. 27 | """ 28 | if isinstance(string, builtin_str): 29 | out = string 30 | else: 31 | out = string.decode(encoding) 32 | 33 | return out 34 | 35 | 36 | def unicode_is_ascii(u_string): 37 | """Determine if unicode string only contains ASCII characters. 38 | 39 | :param str u_string: unicode string to check. Must be unicode 40 | and not Python 2 `str`. 41 | :rtype: bool 42 | """ 43 | assert isinstance(u_string, str) 44 | try: 45 | u_string.encode("ascii") 46 | return True 47 | except UnicodeEncodeError: 48 | return False 49 | -------------------------------------------------------------------------------- /lib/idna-3.4.dist-info/LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2013-2021, Kim Davies 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | Flox_lib-0.19.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | Flox_lib-0.19.3.dist-info/LICENSE.txt,sha256=ACwmltkrXIz5VsEQcrqljq-fat6ZXAMepjXGoe40KtE,1069 3 | Flox_lib-0.19.3.dist-info/METADATA,sha256=pkzy1KjfOUhN9yuNkYRZgJx-4ZJybSRXXOJ_KRydQ1I,1565 4 | Flox_lib-0.19.3.dist-info/RECORD,, 5 | Flox_lib-0.19.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 6 | Flox_lib-0.19.3.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 7 | Flox_lib-0.19.3.dist-info/top_level.txt,sha256=ToVvesHvA4pegruUxqdJ2gkmKACaQRSWPIKAoqyR9_0,5 8 | Flox_lib-0.19.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 9 | flox/__init__.py,sha256=_vDPnKHQJfofQZMr4c0QPpnBhQplQ3ZfAb5EsOOhOQM,11328 10 | flox/__pycache__/__init__.cpython-310.pyc,, 11 | flox/__pycache__/browser.cpython-310.pyc,, 12 | flox/__pycache__/clipboard.cpython-310.pyc,, 13 | flox/__pycache__/launcher.cpython-310.pyc,, 14 | flox/__pycache__/settings.cpython-310.pyc,, 15 | flox/__pycache__/string_matcher.cpython-310.pyc,, 16 | flox/__pycache__/utils.cpython-310.pyc,, 17 | flox/browser.py,sha256=olZ3cbG1bzXc97AK0M60CNE6JLMkJhnHdOWpnFrdk_s,2732 18 | flox/clipboard.py,sha256=OKkS8guLa6D1OjnQXdvh_U8cRTrGijWJBepb99mhHtk,2319 19 | flox/launcher.py,sha256=E9je22w2ntnCr45ARe7_5pfGhNCsA5Sx5WvyZeSV8UY,4457 20 | flox/settings.py,sha256=Vi67rPTrp-97vYkdCiVObiLc8m_Tg24i1emn2aahJSI,1351 21 | flox/string_matcher.py,sha256=2WQ3HBX4HMdPKnZr6eVNYML6kOlwjwHlhL59OkIywM4,7877 22 | flox/utils.py,sha256=X5-hQz80UXX94XxvshB7vVHSb4MAoZChoZ29vjgdFE0,3943 23 | flox/version,sha256=bxXCv71PtYt7hPCo-ANvTW8xEs0_DLdZ3_6GzdaWtlA,7 24 | -------------------------------------------------------------------------------- /lib/urllib3/packages/backports/makefile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | backports.makefile 4 | ~~~~~~~~~~~~~~~~~~ 5 | 6 | Backports the Python 3 ``socket.makefile`` method for use with anything that 7 | wants to create a "fake" socket object. 8 | """ 9 | import io 10 | from socket import SocketIO 11 | 12 | 13 | def backport_makefile( 14 | self, mode="r", buffering=None, encoding=None, errors=None, newline=None 15 | ): 16 | """ 17 | Backport of ``socket.makefile`` from Python 3.5. 
18 | """ 19 | if not set(mode) <= {"r", "w", "b"}: 20 | raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) 21 | writing = "w" in mode 22 | reading = "r" in mode or not writing 23 | assert reading or writing 24 | binary = "b" in mode 25 | rawmode = "" 26 | if reading: 27 | rawmode += "r" 28 | if writing: 29 | rawmode += "w" 30 | raw = SocketIO(self, rawmode) 31 | self._makefile_refs += 1 32 | if buffering is None: 33 | buffering = -1 34 | if buffering < 0: 35 | buffering = io.DEFAULT_BUFFER_SIZE 36 | if buffering == 0: 37 | if not binary: 38 | raise ValueError("unbuffered streams must be binary") 39 | return raw 40 | if reading and writing: 41 | buffer = io.BufferedRWPair(raw, raw, buffering) 42 | elif reading: 43 | buffer = io.BufferedReader(raw, buffering) 44 | else: 45 | assert writing 46 | buffer = io.BufferedWriter(raw, buffering) 47 | if binary: 48 | return buffer 49 | text = io.TextIOWrapper(buffer, encoding, errors, newline) 50 | text.mode = mode 51 | return text 52 | -------------------------------------------------------------------------------- /lib/flowlauncher/FlowLauncher.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | import inspect 5 | import sys 6 | from json import loads, dumps 7 | 8 | 9 | class FlowLauncher: 10 | """ 11 | Flow.Launcher python plugin base 12 | """ 13 | 14 | def __init__(self): 15 | 16 | # defalut jsonrpc 17 | self.rpc_request = {'method': 'query', 'parameters': ['']} 18 | self.debugMessage = "" 19 | 20 | if len(sys.argv) > 1: 21 | 22 | # Gets JSON-RPC from Flow Launcher process. 23 | self.rpc_request = loads(sys.argv[1]) 24 | 25 | # proxy is not working now 26 | # self.proxy = self.rpc_request.get("proxy", {}) 27 | 28 | request_method_name = self.rpc_request.get("method", "query") 29 | request_parameters = self.rpc_request.get("parameters", []) 30 | 31 | methods = inspect.getmembers(self, predicate=inspect.ismethod) 32 | request_method = dict(methods)[request_method_name] 33 | results = request_method(*request_parameters) 34 | 35 | if request_method_name in ("query", "context_menu"): 36 | print(dumps({ 37 | "result": results, 38 | "debugMessage": self.debugMessage 39 | })) 40 | 41 | def query(self, param: str = '') -> list: 42 | """ 43 | sub class need to override this method 44 | """ 45 | return [] 46 | 47 | def context_menu(self, data) -> list: 48 | """ 49 | optional context menu entries for a result 50 | """ 51 | return [] 52 | 53 | def debug(self, msg: str): 54 | """ 55 | alert msg 56 | """ 57 | self.debugMessage = msg 58 | -------------------------------------------------------------------------------- /SettingsTemplate.yaml: -------------------------------------------------------------------------------- 1 | body: 2 | - type: passwordBox 3 | attributes: 4 | name: api_key 5 | label: "API Key:" 6 | defaultValue: "" 7 | description: OpenAI API Key 8 | - type: dropdown 9 | attributes: 10 | name: model 11 | label: "Model:" 12 | defaultValue: gpt-3.5-turbo 13 | options: 14 | - gpt-3.5-turbo 15 | - gpt-4 16 | - gpt-4-0125-preview 17 | - gpt-4-turbo-preview 18 | - gpt-4-1106-preview 19 | - gpt-4-0613 20 | - gpt-4o 21 | - gpt-4o-mini 22 | - type: input 23 | attributes: 24 | name: prompt_stop 25 | label: "Prompt stop:" 26 | defaultValue: "||" 27 | description: Characters to indicate end of prompt 28 | - type: input 29 | attributes: 30 | name: default_prompt 31 | label: "Default system prompt:" 32 | defaultValue: "normal" 33 | description: Default key word that will 
be looked up in the system_messages.csv file 34 | - type: checkbox 35 | attributes: 36 | name: save_conversation 37 | label: 'Save conversation:' 38 | defaultValue: "false" 39 | description: Check to save the conversations for each prompt type in a .txt file in the plugin folder 40 | - type: dropdown 41 | attributes: 42 | name: log_level 43 | label: "Log Level:" 44 | defaultValue: error 45 | options: 46 | - info 47 | - debug 48 | - warning 49 | - error 50 | - critical 51 | - type: input 52 | attributes: 53 | name: api_endpoint 54 | label: "API Endpoint:" 55 | defaultValue: "https://api.openai.com/v1/chat/completions" 56 | description: Custom OpenAI API endpoint 57 | -------------------------------------------------------------------------------- /lib/requests/compat.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.compat 3 | ~~~~~~~~~~~~~~~ 4 | 5 | This module previously handled import compatibility issues 6 | between Python 2 and Python 3. It remains for backwards 7 | compatibility until the next major version. 8 | """ 9 | 10 | try: 11 | import chardet 12 | except ImportError: 13 | import charset_normalizer as chardet 14 | 15 | import sys 16 | 17 | # ------- 18 | # Pythons 19 | # ------- 20 | 21 | # Syntax sugar. 22 | _ver = sys.version_info 23 | 24 | #: Python 2.x? 25 | is_py2 = _ver[0] == 2 26 | 27 | #: Python 3.x? 28 | is_py3 = _ver[0] == 3 29 | 30 | # json/simplejson module import resolution 31 | has_simplejson = False 32 | try: 33 | import simplejson as json 34 | 35 | has_simplejson = True 36 | except ImportError: 37 | import json 38 | 39 | if has_simplejson: 40 | from simplejson import JSONDecodeError 41 | else: 42 | from json import JSONDecodeError 43 | 44 | # Keep OrderedDict for backwards compatibility. 45 | from collections import OrderedDict 46 | from collections.abc import Callable, Mapping, MutableMapping 47 | from http import cookiejar as cookielib 48 | from http.cookies import Morsel 49 | from io import StringIO 50 | 51 | # -------------- 52 | # Legacy Imports 53 | # -------------- 54 | from urllib.parse import ( 55 | quote, 56 | quote_plus, 57 | unquote, 58 | unquote_plus, 59 | urldefrag, 60 | urlencode, 61 | urljoin, 62 | urlparse, 63 | urlsplit, 64 | urlunparse, 65 | ) 66 | from urllib.request import ( 67 | getproxies, 68 | getproxies_environment, 69 | parse_http_list, 70 | proxy_bypass, 71 | proxy_bypass_environment, 72 | ) 73 | 74 | builtin_str = str 75 | str = str 76 | bytes = bytes 77 | basestring = (str, bytes) 78 | numeric_types = (int, float) 79 | integer_types = (int,) 80 | -------------------------------------------------------------------------------- /lib/urllib3/util/proxy.py: -------------------------------------------------------------------------------- 1 | from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version 2 | 3 | 4 | def connection_requires_http_tunnel( 5 | proxy_url=None, proxy_config=None, destination_scheme=None 6 | ): 7 | """ 8 | Returns True if the connection requires an HTTP CONNECT through the proxy. 9 | 10 | :param URL proxy_url: 11 | URL of the proxy. 12 | :param ProxyConfig proxy_config: 13 | Proxy configuration from poolmanager.py 14 | :param str destination_scheme: 15 | The scheme of the destination. (i.e https, http, etc) 16 | """ 17 | # If we're not using a proxy, no way to use a tunnel. 18 | if proxy_url is None: 19 | return False 20 | 21 | # HTTP destinations never require tunneling, we always forward. 
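    # (e.g. a request for "http://example.com" sent through a proxy is simply
    # forwarded by the proxy, so no CONNECT request is needed)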
22 | if destination_scheme == "http": 23 | return False 24 | 25 | # Support for forwarding with HTTPS proxies and HTTPS destinations. 26 | if ( 27 | proxy_url.scheme == "https" 28 | and proxy_config 29 | and proxy_config.use_forwarding_for_https 30 | ): 31 | return False 32 | 33 | # Otherwise always use a tunnel. 34 | return True 35 | 36 | 37 | def create_proxy_ssl_context( 38 | ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None 39 | ): 40 | """ 41 | Generates a default proxy ssl context if one hasn't been provided by the 42 | user. 43 | """ 44 | ssl_context = create_urllib3_context( 45 | ssl_version=resolve_ssl_version(ssl_version), 46 | cert_reqs=resolve_cert_reqs(cert_reqs), 47 | ) 48 | 49 | if ( 50 | not ca_certs 51 | and not ca_cert_dir 52 | and not ca_cert_data 53 | and hasattr(ssl_context, "load_default_certs") 54 | ): 55 | ssl_context.load_default_certs() 56 | 57 | return ssl_context 58 | -------------------------------------------------------------------------------- /lib/Flox_lib-0.19.3.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: Flox-lib 3 | Version: 0.19.3 4 | Summary: Python library to help build Flow Launcher and Wox plugins. 5 | Home-page: https://github.com/Garulf/Flox 6 | Author: William McAllister 7 | Author-email: dev.garulf@gmail.com 8 | License: MIT 9 | Project-URL: Bug Tracker, https://github.com/Garulf/Flox/issues 10 | Classifier: Programming Language :: Python :: 3 11 | Classifier: License :: OSI Approved :: MIT License 12 | Classifier: Operating System :: Microsoft :: Windows 13 | Description-Content-Type: text/markdown 14 | License-File: LICENSE.txt 15 | 16 | [![Release](https://github.com/Garulf/Flox/actions/workflows/release.yml/badge.svg?branch=main)](https://github.com/Garulf/Flox/actions/workflows/release.yml) 17 | # FLOX 18 | 19 | Flox is a Python library to help build Flow Launcher and Wox plugins 20 | 21 | Flox adds many useful methods to speed up plugin development 22 | 23 | Heavily inspired from the great work done by deanishe at: [deanishe/alfred-workflow](https://github.com/deanishe/alfred-workflow) 24 | 25 | ## Installation 26 | 27 | 28 | ### PIP install from pypi 29 | 30 | ``` 31 | pip install flox-lib 32 | ``` 33 | 34 | ### PIP install from github 35 | 36 | ``` 37 | pip install git+https://github.com/garulf/flox.git 38 | ``` 39 | 40 | ## Basic Usage 41 | 42 | ``` 43 | from flox import Flox 44 | 45 | import requests 46 | 47 | # have your class inherit from Flox 48 | class YourClass(Flox): 49 | 50 | def query(self, query): 51 | for _ in range(250): 52 | self.add_item( 53 | title=self.args, 54 | subtitle=str(_) 55 | ) 56 | 57 | def context_menu(self, data): 58 | self.add_item( 59 | title=data, 60 | subtitle=data 61 | ) 62 | 63 | if __name__ == "__main__": 64 | YourClass() 65 | ``` 66 | -------------------------------------------------------------------------------- /lib/charset_normalizer/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Charset-Normalizer 4 | ~~~~~~~~~~~~~~ 5 | The Real First Universal Charset Detector. 6 | A library that helps you read text from an unknown charset encoding. 7 | Motivated by chardet, This package is trying to resolve the issue by taking a new approach. 8 | All IANA character set names for which the Python core library provides codecs are supported. 
9 | 10 | Basic usage: 11 | >>> from charset_normalizer import from_bytes 12 | >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) 13 | >>> best_guess = results.best() 14 | >>> str(best_guess) 15 | 'Bсеки човек има право на образование. Oбразованието!' 16 | 17 | Others methods and usages are available - see the full documentation 18 | at . 19 | :copyright: (c) 2021 by Ahmed TAHRI 20 | :license: MIT, see LICENSE for more details. 21 | """ 22 | import logging 23 | 24 | from .api import from_bytes, from_fp, from_path, normalize 25 | from .legacy import ( 26 | CharsetDetector, 27 | CharsetDoctor, 28 | CharsetNormalizerMatch, 29 | CharsetNormalizerMatches, 30 | detect, 31 | ) 32 | from .models import CharsetMatch, CharsetMatches 33 | from .utils import set_logging_handler 34 | from .version import VERSION, __version__ 35 | 36 | __all__ = ( 37 | "from_fp", 38 | "from_path", 39 | "from_bytes", 40 | "normalize", 41 | "detect", 42 | "CharsetMatch", 43 | "CharsetMatches", 44 | "CharsetNormalizerMatch", 45 | "CharsetNormalizerMatches", 46 | "CharsetDetector", 47 | "CharsetDoctor", 48 | "__version__", 49 | "VERSION", 50 | "set_logging_handler", 51 | ) 52 | 53 | # Attach a NullHandler to the top level logger by default 54 | # https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library 55 | 56 | logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) 57 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Here is an inevitably incomplete list of MUCH-APPRECIATED CONTRIBUTORS -- 2 | people who have submitted patches, reported bugs, added translations, helped 3 | answer newbie questions, and generally made Pyperclip that much better: 4 | 5 | Al Sweigart 6 | Alexander Cobleigh ‏@cblgh 7 | Andrea Scarpino https://github.com/ilpianista 8 | Aniket Pandey https://github.com/lordaniket06 9 | Anton Yakutovich https://github.com/drakulavich 10 | Brian Levin https://github.com/bnice5000 11 | Carvell Scott https://github.com/CarvellScott 12 | Cees Timmerman https://github.com/CTimmerman 13 | Chris Clark 14 | Christopher Lambert https://github.com/XN137 15 | Chris Woerz https://github.com/erendrake 16 | Edd Barrett https://github.com/vext01 17 | Eugene Yang https://github.com/eugene-yang 18 | Felix Yan https://github.com/felixonmars 19 | fthoma https://github.com/fthoma 20 | Greg Witt https://github.com/GoodGuyGregory 21 | hinlader https://github.com/hinlader 22 | Hugo https://github.com/hugovk 23 | Hynek Cernoch https://github.com/hynekcer 24 | Jason R. 
Coombs https://github.com/jaraco 25 | Jon Crall https://github.com/Erotemic 26 | Jonathan Slenders https://github.com/jonathanslenders 27 | JustAShoeMaker https://github.com/JustAShoeMaker 28 | Marcelo Glezer https://github.com/gato 29 | Maximilian Hils https://github.com/mhils 30 | Michał Górny https://github.com/mgorny 31 | Nikolaos-Digenis Karagiannis https://github.com/Digenis 32 | Nils Ohlmeier https://github.com/nils-ohlmeier 33 | Orson Peters https://github.com/orlp 34 | pgajdos https://github.com/pgajdos 35 | Six https://github.com/brbsix 36 | Stefan Devai https://github.com/stefandevai 37 | Stefan Scherfke https://github.com/sscherfke 38 | Steve Elam 39 | Tamir Bahar https://github.com/tmr232 40 | Terrel Shumway https://github.com/lernisto 41 | Tim Cuthbertson https://github.com/timbertson 42 | Todd Leonhardt https://github.com/tleonhardt 43 | Troy Sankey https://github.com/pwnage101 44 | utagawa kiki https://github.com/utgwkk 45 | Vince West https://github.com/dvincentwest 46 | ZEDGR https://github.com/ZEDGR 47 | -------------------------------------------------------------------------------- /lib/idna/intranges.py: -------------------------------------------------------------------------------- 1 | """ 2 | Given a list of integers, made up of (hopefully) a small number of long runs 3 | of consecutive integers, compute a representation of the form 4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present 5 | in the original list?" in time O(log(# runs)). 6 | """ 7 | 8 | import bisect 9 | from typing import List, Tuple 10 | 11 | def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: 12 | """Represent a list of integers as a sequence of ranges: 13 | ((start_0, end_0), (start_1, end_1), ...), such that the original 14 | integers are exactly those x such that start_i <= x < end_i for some i. 15 | 16 | Ranges are encoded as single integers (start << 32 | end), not as tuples. 17 | """ 18 | 19 | sorted_list = sorted(list_) 20 | ranges = [] 21 | last_write = -1 22 | for i in range(len(sorted_list)): 23 | if i+1 < len(sorted_list): 24 | if sorted_list[i] == sorted_list[i+1]-1: 25 | continue 26 | current_range = sorted_list[last_write+1:i+1] 27 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) 28 | last_write = i 29 | 30 | return tuple(ranges) 31 | 32 | def _encode_range(start: int, end: int) -> int: 33 | return (start << 32) | end 34 | 35 | def _decode_range(r: int) -> Tuple[int, int]: 36 | return (r >> 32), (r & ((1 << 32) - 1)) 37 | 38 | 39 | def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: 40 | """Determine if `int_` falls into one of the ranges in `ranges`.""" 41 | tuple_ = _encode_range(int_, 0) 42 | pos = bisect.bisect_left(ranges, tuple_) 43 | # we could be immediately ahead of a tuple (start, end) 44 | # with start < int_ <= end 45 | if pos > 0: 46 | left, right = _decode_range(ranges[pos-1]) 47 | if left <= int_ < right: 48 | return True 49 | # or we could be immediately behind a tuple (int_, end) 50 | if pos < len(ranges): 51 | left, _ = _decode_range(ranges[pos]) 52 | if left == int_: 53 | return True 54 | return False 55 | -------------------------------------------------------------------------------- /lib/pyperclip-1.8.2.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: pyperclip 3 | Version: 1.8.2 4 | Summary: A cross-platform clipboard module for Python. (Only handles plain text for now.) 
5 | Home-page: https://github.com/asweigart/pyperclip 6 | Author: Al Sweigart 7 | Author-email: al@inventwithpython.com 8 | License: BSD 9 | Keywords: clipboard copy paste clip xsel xclip 10 | Platform: UNKNOWN 11 | Classifier: Development Status :: 5 - Production/Stable 12 | Classifier: Environment :: Win32 (MS Windows) 13 | Classifier: Environment :: X11 Applications 14 | Classifier: Environment :: MacOS X 15 | Classifier: Intended Audience :: Developers 16 | Classifier: License :: OSI Approved :: BSD License 17 | Classifier: Operating System :: OS Independent 18 | Classifier: Programming Language :: Python 19 | Classifier: Programming Language :: Python :: 2 20 | Classifier: Programming Language :: Python :: 2.6 21 | Classifier: Programming Language :: Python :: 2.7 22 | Classifier: Programming Language :: Python :: 3 23 | Classifier: Programming Language :: Python :: 3.1 24 | Classifier: Programming Language :: Python :: 3.2 25 | Classifier: Programming Language :: Python :: 3.3 26 | Classifier: Programming Language :: Python :: 3.4 27 | Classifier: Programming Language :: Python :: 3.5 28 | Classifier: Programming Language :: Python :: 3.6 29 | Classifier: Programming Language :: Python :: 3.7 30 | Classifier: Programming Language :: Python :: 3.8 31 | Classifier: Programming Language :: Python :: 3.9 32 | License-File: LICENSE.txt 33 | License-File: AUTHORS.txt 34 | 35 | Pyperclip is a cross-platform Python module for copy and paste clipboard functions. It works with Python 2 and 3. 36 | 37 | Install on Windows: `pip install pyperclip` 38 | 39 | Install on Linux/macOS: `pip3 install pyperclip` 40 | 41 | Al Sweigart al@inventwithpython.com 42 | BSD License 43 | 44 | Example Usage 45 | ============= 46 | 47 | >>> import pyperclip 48 | >>> pyperclip.copy('The text to be copied to the clipboard.') 49 | >>> pyperclip.paste() 50 | 'The text to be copied to the clipboard.' 51 | 52 | 53 | Currently only handles plaintext. 54 | 55 | On Windows, no additional modules are needed. 56 | 57 | On Mac, this module makes use of the pbcopy and pbpaste commands, which should come with the os. 58 | 59 | On Linux, this module makes use of the xclip or xsel commands, which should come with the os. Otherwise run "sudo apt-get install xclip" or "sudo apt-get install xsel" (Note: xsel does not always seem to work.) 60 | 61 | Otherwise on Linux, you will need the gtk or PyQt4 modules installed. 62 | 63 | 64 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | wheels/ 22 | pip-wheel-metadata/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 91 | #Pipfile.lock 92 | 93 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 94 | __pypackages__/ 95 | 96 | # Celery stuff 97 | celerybeat-schedule 98 | celerybeat.pid 99 | 100 | # SageMath parsed files 101 | *.sage.py 102 | 103 | # Environments 104 | .env 105 | .venv 106 | env/ 107 | venv/ 108 | ENV/ 109 | env.bak/ 110 | venv.bak/ 111 | 112 | # Spyder project settings 113 | .spyderproject 114 | .spyproject 115 | 116 | # Rope project settings 117 | .ropeproject 118 | 119 | # mkdocs documentation 120 | /site 121 | 122 | # mypy 123 | .mypy_cache/ 124 | .dmypy.json 125 | dmypy.json 126 | 127 | # Pyre type checker 128 | .pyre/ 129 | 130 | # Config and temp plugin files 131 | system_messages.csv 132 | temp_text.txt 133 | Conversations *.txt -------------------------------------------------------------------------------- /lib/charset_normalizer-2.1.1.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | ../../bin/normalizer.exe,sha256=KTaILy0cIi1WSYW_NvL6vOFym9P3BBVnFqk4l4QU7mc,108457 2 | charset_normalizer-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 3 | charset_normalizer-2.1.1.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 4 | charset_normalizer-2.1.1.dist-info/METADATA,sha256=C99l12g4d1E9_UiW-mqPCWx7v2M_lYGWxy1GTOjXSsA,11942 5 | charset_normalizer-2.1.1.dist-info/RECORD,, 6 | charset_normalizer-2.1.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 7 | charset_normalizer-2.1.1.dist-info/entry_points.txt,sha256=uYo8aIGLWv8YgWfSna5HnfY_En4pkF1w4bgawNAXzP0,76 8 | charset_normalizer-2.1.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 9 | charset_normalizer/__init__.py,sha256=jGhhf1IcOgCpZsr593E9fPvjWKnflVqHe_LwkOJjInU,1790 10 | charset_normalizer/__pycache__/__init__.cpython-310.pyc,, 11 | charset_normalizer/__pycache__/api.cpython-310.pyc,, 12 | charset_normalizer/__pycache__/cd.cpython-310.pyc,, 13 | charset_normalizer/__pycache__/constant.cpython-310.pyc,, 14 | charset_normalizer/__pycache__/legacy.cpython-310.pyc,, 15 | charset_normalizer/__pycache__/md.cpython-310.pyc,, 16 | charset_normalizer/__pycache__/models.cpython-310.pyc,, 17 | charset_normalizer/__pycache__/utils.cpython-310.pyc,, 18 | charset_normalizer/__pycache__/version.cpython-310.pyc,, 19 | 
charset_normalizer/api.py,sha256=euVPmjAMbjpqhEHPjfKtyy1mK52U0TOUBUQgM_Qy6eE,19191 20 | charset_normalizer/assets/__init__.py,sha256=r7aakPaRIc2FFG2mw2V8NOTvkl25_euKZ3wPf5SAVa4,15222 21 | charset_normalizer/assets/__pycache__/__init__.cpython-310.pyc,, 22 | charset_normalizer/cd.py,sha256=Pxdkbn4cy0iZF42KTb1FiWIqqKobuz_fDjGwc6JMNBc,10811 23 | charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 24 | charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc,, 25 | charset_normalizer/cli/__pycache__/normalizer.cpython-310.pyc,, 26 | charset_normalizer/cli/normalizer.py,sha256=FmD1RXeMpRBg_mjR0MaJhNUpM2qZ8wz2neAE7AayBeg,9521 27 | charset_normalizer/constant.py,sha256=NgU-pY8JH2a9lkVT8oKwAFmIUYNKOuSBwZgF9MrlNCM,19157 28 | charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384 29 | charset_normalizer/md.py,sha256=pZP8IVpSC82D8INA9Tf_y0ijJSRI-UIncZvLdfTWEd4,17642 30 | charset_normalizer/models.py,sha256=i68YdlSLTEI3EEBVXq8TLNAbyyjrLC2OWszc-OBAk9I,13167 31 | charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 32 | charset_normalizer/utils.py,sha256=ykOznhcAeH-ODLBWJuI7t1nbwa1SAfN_bDYTCJGyh4U,11771 33 | charset_normalizer/version.py,sha256=_eh2MA3qS__IajlePQxKBmlw6zaBDvPYlLdEgxgIojw,79 34 | -------------------------------------------------------------------------------- /lib/flox/clipboard.py: -------------------------------------------------------------------------------- 1 | import ctypes 2 | 3 | from ctypes.wintypes import BOOL, HWND, HANDLE, HGLOBAL, UINT, LPVOID 4 | from ctypes import c_size_t as SIZE_T 5 | 6 | # Credit for code goes to Mark Ransom at https://stackoverflow.com/a/25678113 7 | 8 | OpenClipboard = ctypes.windll.user32.OpenClipboard 9 | OpenClipboard.argtypes = HWND, 10 | OpenClipboard.restype = BOOL 11 | EmptyClipboard = ctypes.windll.user32.EmptyClipboard 12 | EmptyClipboard.restype = BOOL 13 | GetClipboardData = ctypes.windll.user32.GetClipboardData 14 | GetClipboardData.argtypes = UINT, 15 | GetClipboardData.restype = HANDLE 16 | SetClipboardData = ctypes.windll.user32.SetClipboardData 17 | SetClipboardData.argtypes = UINT, HANDLE 18 | SetClipboardData.restype = HANDLE 19 | CloseClipboard = ctypes.windll.user32.CloseClipboard 20 | CloseClipboard.restype = BOOL 21 | CF_UNICODETEXT = 13 22 | 23 | GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc 24 | GlobalAlloc.argtypes = UINT, SIZE_T 25 | GlobalAlloc.restype = HGLOBAL 26 | GlobalLock = ctypes.windll.kernel32.GlobalLock 27 | GlobalLock.argtypes = HGLOBAL, 28 | GlobalLock.restype = LPVOID 29 | GlobalUnlock = ctypes.windll.kernel32.GlobalUnlock 30 | GlobalUnlock.argtypes = HGLOBAL, 31 | GlobalSize = ctypes.windll.kernel32.GlobalSize 32 | GlobalSize.argtypes = HGLOBAL, 33 | GlobalSize.restype = SIZE_T 34 | 35 | GMEM_MOVEABLE = 0x0002 36 | GMEM_ZEROINIT = 0x0040 37 | 38 | unicode_type = type(u'') 39 | 40 | class Clipboard(object): 41 | 42 | def get(self): 43 | return get() 44 | 45 | def put(self, text): 46 | return put(text) 47 | 48 | def get(): 49 | """ 50 | Get the contents of the clipboard. 
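    Returns the clipboard contents as ``str``, or ``None`` if no Unicode text
    is available. A round-trip sketch:

        put(u"hello from flox")
        assert get() == u"hello from flox"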
51 | """ 52 | text = None 53 | OpenClipboard(None) 54 | handle = GetClipboardData(CF_UNICODETEXT) 55 | pcontents = GlobalLock(handle) 56 | size = GlobalSize(handle) 57 | if pcontents and size: 58 | raw_data = ctypes.create_string_buffer(size) 59 | ctypes.memmove(raw_data, pcontents, size) 60 | text = raw_data.raw.decode('utf-16le').rstrip(u'\0') 61 | GlobalUnlock(handle) 62 | CloseClipboard() 63 | return text 64 | 65 | def put(s): 66 | """ 67 | Put the given string onto the clipboard. 68 | """ 69 | if not isinstance(s, unicode_type): 70 | s = s.decode('mbcs') 71 | data = s.encode('utf-16le') 72 | OpenClipboard(None) 73 | EmptyClipboard() 74 | handle = GlobalAlloc(GMEM_MOVEABLE | GMEM_ZEROINIT, len(data) + 2) 75 | pcontents = GlobalLock(handle) 76 | ctypes.memmove(pcontents, data, len(data)) 77 | GlobalUnlock(handle) 78 | SetClipboardData(CF_UNICODETEXT, handle) 79 | CloseClipboard() 80 | 81 | def copy(s): 82 | put(s) -------------------------------------------------------------------------------- /lib/flowlauncher/FlowLauncherAPI.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from json import dumps 4 | 5 | class FlowLauncherAPI: 6 | 7 | @classmethod 8 | def change_query(cls, query, requery: bool = False): 9 | """ 10 | change flow launcher query 11 | """ 12 | print(dumps({ 13 | "method": "Flow.Launcher.ChangeQuery", 14 | "parameters": [query, requery]})) 15 | 16 | @classmethod 17 | def shell_run(cls, cmd): 18 | """ 19 | run shell commands 20 | """ 21 | print(dumps({ 22 | "method": "Flow.Launcher.ShellRun", 23 | "parameters": [cmd]})) 24 | 25 | @classmethod 26 | def close_app(cls): 27 | """ 28 | close flow launcher 29 | """ 30 | print(dumps({ 31 | "method": "Flow.Launcher.CloseApp", 32 | "parameters": []})) 33 | 34 | @classmethod 35 | def hide_app(cls): 36 | """ 37 | hide flow launcher 38 | """ 39 | print(dumps({ 40 | "method": "Flow.Launcher.HideApp", 41 | "parameters": []})) 42 | 43 | @classmethod 44 | def show_app(cls): 45 | """ 46 | show flow launcher 47 | """ 48 | print(dumps({ 49 | "method": "Flow.Launcher.ShowApp", 50 | "parameters": []})) 51 | 52 | @classmethod 53 | def show_msg(cls, title: str, sub_title: str, ico_path: str = ""): 54 | """ 55 | show messagebox 56 | """ 57 | print(dumps({ 58 | "method": "Flow.Launcher.ShowMsg", 59 | "parameters": [title, sub_title, ico_path]})) 60 | 61 | @classmethod 62 | def open_setting_dialog(cls): 63 | """ 64 | open setting dialog 65 | """ 66 | print(dumps({ 67 | "method": "Flow.Launcher.OpenSettingDialog", 68 | "parameters": []})) 69 | 70 | @classmethod 71 | def start_loadingbar(cls): 72 | """ 73 | start loading animation in flow launcher 74 | """ 75 | print(dumps({ 76 | "method": "Flow.Launcher.StartLoadingBar", 77 | "parameters": []})) 78 | 79 | @classmethod 80 | def stop_loadingbar(cls): 81 | """ 82 | stop loading animation in flow launcher 83 | """ 84 | print(dumps({ 85 | "method": "Flow.Launcher.StopLoadingBar", 86 | "parameters": []})) 87 | 88 | @classmethod 89 | def reload_plugins(cls): 90 | """ 91 | reload all flow launcher plugins 92 | """ 93 | print(dumps({ 94 | "method": "Flow.Launcher.ReloadPlugins", 95 | "parameters": []})) 96 | -------------------------------------------------------------------------------- /lib/urllib3/filepost.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import binascii 4 | import codecs 5 | import os 6 | from io import BytesIO 7 | 8 | from .fields 
import RequestField 9 | from .packages import six 10 | from .packages.six import b 11 | 12 | writer = codecs.lookup("utf-8")[3] 13 | 14 | 15 | def choose_boundary(): 16 | """ 17 | Our embarrassingly-simple replacement for mimetools.choose_boundary. 18 | """ 19 | boundary = binascii.hexlify(os.urandom(16)) 20 | if not six.PY2: 21 | boundary = boundary.decode("ascii") 22 | return boundary 23 | 24 | 25 | def iter_field_objects(fields): 26 | """ 27 | Iterate over fields. 28 | 29 | Supports list of (k, v) tuples and dicts, and lists of 30 | :class:`~urllib3.fields.RequestField`. 31 | 32 | """ 33 | if isinstance(fields, dict): 34 | i = six.iteritems(fields) 35 | else: 36 | i = iter(fields) 37 | 38 | for field in i: 39 | if isinstance(field, RequestField): 40 | yield field 41 | else: 42 | yield RequestField.from_tuples(*field) 43 | 44 | 45 | def iter_fields(fields): 46 | """ 47 | .. deprecated:: 1.6 48 | 49 | Iterate over fields. 50 | 51 | The addition of :class:`~urllib3.fields.RequestField` makes this function 52 | obsolete. Instead, use :func:`iter_field_objects`, which returns 53 | :class:`~urllib3.fields.RequestField` objects. 54 | 55 | Supports list of (k, v) tuples and dicts. 56 | """ 57 | if isinstance(fields, dict): 58 | return ((k, v) for k, v in six.iteritems(fields)) 59 | 60 | return ((k, v) for k, v in fields) 61 | 62 | 63 | def encode_multipart_formdata(fields, boundary=None): 64 | """ 65 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format. 66 | 67 | :param fields: 68 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). 69 | 70 | :param boundary: 71 | If not specified, then a random boundary will be generated using 72 | :func:`urllib3.filepost.choose_boundary`. 73 | """ 74 | body = BytesIO() 75 | if boundary is None: 76 | boundary = choose_boundary() 77 | 78 | for field in iter_field_objects(fields): 79 | body.write(b("--%s\r\n" % (boundary))) 80 | 81 | writer(body).write(field.render_headers()) 82 | data = field.data 83 | 84 | if isinstance(data, int): 85 | data = str(data) # Backwards compatibility 86 | 87 | if isinstance(data, six.text_type): 88 | writer(body).write(data) 89 | else: 90 | body.write(data) 91 | 92 | body.write(b"\r\n") 93 | 94 | body.write(b("--%s--\r\n" % (boundary))) 95 | 96 | content_type = str("multipart/form-data; boundary=%s" % boundary) 97 | 98 | return body.getvalue(), content_type 99 | -------------------------------------------------------------------------------- /lib/requests-2.28.1.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | requests-2.28.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | requests-2.28.1.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142 3 | requests-2.28.1.dist-info/METADATA,sha256=eoNYSJuPWbql7Til9dKPb--KRU2ouGR4g7UxtZFoHNU,4641 4 | requests-2.28.1.dist-info/RECORD,, 5 | requests-2.28.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 6 | requests-2.28.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 7 | requests-2.28.1.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9 8 | requests/__init__.py,sha256=S2K0jnVP6CSrT51SctFyiB0XfI8H9Nt7EqzERAD44gg,4972 9 | requests/__pycache__/__init__.cpython-310.pyc,, 10 | requests/__pycache__/__version__.cpython-310.pyc,, 11 | requests/__pycache__/_internal_utils.cpython-310.pyc,, 12 | requests/__pycache__/adapters.cpython-310.pyc,, 13 | 
requests/__pycache__/api.cpython-310.pyc,, 14 | requests/__pycache__/auth.cpython-310.pyc,, 15 | requests/__pycache__/certs.cpython-310.pyc,, 16 | requests/__pycache__/compat.cpython-310.pyc,, 17 | requests/__pycache__/cookies.cpython-310.pyc,, 18 | requests/__pycache__/exceptions.cpython-310.pyc,, 19 | requests/__pycache__/help.cpython-310.pyc,, 20 | requests/__pycache__/hooks.cpython-310.pyc,, 21 | requests/__pycache__/models.cpython-310.pyc,, 22 | requests/__pycache__/packages.cpython-310.pyc,, 23 | requests/__pycache__/sessions.cpython-310.pyc,, 24 | requests/__pycache__/status_codes.cpython-310.pyc,, 25 | requests/__pycache__/structures.cpython-310.pyc,, 26 | requests/__pycache__/utils.cpython-310.pyc,, 27 | requests/__version__.py,sha256=nJVa3ef2yRyeYMhy7yHnRyjjpnNTDykZsE4Sp9irBC4,440 28 | requests/_internal_utils.py,sha256=aSPlF4uDhtfKxEayZJJ7KkAxtormeTfpwKSBSwtmAUw,1397 29 | requests/adapters.py,sha256=sEnHGl4mJz4QHBT8jG6bU5aPinUtdoH3BIuAIzT-X74,21287 30 | requests/api.py,sha256=dyvkDd5itC9z2g0wHl_YfD1yf6YwpGWLO7__8e21nks,6377 31 | requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187 32 | requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429 33 | requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451 34 | requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560 35 | requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811 36 | requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875 37 | requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 38 | requests/models.py,sha256=OiVxiOdlhzpbZoxut2OhKtpYlB7WW4iHQcfqSVmT4H4,35222 39 | requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957 40 | requests/sessions.py,sha256=KUqJcRRLovNefUs7ScOXSUVCcfSayTFWtbiJ7gOSlTI,30180 41 | requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235 42 | requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 43 | requests/utils.py,sha256=5_ws-bsKI9EHl7j27yi-6HFzPBKultPgd7HfPrUToWI,33228 44 | -------------------------------------------------------------------------------- /lib/flox/browser.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from subprocess import Popen, PIPE, CREATE_NO_WINDOW 3 | import webbrowser 4 | from winreg import OpenKey, QueryValueEx, HKEY_CURRENT_USER as HKCU, HKEY_LOCAL_MACHINE as HKLM 5 | 6 | log = logging.getLogger(__name__) 7 | 8 | DEFAULT_BROWSER_KEYWORD = "*" 9 | MICROSOFT_EDGE = 'msedge' 10 | CHROME = 'chrome' 11 | FIREFOX = 'firefox' 12 | NEW_WINDOW_ARG = "--new-window" 13 | 14 | 15 | CHROME_PATH = r"SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\chrome.exe" 16 | FIREFOX_PATH = r"SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\firefox.exe" 17 | MSEDGE_PATH = r"SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\msedge.exe" 18 | DEFAULT_BROWSER_PATH = r"Software\Microsoft\Windows\Shell\Associations\UrlAssociations\http\UserChoice" 19 | 20 | DEFAULT_BROWSERS = { 21 | CHROME: CHROME_PATH, 22 | FIREFOX: FIREFOX_PATH, 23 | MICROSOFT_EDGE: MSEDGE_PATH, 24 | DEFAULT_BROWSER_KEYWORD: DEFAULT_BROWSER_PATH 25 | } 26 | 27 | def get_reg(path, base_path=HKLM, name=""): 28 | try: 29 | with OpenKey(base_path, path) as key: 30 | return QueryValueEx(key, name)[0] 31 | except FileNotFoundError: 32 | log.exception(f'Can\'t find browser "{path}"') 33 | 34 | class Browser(object): 35 | 36 | def __init__(self, settings): 37 | 
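        # Defaults below are overridden by the matching keys of the entry
        # selected from settings['CustomBrowserList'], when one is configured.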
self.Name = None 38 | self.Path = None 39 | self.PrivateArg = None 40 | self.EnablePrivate = False 41 | self.OpenInTab = True 42 | self.Editable = False 43 | self.CustomBrowserIndex = settings.get('CustomBrowserIndex', 0) 44 | self.CustomBrowserList = settings.get('CustomBrowserList', []) 45 | try: 46 | self.current_browser = self.CustomBrowserList[self.CustomBrowserIndex] 47 | except IndexError: 48 | self.current_browser = {} 49 | for item in self.current_browser: 50 | setattr(self, item, self.current_browser[item]) 51 | 52 | def open(self, url): 53 | try: 54 | cmd = [self.get_exe(), url] 55 | if self.current_browser.get('EnablePrivate', False): 56 | cmd.append(self.current_browser['PrivateArg']) 57 | if not self.OpenInTab: 58 | cmd.append(NEW_WINDOW_ARG) 59 | log.debug(f'Opening {url} with {cmd}') 60 | Popen(cmd, creationflags=CREATE_NO_WINDOW) 61 | # All else fails, open in default browser and log error 62 | except Exception as e: 63 | log.exception(f'Can\'t open {url} with {self.Name}') 64 | webbrowser.open(url) 65 | 66 | def get_exe(self): 67 | key = self.Path or DEFAULT_BROWSER_KEYWORD 68 | if key == DEFAULT_BROWSER_KEYWORD: 69 | browser = get_reg(DEFAULT_BROWSER_PATH, HKCU, 'Progid') 70 | key = browser.split('-')[0].replace('url', '').replace('HTML', '').lower() 71 | if key in DEFAULT_BROWSERS: 72 | _path = DEFAULT_BROWSERS.get(key) 73 | return get_reg(_path) 74 | else: 75 | return key -------------------------------------------------------------------------------- /lib/certifi-2022.12.7.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: certifi 3 | Version: 2022.12.7 4 | Summary: Python package for providing Mozilla's CA Bundle. 5 | Home-page: https://github.com/certifi/python-certifi 6 | Author: Kenneth Reitz 7 | Author-email: me@kennethreitz.com 8 | License: MPL-2.0 9 | Project-URL: Source, https://github.com/certifi/python-certifi 10 | Platform: UNKNOWN 11 | Classifier: Development Status :: 5 - Production/Stable 12 | Classifier: Intended Audience :: Developers 13 | Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) 14 | Classifier: Natural Language :: English 15 | Classifier: Programming Language :: Python 16 | Classifier: Programming Language :: Python :: 3 17 | Classifier: Programming Language :: Python :: 3 :: Only 18 | Classifier: Programming Language :: Python :: 3.6 19 | Classifier: Programming Language :: Python :: 3.7 20 | Classifier: Programming Language :: Python :: 3.8 21 | Classifier: Programming Language :: Python :: 3.9 22 | Classifier: Programming Language :: Python :: 3.10 23 | Classifier: Programming Language :: Python :: 3.11 24 | Requires-Python: >=3.6 25 | License-File: LICENSE 26 | 27 | Certifi: Python SSL Certificates 28 | ================================ 29 | 30 | Certifi provides Mozilla's carefully curated collection of Root Certificates for 31 | validating the trustworthiness of SSL certificates while verifying the identity 32 | of TLS hosts. It has been extracted from the `Requests`_ project. 33 | 34 | Installation 35 | ------------ 36 | 37 | ``certifi`` is available on PyPI. 
Simply install it with ``pip``:: 38 | 39 | $ pip install certifi 40 | 41 | Usage 42 | ----- 43 | 44 | To reference the installed certificate authority (CA) bundle, you can use the 45 | built-in function:: 46 | 47 | >>> import certifi 48 | 49 | >>> certifi.where() 50 | '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' 51 | 52 | Or from the command line:: 53 | 54 | $ python -m certifi 55 | /usr/local/lib/python3.7/site-packages/certifi/cacert.pem 56 | 57 | Enjoy! 58 | 59 | 1024-bit Root Certificates 60 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 61 | 62 | Browsers and certificate authorities have concluded that 1024-bit keys are 63 | unacceptably weak for certificates, particularly root certificates. For this 64 | reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its 65 | bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) 66 | certificate from the same CA. Because Mozilla removed these certificates from 67 | its bundle, ``certifi`` removed them as well. 68 | 69 | In previous versions, ``certifi`` provided the ``certifi.old_where()`` function 70 | to intentionally re-add the 1024-bit roots back into your bundle. This was not 71 | recommended in production and therefore was removed at the end of 2018. 72 | 73 | .. _`Requests`: https://requests.readthedocs.io/en/master/ 74 | 75 | Addition/Removal of Certificates 76 | -------------------------------- 77 | 78 | Certifi does not support any addition/removal or other modification of the 79 | CA trust store content. This project is intended to provide a reliable and 80 | highly portable root of trust to python deployments. Look to upstream projects 81 | for methods to use alternate trust. 82 | 83 | 84 | -------------------------------------------------------------------------------- /lib/flowlauncher-0.2.0.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: flowlauncher 3 | Version: 0.2.0 4 | Summary: Flow Launcher supports Python by JsonRPC. 
5 | Home-page: https://github.com/Flow-Launcher/Flow.Launcher.JsonRPC.Python 6 | Download-URL: https://github.com/Flow-Launcher/Flow.Launcher.JsonRPC.Python/archive/master.zip 7 | Author: Flow-Launcher 8 | Author-email: Zeroto521@gmail.com 9 | Maintainer: Zero 10 | Maintainer-email: Zeroto521@gmail.com 11 | License: MIT 12 | Platform: Windows 13 | Classifier: Development Status :: 3 - Alpha 14 | Classifier: Intended Audience :: Developers 15 | Classifier: License :: OSI Approved :: MIT License 16 | Classifier: Natural Language :: English 17 | Classifier: Operating System :: Microsoft :: Windows 18 | Classifier: Programming Language :: Python 19 | Classifier: Programming Language :: Python :: 3 20 | Classifier: Programming Language :: Python :: 3 :: Only 21 | Classifier: Programming Language :: Python :: 3.7 22 | Classifier: Programming Language :: Python :: 3.8 23 | Classifier: Programming Language :: Python :: 3.9 24 | Classifier: Programming Language :: Python :: 3.10 25 | Classifier: Topic :: Software Development 26 | Classifier: Topic :: Software Development :: Libraries 27 | Classifier: Topic :: Software Development :: Libraries :: Application Frameworks 28 | Description-Content-Type: text/markdown 29 | License-File: LICENSE 30 | 31 | # Flow.Launcher.JsonRPC.Python 32 | 33 | [![](https://img.shields.io/pypi/v/flowlauncher.svg?style=for-the-badge)](https://pypi.org/project/flowlauncher/) 34 | [![PyPI pyversions](https://img.shields.io/pypi/pyversions/flowlauncher?style=for-the-badge)](https://pypi.org/project/flowlauncher/) 35 | 36 | Flow Launcher supports Python by JsonRPC. 37 | 38 | ## JSON-RPC 39 | 40 | > [JSON-RPC](https://en.wikipedia.org/wiki/JSON-RPC) is a remote procedure call protocol encoded in JSON. 41 | 42 | In Flow Launcher, we use JSON-RPC as a **local** procedure call protocol to bind Flow and other program languages. 43 | 44 | So we need to build a **common API** between Flow and Plugin. 45 | 46 | ![JsonRPC](./assets/jsonrpc.png) 47 | 48 | ### Example 49 | 50 | - `-->` denotes data sent to FLow. 51 | - `<--` denotes data coming from Flow. 52 | 53 | ```json 54 | --> {"method": "query", "parameters": [""]} 55 | <-- {"Title": "title", "SubTitle": "sub title", "IconPath": "favicon.ico"} 56 | ``` 57 | 58 | 59 | 60 | ## Installation 61 | 62 | ### Using `pip` 63 | 64 | ``` bash 65 | >>> pip install flowlauncher 66 | ``` 67 | 68 | ### Using `pip` + `git` 69 | 70 | ``` bash 71 | >>> pip install git+https://github.com/Flow-Launcher/Flow.Launcher.JsonRPC.Python.git 72 | ``` 73 | 74 | ### Using `git` 75 | 76 | ``` bash 77 | >>> git clone https://github.com/Flow-Launcher/Flow.Launcher.JsonRPC.Python.git 78 | >>> cd Flow.Launcher.JsonRPC.Python 79 | >>> python setup.py install 80 | ``` 81 | 82 | 83 | 84 | ### License 85 | 86 | This project is under the [MIT](./LICENSE) license. 87 | 88 | Some of the orignal codes from [JsonRPC/wox.py](https://github.com/Wox-launcher/Wox/blob/master/JsonRPC/wox.py) which is under the [MIT](https://github.com/Wox-launcher/Wox/blob/master/LICENSE) license. 89 | -------------------------------------------------------------------------------- /lib/requests/structures.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.structures 3 | ~~~~~~~~~~~~~~~~~~~ 4 | 5 | Data structures that power Requests. 
6 | """ 7 | 8 | from collections import OrderedDict 9 | 10 | from .compat import Mapping, MutableMapping 11 | 12 | 13 | class CaseInsensitiveDict(MutableMapping): 14 | """A case-insensitive ``dict``-like object. 15 | 16 | Implements all methods and operations of 17 | ``MutableMapping`` as well as dict's ``copy``. Also 18 | provides ``lower_items``. 19 | 20 | All keys are expected to be strings. The structure remembers the 21 | case of the last key to be set, and ``iter(instance)``, 22 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` 23 | will contain case-sensitive keys. However, querying and contains 24 | testing is case insensitive:: 25 | 26 | cid = CaseInsensitiveDict() 27 | cid['Accept'] = 'application/json' 28 | cid['aCCEPT'] == 'application/json' # True 29 | list(cid) == ['Accept'] # True 30 | 31 | For example, ``headers['content-encoding']`` will return the 32 | value of a ``'Content-Encoding'`` response header, regardless 33 | of how the header name was originally stored. 34 | 35 | If the constructor, ``.update``, or equality comparison 36 | operations are given keys that have equal ``.lower()``s, the 37 | behavior is undefined. 38 | """ 39 | 40 | def __init__(self, data=None, **kwargs): 41 | self._store = OrderedDict() 42 | if data is None: 43 | data = {} 44 | self.update(data, **kwargs) 45 | 46 | def __setitem__(self, key, value): 47 | # Use the lowercased key for lookups, but store the actual 48 | # key alongside the value. 49 | self._store[key.lower()] = (key, value) 50 | 51 | def __getitem__(self, key): 52 | return self._store[key.lower()][1] 53 | 54 | def __delitem__(self, key): 55 | del self._store[key.lower()] 56 | 57 | def __iter__(self): 58 | return (casedkey for casedkey, mappedvalue in self._store.values()) 59 | 60 | def __len__(self): 61 | return len(self._store) 62 | 63 | def lower_items(self): 64 | """Like iteritems(), but with all lowercase keys.""" 65 | return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items()) 66 | 67 | def __eq__(self, other): 68 | if isinstance(other, Mapping): 69 | other = CaseInsensitiveDict(other) 70 | else: 71 | return NotImplemented 72 | # Compare insensitively 73 | return dict(self.lower_items()) == dict(other.lower_items()) 74 | 75 | # Copy is required 76 | def copy(self): 77 | return CaseInsensitiveDict(self._store.values()) 78 | 79 | def __repr__(self): 80 | return str(dict(self.items())) 81 | 82 | 83 | class LookupDict(dict): 84 | """Dictionary lookup object.""" 85 | 86 | def __init__(self, name=None): 87 | self.name = name 88 | super().__init__() 89 | 90 | def __repr__(self): 91 | return f"" 92 | 93 | def __getitem__(self, key): 94 | # We allow fall-through here, so values default to None 95 | 96 | return self.__dict__.get(key, None) 97 | 98 | def get(self, key, default=None): 99 | return self.__dict__.get(key, default) 100 | -------------------------------------------------------------------------------- /lib/charset_normalizer/legacy.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from typing import Dict, Optional, Union 3 | 4 | from .api import from_bytes, from_fp, from_path, normalize 5 | from .constant import CHARDET_CORRESPONDENCE 6 | from .models import CharsetMatch, CharsetMatches 7 | 8 | 9 | def detect(byte_str: bytes) -> Dict[str, Optional[Union[str, float]]]: 10 | """ 11 | chardet legacy method 12 | Detect the encoding of the given byte string. It should be mostly backward-compatible. 
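    A drop-in sketch of the chardet-style call (the exact confidence value
    depends on the input):

        result = detect("Bсеки човек има право на образование.".encode("utf_8"))
        result["encoding"], result["language"], result["confidence"]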
13 | Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) 14 | This function is deprecated and should be used to migrate your project easily, consult the documentation for 15 | further information. Not planned for removal. 16 | 17 | :param byte_str: The byte sequence to examine. 18 | """ 19 | if not isinstance(byte_str, (bytearray, bytes)): 20 | raise TypeError( # pragma: nocover 21 | "Expected object of type bytes or bytearray, got: " 22 | "{0}".format(type(byte_str)) 23 | ) 24 | 25 | if isinstance(byte_str, bytearray): 26 | byte_str = bytes(byte_str) 27 | 28 | r = from_bytes(byte_str).best() 29 | 30 | encoding = r.encoding if r is not None else None 31 | language = r.language if r is not None and r.language != "Unknown" else "" 32 | confidence = 1.0 - r.chaos if r is not None else None 33 | 34 | # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process 35 | # but chardet does return 'utf-8-sig' and it is a valid codec name. 36 | if r is not None and encoding == "utf_8" and r.bom: 37 | encoding += "_sig" 38 | 39 | return { 40 | "encoding": encoding 41 | if encoding not in CHARDET_CORRESPONDENCE 42 | else CHARDET_CORRESPONDENCE[encoding], 43 | "language": language, 44 | "confidence": confidence, 45 | } 46 | 47 | 48 | class CharsetNormalizerMatch(CharsetMatch): 49 | pass 50 | 51 | 52 | class CharsetNormalizerMatches(CharsetMatches): 53 | @staticmethod 54 | def from_fp(*args, **kwargs): # type: ignore 55 | warnings.warn( # pragma: nocover 56 | "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " 57 | "and scheduled to be removed in 3.0", 58 | DeprecationWarning, 59 | ) 60 | return from_fp(*args, **kwargs) # pragma: nocover 61 | 62 | @staticmethod 63 | def from_bytes(*args, **kwargs): # type: ignore 64 | warnings.warn( # pragma: nocover 65 | "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " 66 | "and scheduled to be removed in 3.0", 67 | DeprecationWarning, 68 | ) 69 | return from_bytes(*args, **kwargs) # pragma: nocover 70 | 71 | @staticmethod 72 | def from_path(*args, **kwargs): # type: ignore 73 | warnings.warn( # pragma: nocover 74 | "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " 75 | "and scheduled to be removed in 3.0", 76 | DeprecationWarning, 77 | ) 78 | return from_path(*args, **kwargs) # pragma: nocover 79 | 80 | @staticmethod 81 | def normalize(*args, **kwargs): # type: ignore 82 | warnings.warn( # pragma: nocover 83 | "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " 84 | "and scheduled to be removed in 3.0", 85 | DeprecationWarning, 86 | ) 87 | return normalize(*args, **kwargs) # pragma: nocover 88 | 89 | 90 | class CharsetDetector(CharsetNormalizerMatches): 91 | pass 92 | 93 | 94 | class CharsetDoctor(CharsetNormalizerMatches): 95 | pass 96 | -------------------------------------------------------------------------------- /lib/urllib3/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more 3 | """ 4 | from __future__ import absolute_import 5 | 6 | # Set default logging handler to avoid "No handler found" warnings. 7 | import logging 8 | import warnings 9 | from logging import NullHandler 10 | 11 | from . 
import exceptions 12 | from ._version import __version__ 13 | from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url 14 | from .filepost import encode_multipart_formdata 15 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url 16 | from .response import HTTPResponse 17 | from .util.request import make_headers 18 | from .util.retry import Retry 19 | from .util.timeout import Timeout 20 | from .util.url import get_host 21 | 22 | # === NOTE TO REPACKAGERS AND VENDORS === 23 | # Please delete this block, this logic is only 24 | # for urllib3 being distributed via PyPI. 25 | # See: https://github.com/urllib3/urllib3/issues/2680 26 | try: 27 | import urllib3_secure_extra # type: ignore # noqa: F401 28 | except ImportError: 29 | pass 30 | else: 31 | warnings.warn( 32 | "'urllib3[secure]' extra is deprecated and will be removed " 33 | "in a future release of urllib3 2.x. Read more in this issue: " 34 | "https://github.com/urllib3/urllib3/issues/2680", 35 | category=DeprecationWarning, 36 | stacklevel=2, 37 | ) 38 | 39 | __author__ = "Andrey Petrov (andrey.petrov@shazow.net)" 40 | __license__ = "MIT" 41 | __version__ = __version__ 42 | 43 | __all__ = ( 44 | "HTTPConnectionPool", 45 | "HTTPSConnectionPool", 46 | "PoolManager", 47 | "ProxyManager", 48 | "HTTPResponse", 49 | "Retry", 50 | "Timeout", 51 | "add_stderr_logger", 52 | "connection_from_url", 53 | "disable_warnings", 54 | "encode_multipart_formdata", 55 | "get_host", 56 | "make_headers", 57 | "proxy_from_url", 58 | ) 59 | 60 | logging.getLogger(__name__).addHandler(NullHandler()) 61 | 62 | 63 | def add_stderr_logger(level=logging.DEBUG): 64 | """ 65 | Helper for quickly adding a StreamHandler to the logger. Useful for 66 | debugging. 67 | 68 | Returns the handler after adding it. 69 | """ 70 | # This method needs to be in this __init__.py to get the __name__ correct 71 | # even if urllib3 is vendored within another package. 72 | logger = logging.getLogger(__name__) 73 | handler = logging.StreamHandler() 74 | handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) 75 | logger.addHandler(handler) 76 | logger.setLevel(level) 77 | logger.debug("Added a stderr logging handler to logger: %s", __name__) 78 | return handler 79 | 80 | 81 | # ... Clean up. 82 | del NullHandler 83 | 84 | 85 | # All warning filters *must* be appended unless you're really certain that they 86 | # shouldn't be: otherwise, it's very hard for users to use most Python 87 | # mechanisms to silence them. 88 | # SecurityWarning's always go off by default. 89 | warnings.simplefilter("always", exceptions.SecurityWarning, append=True) 90 | # SubjectAltNameWarning's should go off once per host 91 | warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) 92 | # InsecurePlatformWarning's don't vary between requests, so we keep it default. 93 | warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) 94 | # SNIMissingWarnings should go off only once. 95 | warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) 96 | 97 | 98 | def disable_warnings(category=exceptions.HTTPWarning): 99 | """ 100 | Helper for quickly disabling all urllib3 warnings. 
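    For example, to silence only the certificate-verification warnings emitted
    for unverified HTTPS requests:

        import urllib3

        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)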
101 | """ 102 | warnings.simplefilter("ignore", category) 103 | -------------------------------------------------------------------------------- /lib/idna/codec.py: -------------------------------------------------------------------------------- 1 | from .core import encode, decode, alabel, ulabel, IDNAError 2 | import codecs 3 | import re 4 | from typing import Tuple, Optional 5 | 6 | _unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') 7 | 8 | class Codec(codecs.Codec): 9 | 10 | def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: 11 | if errors != 'strict': 12 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) 13 | 14 | if not data: 15 | return b"", 0 16 | 17 | return encode(data), len(data) 18 | 19 | def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: 20 | if errors != 'strict': 21 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) 22 | 23 | if not data: 24 | return '', 0 25 | 26 | return decode(data), len(data) 27 | 28 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder): 29 | def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore 30 | if errors != 'strict': 31 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) 32 | 33 | if not data: 34 | return "", 0 35 | 36 | labels = _unicode_dots_re.split(data) 37 | trailing_dot = '' 38 | if labels: 39 | if not labels[-1]: 40 | trailing_dot = '.' 41 | del labels[-1] 42 | elif not final: 43 | # Keep potentially unfinished label until the next call 44 | del labels[-1] 45 | if labels: 46 | trailing_dot = '.' 47 | 48 | result = [] 49 | size = 0 50 | for label in labels: 51 | result.append(alabel(label)) 52 | if size: 53 | size += 1 54 | size += len(label) 55 | 56 | # Join with U+002E 57 | result_str = '.'.join(result) + trailing_dot # type: ignore 58 | size += len(trailing_dot) 59 | return result_str, size 60 | 61 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder): 62 | def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore 63 | if errors != 'strict': 64 | raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) 65 | 66 | if not data: 67 | return ('', 0) 68 | 69 | labels = _unicode_dots_re.split(data) 70 | trailing_dot = '' 71 | if labels: 72 | if not labels[-1]: 73 | trailing_dot = '.' 74 | del labels[-1] 75 | elif not final: 76 | # Keep potentially unfinished label until the next call 77 | del labels[-1] 78 | if labels: 79 | trailing_dot = '.' 
80 | 81 | result = [] 82 | size = 0 83 | for label in labels: 84 | result.append(ulabel(label)) 85 | if size: 86 | size += 1 87 | size += len(label) 88 | 89 | result_str = '.'.join(result) + trailing_dot 90 | size += len(trailing_dot) 91 | return (result_str, size) 92 | 93 | 94 | class StreamWriter(Codec, codecs.StreamWriter): 95 | pass 96 | 97 | 98 | class StreamReader(Codec, codecs.StreamReader): 99 | pass 100 | 101 | 102 | def getregentry() -> codecs.CodecInfo: 103 | # Compatibility as a search_function for codecs.register() 104 | return codecs.CodecInfo( 105 | name='idna', 106 | encode=Codec().encode, # type: ignore 107 | decode=Codec().decode, # type: ignore 108 | incrementalencoder=IncrementalEncoder, 109 | incrementaldecoder=IncrementalDecoder, 110 | streamwriter=StreamWriter, 111 | streamreader=StreamReader, 112 | ) 113 | -------------------------------------------------------------------------------- /lib/urllib3/util/response.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect 4 | 5 | from ..exceptions import HeaderParsingError 6 | from ..packages.six.moves import http_client as httplib 7 | 8 | 9 | def is_fp_closed(obj): 10 | """ 11 | Checks whether a given file-like object is closed. 12 | 13 | :param obj: 14 | The file-like object to check. 15 | """ 16 | 17 | try: 18 | # Check `isclosed()` first, in case Python3 doesn't set `closed`. 19 | # GH Issue #928 20 | return obj.isclosed() 21 | except AttributeError: 22 | pass 23 | 24 | try: 25 | # Check via the official file-like-object way. 26 | return obj.closed 27 | except AttributeError: 28 | pass 29 | 30 | try: 31 | # Check if the object is a container for another file-like object that 32 | # gets released on exhaustion (e.g. HTTPResponse). 33 | return obj.fp is None 34 | except AttributeError: 35 | pass 36 | 37 | raise ValueError("Unable to determine whether fp is closed.") 38 | 39 | 40 | def assert_header_parsing(headers): 41 | """ 42 | Asserts whether all headers have been successfully parsed. 43 | Extracts encountered errors from the result of parsing headers. 44 | 45 | Only works on Python 3. 46 | 47 | :param http.client.HTTPMessage headers: Headers to verify. 48 | 49 | :raises urllib3.exceptions.HeaderParsingError: 50 | If parsing errors are found. 51 | """ 52 | 53 | # This will fail silently if we pass in the wrong kind of parameter. 54 | # To make debugging easier add an explicit check. 55 | if not isinstance(headers, httplib.HTTPMessage): 56 | raise TypeError("expected httplib.Message, got {0}.".format(type(headers))) 57 | 58 | defects = getattr(headers, "defects", None) 59 | get_payload = getattr(headers, "get_payload", None) 60 | 61 | unparsed_data = None 62 | if get_payload: 63 | # get_payload is actually email.message.Message.get_payload; 64 | # we're only interested in the result if it's not a multipart message 65 | if not headers.is_multipart(): 66 | payload = get_payload() 67 | 68 | if isinstance(payload, (bytes, str)): 69 | unparsed_data = payload 70 | if defects: 71 | # httplib is assuming a response body is available 72 | # when parsing headers even when httplib only sends 73 | # header data to parse_headers() This results in 74 | # defects on multipart responses in particular. 
75 | # See: https://github.com/urllib3/urllib3/issues/800 76 | 77 | # So we ignore the following defects: 78 | # - StartBoundaryNotFoundDefect: 79 | # The claimed start boundary was never found. 80 | # - MultipartInvariantViolationDefect: 81 | # A message claimed to be a multipart but no subparts were found. 82 | defects = [ 83 | defect 84 | for defect in defects 85 | if not isinstance( 86 | defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) 87 | ) 88 | ] 89 | 90 | if defects or unparsed_data: 91 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) 92 | 93 | 94 | def is_response_to_head(response): 95 | """ 96 | Checks whether the request of a response has been a HEAD-request. 97 | Handles the quirks of AppEngine. 98 | 99 | :param http.client.HTTPResponse response: 100 | Response to check if the originating request 101 | used 'HEAD' as a method. 102 | """ 103 | # FIXME: Can we do this somehow without accessing private httplib _method? 104 | method = response._method 105 | if isinstance(method, int): # Platform-specific: Appengine 106 | return method == 3 107 | return method.upper() == "HEAD" 108 | -------------------------------------------------------------------------------- /lib/requests/exceptions.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.exceptions 3 | ~~~~~~~~~~~~~~~~~~~ 4 | 5 | This module contains the set of Requests' exceptions. 6 | """ 7 | from urllib3.exceptions import HTTPError as BaseHTTPError 8 | 9 | from .compat import JSONDecodeError as CompatJSONDecodeError 10 | 11 | 12 | class RequestException(IOError): 13 | """There was an ambiguous exception that occurred while handling your 14 | request. 15 | """ 16 | 17 | def __init__(self, *args, **kwargs): 18 | """Initialize RequestException with `request` and `response` objects.""" 19 | response = kwargs.pop("response", None) 20 | self.response = response 21 | self.request = kwargs.pop("request", None) 22 | if response is not None and not self.request and hasattr(response, "request"): 23 | self.request = self.response.request 24 | super().__init__(*args, **kwargs) 25 | 26 | 27 | class InvalidJSONError(RequestException): 28 | """A JSON error occurred.""" 29 | 30 | 31 | class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): 32 | """Couldn't decode the text into json""" 33 | 34 | def __init__(self, *args, **kwargs): 35 | """ 36 | Construct the JSONDecodeError instance first with all 37 | args. Then use it's args to construct the IOError so that 38 | the json specific args aren't used as IOError specific args 39 | and the error message from JSONDecodeError is preserved. 40 | """ 41 | CompatJSONDecodeError.__init__(self, *args) 42 | InvalidJSONError.__init__(self, *self.args, **kwargs) 43 | 44 | 45 | class HTTPError(RequestException): 46 | """An HTTP error occurred.""" 47 | 48 | 49 | class ConnectionError(RequestException): 50 | """A Connection error occurred.""" 51 | 52 | 53 | class ProxyError(ConnectionError): 54 | """A proxy error occurred.""" 55 | 56 | 57 | class SSLError(ConnectionError): 58 | """An SSL error occurred.""" 59 | 60 | 61 | class Timeout(RequestException): 62 | """The request timed out. 63 | 64 | Catching this error will catch both 65 | :exc:`~requests.exceptions.ConnectTimeout` and 66 | :exc:`~requests.exceptions.ReadTimeout` errors. 67 | """ 68 | 69 | 70 | class ConnectTimeout(ConnectionError, Timeout): 71 | """The request timed out while trying to connect to the remote server. 
72 | 73 | Requests that produced this error are safe to retry. 74 | """ 75 | 76 | 77 | class ReadTimeout(Timeout): 78 | """The server did not send any data in the allotted amount of time.""" 79 | 80 | 81 | class URLRequired(RequestException): 82 | """A valid URL is required to make a request.""" 83 | 84 | 85 | class TooManyRedirects(RequestException): 86 | """Too many redirects.""" 87 | 88 | 89 | class MissingSchema(RequestException, ValueError): 90 | """The URL scheme (e.g. http or https) is missing.""" 91 | 92 | 93 | class InvalidSchema(RequestException, ValueError): 94 | """The URL scheme provided is either invalid or unsupported.""" 95 | 96 | 97 | class InvalidURL(RequestException, ValueError): 98 | """The URL provided was somehow invalid.""" 99 | 100 | 101 | class InvalidHeader(RequestException, ValueError): 102 | """The header value provided was somehow invalid.""" 103 | 104 | 105 | class InvalidProxyURL(InvalidURL): 106 | """The proxy URL provided is invalid.""" 107 | 108 | 109 | class ChunkedEncodingError(RequestException): 110 | """The server declared chunked encoding but sent an invalid chunk.""" 111 | 112 | 113 | class ContentDecodingError(RequestException, BaseHTTPError): 114 | """Failed to decode response content.""" 115 | 116 | 117 | class StreamConsumedError(RequestException, TypeError): 118 | """The content for this response was already consumed.""" 119 | 120 | 121 | class RetryError(RequestException): 122 | """Custom retries logic failed""" 123 | 124 | 125 | class UnrewindableBodyError(RequestException): 126 | """Requests encountered an error when trying to rewind a body.""" 127 | 128 | 129 | # Warnings 130 | 131 | 132 | class RequestsWarning(Warning): 133 | """Base warning for Requests.""" 134 | 135 | 136 | class FileModeWarning(RequestsWarning, DeprecationWarning): 137 | """A file was opened in text mode, but Requests determined its binary length.""" 138 | 139 | 140 | class RequestsDependencyWarning(RequestsWarning): 141 | """An imported dependency doesn't match the expected version range.""" 142 | -------------------------------------------------------------------------------- /lib/requests/help.py: -------------------------------------------------------------------------------- 1 | """Module containing bug report helper(s).""" 2 | 3 | import json 4 | import platform 5 | import ssl 6 | import sys 7 | 8 | import idna 9 | import urllib3 10 | 11 | from . import __version__ as requests_version 12 | 13 | try: 14 | import charset_normalizer 15 | except ImportError: 16 | charset_normalizer = None 17 | 18 | try: 19 | import chardet 20 | except ImportError: 21 | chardet = None 22 | 23 | try: 24 | from urllib3.contrib import pyopenssl 25 | except ImportError: 26 | pyopenssl = None 27 | OpenSSL = None 28 | cryptography = None 29 | else: 30 | import cryptography 31 | import OpenSSL 32 | 33 | 34 | def _implementation(): 35 | """Return a dict with the Python implementation and version. 36 | 37 | Provide both the name and the version of the Python implementation 38 | currently running. For example, on CPython 3.10.3 it will return 39 | {'name': 'CPython', 'version': '3.10.3'}. 40 | 41 | This function works best on CPython and PyPy: in particular, it probably 42 | doesn't work for Jython or IronPython. Future investigation should be done 43 | to work out the correct shape of the code for those platforms. 
44 | """ 45 | implementation = platform.python_implementation() 46 | 47 | if implementation == "CPython": 48 | implementation_version = platform.python_version() 49 | elif implementation == "PyPy": 50 | implementation_version = "{}.{}.{}".format( 51 | sys.pypy_version_info.major, 52 | sys.pypy_version_info.minor, 53 | sys.pypy_version_info.micro, 54 | ) 55 | if sys.pypy_version_info.releaselevel != "final": 56 | implementation_version = "".join( 57 | [implementation_version, sys.pypy_version_info.releaselevel] 58 | ) 59 | elif implementation == "Jython": 60 | implementation_version = platform.python_version() # Complete Guess 61 | elif implementation == "IronPython": 62 | implementation_version = platform.python_version() # Complete Guess 63 | else: 64 | implementation_version = "Unknown" 65 | 66 | return {"name": implementation, "version": implementation_version} 67 | 68 | 69 | def info(): 70 | """Generate information for a bug report.""" 71 | try: 72 | platform_info = { 73 | "system": platform.system(), 74 | "release": platform.release(), 75 | } 76 | except OSError: 77 | platform_info = { 78 | "system": "Unknown", 79 | "release": "Unknown", 80 | } 81 | 82 | implementation_info = _implementation() 83 | urllib3_info = {"version": urllib3.__version__} 84 | charset_normalizer_info = {"version": None} 85 | chardet_info = {"version": None} 86 | if charset_normalizer: 87 | charset_normalizer_info = {"version": charset_normalizer.__version__} 88 | if chardet: 89 | chardet_info = {"version": chardet.__version__} 90 | 91 | pyopenssl_info = { 92 | "version": None, 93 | "openssl_version": "", 94 | } 95 | if OpenSSL: 96 | pyopenssl_info = { 97 | "version": OpenSSL.__version__, 98 | "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", 99 | } 100 | cryptography_info = { 101 | "version": getattr(cryptography, "__version__", ""), 102 | } 103 | idna_info = { 104 | "version": getattr(idna, "__version__", ""), 105 | } 106 | 107 | system_ssl = ssl.OPENSSL_VERSION_NUMBER 108 | system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} 109 | 110 | return { 111 | "platform": platform_info, 112 | "implementation": implementation_info, 113 | "system_ssl": system_ssl_info, 114 | "using_pyopenssl": pyopenssl is not None, 115 | "using_charset_normalizer": chardet is None, 116 | "pyOpenSSL": pyopenssl_info, 117 | "urllib3": urllib3_info, 118 | "chardet": chardet_info, 119 | "charset_normalizer": charset_normalizer_info, 120 | "cryptography": cryptography_info, 121 | "idna": idna_info, 122 | "requests": { 123 | "version": requests_version, 124 | }, 125 | } 126 | 127 | 128 | def main(): 129 | """Pretty-print the bug information as JSON.""" 130 | print(json.dumps(info(), sort_keys=True, indent=2)) 131 | 132 | 133 | if __name__ == "__main__": 134 | main() 135 | -------------------------------------------------------------------------------- /lib/flox/utils.py: -------------------------------------------------------------------------------- 1 | from tempfile import gettempdir 2 | from urllib import request 3 | from urllib.error import URLError 4 | from pathlib import Path 5 | from functools import wraps 6 | import json 7 | import os 8 | from time import time 9 | import socket 10 | from concurrent.futures import ThreadPoolExecutor 11 | import logging 12 | 13 | logging = logging.getLogger(__name__) 14 | 15 | URL_SCHEMA = [ 16 | 'http://', 17 | 'https://', 18 | ] 19 | socket.setdefaulttimeout(15) 20 | 21 | def cache(file_name:str, max_age=30, dir=gettempdir()): 22 | """ 23 | Cache decorator 
24 | """ 25 | def decorator(func): 26 | @wraps(func) 27 | def wrapper(*args, **kwargs): 28 | cache_file = Path(dir, file_name) 29 | if not Path(cache_file).is_absolute(): 30 | cache_file = Path(gettempdir(), cache_file) 31 | if cache_file.exists() and file_age(cache_file) < max_age and cache_file.stat().st_size != 0: 32 | with open(cache_file, 'r', encoding='utf-8') as f: 33 | try: 34 | cache = json.load(f) 35 | except json.JSONDecodeError: 36 | logging.warning('Unable to read cache file: %s', cache_file) 37 | f.close() 38 | os.remove(cache_file) 39 | else: 40 | return cache 41 | data = func(*args, **kwargs) 42 | if data is None: 43 | return None 44 | if len(data) != 0: 45 | try: 46 | write_json(data, cache_file) 47 | except FileNotFoundError: 48 | logging.warning('Unable to write cache file: %s', cache_file) 49 | return data 50 | return wrapper 51 | return decorator 52 | 53 | def read_json(path:str): 54 | """ 55 | Read json file 56 | """ 57 | with open(path, 'r', encoding='utf-8') as f: 58 | data = json.load(f) 59 | 60 | def write_json(data, path): 61 | if not Path(path).parent.exists(): 62 | Path(path).parent.mkdir(parents=True) 63 | with open(path, 'w') as f: 64 | json.dump(data, f) 65 | 66 | def file_age(path): 67 | age = time() - path.stat().st_mtime 68 | return age 69 | 70 | def get_cache(path, max_age=0): 71 | if Path(path).exists() and file_age(path) < max_age and path.stat().st_size != 0: 72 | return read_json(path) 73 | return None 74 | 75 | def refresh_cache(file_name:str, dir:str=gettempdir()): 76 | """ 77 | Touch cache file 78 | """ 79 | cache_file = Path(dir, file_name) 80 | if cache_file.exists(): 81 | cache_file.touch() 82 | 83 | def cache_path(file_name:str, dir:str=gettempdir()): 84 | """ 85 | Return path to cache file 86 | """ 87 | return Path(dir, file_name) 88 | 89 | def remove_cache(file_name:str, dir:str=gettempdir()): 90 | """ 91 | Remove cache file 92 | """ 93 | cache_file = Path(dir, file_name) 94 | if cache_file.exists(): 95 | cache_file.unlink() 96 | 97 | def download_file(url:str, path, **kwargs): 98 | """ 99 | Download file from url and save it to dir 100 | 101 | Args: 102 | url (str): image url. 103 | dir (str): directory to save image. 104 | file_name (str): file name to save image. 105 | 106 | Keyword Args: 107 | force_download (bool): Force download image even if it exists. 
108 | """ 109 | force_download = kwargs.pop('force_download', False) 110 | if not force_download and path.exists(): 111 | return 112 | try: 113 | request.urlretrieve(url, path) 114 | except URLError as e: 115 | logging.exception(f'Unable to download: {url}') 116 | return Path(path) 117 | 118 | def get_icon(url:str, path, file_name:str=None, **kwargs): 119 | for schema in URL_SCHEMA: 120 | if url.startswith(schema): 121 | break 122 | else: 123 | return url 124 | executor = kwargs.pop('executor', False) 125 | if file_name is None: 126 | file_name = url.split('/')[-1] 127 | if not Path(path).is_absolute(): 128 | path = Path(gettempdir(), path) 129 | if not path.exists(): 130 | path.mkdir(parents=True) 131 | full_path = Path(path, file_name) 132 | if not full_path.exists(): 133 | if executor is False: 134 | download_file(url, full_path) 135 | else: 136 | executor.submit(download_file, url, full_path) 137 | return full_path -------------------------------------------------------------------------------- /lib/certifi/core.py: -------------------------------------------------------------------------------- 1 | """ 2 | certifi.py 3 | ~~~~~~~~~~ 4 | 5 | This module returns the installation location of cacert.pem or its contents. 6 | """ 7 | import sys 8 | 9 | 10 | if sys.version_info >= (3, 11): 11 | 12 | from importlib.resources import as_file, files 13 | 14 | _CACERT_CTX = None 15 | _CACERT_PATH = None 16 | 17 | def where() -> str: 18 | # This is slightly terrible, but we want to delay extracting the file 19 | # in cases where we're inside of a zipimport situation until someone 20 | # actually calls where(), but we don't want to re-extract the file 21 | # on every call of where(), so we'll do it once then store it in a 22 | # global variable. 23 | global _CACERT_CTX 24 | global _CACERT_PATH 25 | if _CACERT_PATH is None: 26 | # This is slightly janky, the importlib.resources API wants you to 27 | # manage the cleanup of this file, so it doesn't actually return a 28 | # path, it returns a context manager that will give you the path 29 | # when you enter it and will do any cleanup when you leave it. In 30 | # the common case of not needing a temporary file, it will just 31 | # return the file system location and the __exit__() is a no-op. 32 | # 33 | # We also have to hold onto the actual context manager, because 34 | # it will do the cleanup whenever it gets garbage collected, so 35 | # we will also store that at the global level as well. 36 | _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) 37 | _CACERT_PATH = str(_CACERT_CTX.__enter__()) 38 | 39 | return _CACERT_PATH 40 | 41 | def contents() -> str: 42 | return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") 43 | 44 | elif sys.version_info >= (3, 7): 45 | 46 | from importlib.resources import path as get_path, read_text 47 | 48 | _CACERT_CTX = None 49 | _CACERT_PATH = None 50 | 51 | def where() -> str: 52 | # This is slightly terrible, but we want to delay extracting the 53 | # file in cases where we're inside of a zipimport situation until 54 | # someone actually calls where(), but we don't want to re-extract 55 | # the file on every call of where(), so we'll do it once then store 56 | # it in a global variable. 
57 | global _CACERT_CTX 58 | global _CACERT_PATH 59 | if _CACERT_PATH is None: 60 | # This is slightly janky, the importlib.resources API wants you 61 | # to manage the cleanup of this file, so it doesn't actually 62 | # return a path, it returns a context manager that will give 63 | # you the path when you enter it and will do any cleanup when 64 | # you leave it. In the common case of not needing a temporary 65 | # file, it will just return the file system location and the 66 | # __exit__() is a no-op. 67 | # 68 | # We also have to hold onto the actual context manager, because 69 | # it will do the cleanup whenever it gets garbage collected, so 70 | # we will also store that at the global level as well. 71 | _CACERT_CTX = get_path("certifi", "cacert.pem") 72 | _CACERT_PATH = str(_CACERT_CTX.__enter__()) 73 | 74 | return _CACERT_PATH 75 | 76 | def contents() -> str: 77 | return read_text("certifi", "cacert.pem", encoding="ascii") 78 | 79 | else: 80 | import os 81 | import types 82 | from typing import Union 83 | 84 | Package = Union[types.ModuleType, str] 85 | Resource = Union[str, "os.PathLike"] 86 | 87 | # This fallback will work for Python versions prior to 3.7 that lack the 88 | # importlib.resources module but relies on the existing `where` function 89 | # so won't address issues with environments like PyOxidizer that don't set 90 | # __file__ on modules. 91 | def read_text( 92 | package: Package, 93 | resource: Resource, 94 | encoding: str = 'utf-8', 95 | errors: str = 'strict' 96 | ) -> str: 97 | with open(where(), encoding=encoding) as data: 98 | return data.read() 99 | 100 | # If we don't have importlib.resources, then we will just do the old logic 101 | # of assuming we're on the filesystem and munge the path directly. 102 | def where() -> str: 103 | f = os.path.dirname(__file__) 104 | 105 | return os.path.join(f, "cacert.pem") 106 | 107 | def contents() -> str: 108 | return read_text("certifi", "cacert.pem", encoding="ascii") 109 | -------------------------------------------------------------------------------- /lib/requests/status_codes.py: -------------------------------------------------------------------------------- 1 | r""" 2 | The ``codes`` object defines a mapping from common names for HTTP statuses 3 | to their numerical codes, accessible either as attributes or as dictionary 4 | items. 5 | 6 | Example:: 7 | 8 | >>> import requests 9 | >>> requests.codes['temporary_redirect'] 10 | 307 11 | >>> requests.codes.teapot 12 | 418 13 | >>> requests.codes['\o/'] 14 | 200 15 | 16 | Some codes have multiple names, and both upper- and lower-case versions of 17 | the names are allowed. For example, ``codes.ok``, ``codes.OK``, and 18 | ``codes.okay`` all correspond to the HTTP status code 200. 19 | """ 20 | 21 | from .structures import LookupDict 22 | 23 | _codes = { 24 | # Informational. 25 | 100: ("continue",), 26 | 101: ("switching_protocols",), 27 | 102: ("processing",), 28 | 103: ("checkpoint",), 29 | 122: ("uri_too_long", "request_uri_too_long"), 30 | 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), 31 | 201: ("created",), 32 | 202: ("accepted",), 33 | 203: ("non_authoritative_info", "non_authoritative_information"), 34 | 204: ("no_content",), 35 | 205: ("reset_content", "reset"), 36 | 206: ("partial_content", "partial"), 37 | 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), 38 | 208: ("already_reported",), 39 | 226: ("im_used",), 40 | # Redirection. 
41 | 300: ("multiple_choices",), 42 | 301: ("moved_permanently", "moved", "\\o-"), 43 | 302: ("found",), 44 | 303: ("see_other", "other"), 45 | 304: ("not_modified",), 46 | 305: ("use_proxy",), 47 | 306: ("switch_proxy",), 48 | 307: ("temporary_redirect", "temporary_moved", "temporary"), 49 | 308: ( 50 | "permanent_redirect", 51 | "resume_incomplete", 52 | "resume", 53 | ), # "resume" and "resume_incomplete" to be removed in 3.0 54 | # Client Error. 55 | 400: ("bad_request", "bad"), 56 | 401: ("unauthorized",), 57 | 402: ("payment_required", "payment"), 58 | 403: ("forbidden",), 59 | 404: ("not_found", "-o-"), 60 | 405: ("method_not_allowed", "not_allowed"), 61 | 406: ("not_acceptable",), 62 | 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), 63 | 408: ("request_timeout", "timeout"), 64 | 409: ("conflict",), 65 | 410: ("gone",), 66 | 411: ("length_required",), 67 | 412: ("precondition_failed", "precondition"), 68 | 413: ("request_entity_too_large",), 69 | 414: ("request_uri_too_large",), 70 | 415: ("unsupported_media_type", "unsupported_media", "media_type"), 71 | 416: ( 72 | "requested_range_not_satisfiable", 73 | "requested_range", 74 | "range_not_satisfiable", 75 | ), 76 | 417: ("expectation_failed",), 77 | 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), 78 | 421: ("misdirected_request",), 79 | 422: ("unprocessable_entity", "unprocessable"), 80 | 423: ("locked",), 81 | 424: ("failed_dependency", "dependency"), 82 | 425: ("unordered_collection", "unordered"), 83 | 426: ("upgrade_required", "upgrade"), 84 | 428: ("precondition_required", "precondition"), 85 | 429: ("too_many_requests", "too_many"), 86 | 431: ("header_fields_too_large", "fields_too_large"), 87 | 444: ("no_response", "none"), 88 | 449: ("retry_with", "retry"), 89 | 450: ("blocked_by_windows_parental_controls", "parental_controls"), 90 | 451: ("unavailable_for_legal_reasons", "legal_reasons"), 91 | 499: ("client_closed_request",), 92 | # Server Error. 
93 | 500: ("internal_server_error", "server_error", "/o\\", "✗"), 94 | 501: ("not_implemented",), 95 | 502: ("bad_gateway",), 96 | 503: ("service_unavailable", "unavailable"), 97 | 504: ("gateway_timeout",), 98 | 505: ("http_version_not_supported", "http_version"), 99 | 506: ("variant_also_negotiates",), 100 | 507: ("insufficient_storage",), 101 | 509: ("bandwidth_limit_exceeded", "bandwidth"), 102 | 510: ("not_extended",), 103 | 511: ("network_authentication_required", "network_auth", "network_authentication"), 104 | } 105 | 106 | codes = LookupDict(name="status_codes") 107 | 108 | 109 | def _init(): 110 | for code, titles in _codes.items(): 111 | for title in titles: 112 | setattr(codes, title, code) 113 | if not title.startswith(("\\", "/")): 114 | setattr(codes, title.upper(), code) 115 | 116 | def doc(code): 117 | names = ", ".join(f"``{n}``" for n in _codes[code]) 118 | return "* %d: %s" % (code, names) 119 | 120 | global __doc__ 121 | __doc__ = ( 122 | __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) 123 | if __doc__ is not None 124 | else None 125 | ) 126 | 127 | 128 | _init() 129 | -------------------------------------------------------------------------------- /lib/urllib3/util/request.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from base64 import b64encode 4 | 5 | from ..exceptions import UnrewindableBodyError 6 | from ..packages.six import b, integer_types 7 | 8 | # Pass as a value within ``headers`` to skip 9 | # emitting some HTTP headers that are added automatically. 10 | # The only headers that are supported are ``Accept-Encoding``, 11 | # ``Host``, and ``User-Agent``. 12 | SKIP_HEADER = "@@@SKIP_HEADER@@@" 13 | SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) 14 | 15 | ACCEPT_ENCODING = "gzip,deflate" 16 | try: 17 | try: 18 | import brotlicffi as _unused_module_brotli # noqa: F401 19 | except ImportError: 20 | import brotli as _unused_module_brotli # noqa: F401 21 | except ImportError: 22 | pass 23 | else: 24 | ACCEPT_ENCODING += ",br" 25 | 26 | _FAILEDTELL = object() 27 | 28 | 29 | def make_headers( 30 | keep_alive=None, 31 | accept_encoding=None, 32 | user_agent=None, 33 | basic_auth=None, 34 | proxy_basic_auth=None, 35 | disable_cache=None, 36 | ): 37 | """ 38 | Shortcuts for generating request headers. 39 | 40 | :param keep_alive: 41 | If ``True``, adds 'connection: keep-alive' header. 42 | 43 | :param accept_encoding: 44 | Can be a boolean, list, or string. 45 | ``True`` translates to 'gzip,deflate'. 46 | List will get joined by comma. 47 | String will be used as provided. 48 | 49 | :param user_agent: 50 | String representing the user-agent you want, such as 51 | "python-urllib3/0.6" 52 | 53 | :param basic_auth: 54 | Colon-separated username:password string for 'authorization: basic ...' 55 | auth header. 56 | 57 | :param proxy_basic_auth: 58 | Colon-separated username:password string for 'proxy-authorization: basic ...' 59 | auth header. 60 | 61 | :param disable_cache: 62 | If ``True``, adds 'cache-control: no-cache' header. 
63 | 64 | Example:: 65 | 66 | >>> make_headers(keep_alive=True, user_agent="Batman/1.0") 67 | {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} 68 | >>> make_headers(accept_encoding=True) 69 | {'accept-encoding': 'gzip,deflate'} 70 | """ 71 | headers = {} 72 | if accept_encoding: 73 | if isinstance(accept_encoding, str): 74 | pass 75 | elif isinstance(accept_encoding, list): 76 | accept_encoding = ",".join(accept_encoding) 77 | else: 78 | accept_encoding = ACCEPT_ENCODING 79 | headers["accept-encoding"] = accept_encoding 80 | 81 | if user_agent: 82 | headers["user-agent"] = user_agent 83 | 84 | if keep_alive: 85 | headers["connection"] = "keep-alive" 86 | 87 | if basic_auth: 88 | headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") 89 | 90 | if proxy_basic_auth: 91 | headers["proxy-authorization"] = "Basic " + b64encode( 92 | b(proxy_basic_auth) 93 | ).decode("utf-8") 94 | 95 | if disable_cache: 96 | headers["cache-control"] = "no-cache" 97 | 98 | return headers 99 | 100 | 101 | def set_file_position(body, pos): 102 | """ 103 | If a position is provided, move file to that point. 104 | Otherwise, we'll attempt to record a position for future use. 105 | """ 106 | if pos is not None: 107 | rewind_body(body, pos) 108 | elif getattr(body, "tell", None) is not None: 109 | try: 110 | pos = body.tell() 111 | except (IOError, OSError): 112 | # This differentiates from None, allowing us to catch 113 | # a failed `tell()` later when trying to rewind the body. 114 | pos = _FAILEDTELL 115 | 116 | return pos 117 | 118 | 119 | def rewind_body(body, body_pos): 120 | """ 121 | Attempt to rewind body to a certain position. 122 | Primarily used for request redirects and retries. 123 | 124 | :param body: 125 | File-like object that supports seek. 126 | 127 | :param int pos: 128 | Position to seek to in file. 129 | """ 130 | body_seek = getattr(body, "seek", None) 131 | if body_seek is not None and isinstance(body_pos, integer_types): 132 | try: 133 | body_seek(body_pos) 134 | except (IOError, OSError): 135 | raise UnrewindableBodyError( 136 | "An error occurred when rewinding request body for redirect/retry." 137 | ) 138 | elif body_pos is _FAILEDTELL: 139 | raise UnrewindableBodyError( 140 | "Unable to record file position for rewinding " 141 | "request body during a redirect/retry." 142 | ) 143 | else: 144 | raise ValueError( 145 | "body_pos must be of type integer, instead it was %s." 
% type(body_pos) 146 | ) 147 | -------------------------------------------------------------------------------- /lib/flox/launcher.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | import sys 4 | from time import time 5 | 6 | """ 7 | Slightly modified wox.py credit: https://github.com/Wox-launcher/Wox 8 | """ 9 | 10 | class Launcher(object): 11 | """ 12 | Launcher python plugin base 13 | """ 14 | 15 | def __del__(self, debug=None): 16 | self.run(debug) 17 | 18 | def run(self, debug=None): 19 | if debug: 20 | self._debug = debug 21 | self.rpc_request = {'method': 'query', 'parameters': ['']} 22 | if len(sys.argv) > 1: 23 | self.rpc_request = json.loads(sys.argv[1]) 24 | if 'settings' in self.rpc_request.keys(): 25 | self._settings = self.rpc_request['settings'] 26 | self.logger.debug('Loaded settings from RPC request') 27 | if not self._debug: 28 | self._debug = self.settings.get('debug', False) 29 | if self._debug: 30 | self.logger_level("debug") 31 | self.logger.debug(f'Request:\n{json.dumps(self.rpc_request, indent=4)}') 32 | self.logger.debug(f"Params: {self.rpc_request.get('parameters')}") 33 | # proxy is not working now 34 | # self.proxy = rpc_request.get("proxy",{}) 35 | request_method_name = self.rpc_request.get("method") 36 | #transform query and context calls to internal flox methods 37 | if request_method_name == 'query' or request_method_name == 'context_menu': 38 | request_method_name = f"_{request_method_name}" 39 | 40 | request_parameters = self.rpc_request.get("parameters") 41 | 42 | request_method = getattr(self, request_method_name) 43 | try: 44 | results = request_method(*request_parameters) or self._results 45 | except Exception as e: 46 | self.logger.exception(e) 47 | results = self.exception(e) or self._results 48 | line_break = '#' * 10 49 | ms = int((time() - self._start) * 1000) 50 | self.logger.debug(f'{line_break} Total time: {ms}ms {line_break}') 51 | if request_method_name == "_query" or request_method_name == "_context_menu": 52 | results = {"result": results} 53 | if self._settings != self.rpc_request.get('Settings') and self._settings is not None: 54 | results['SettingsChange'] = self.settings 55 | 56 | print(json.dumps(results)) 57 | 58 | def query(self,query): 59 | """ 60 | sub class need to override this method 61 | """ 62 | return [] 63 | 64 | def context_menu(self, data): 65 | """ 66 | optional context menu entries for a result 67 | """ 68 | return [] 69 | 70 | def exception(self, exception): 71 | """ 72 | exception handler 73 | """ 74 | return [] 75 | 76 | def debug(self,msg): 77 | """ 78 | alert msg 79 | """ 80 | print("DEBUG:{}".format(msg)) 81 | sys.exit() 82 | 83 | def change_query(self, query, requery=False): 84 | """ 85 | change query 86 | """ 87 | print(json.dumps({"method": f"{self.api}.ChangeQuery","parameters":[query,requery]})) 88 | 89 | def shell_run(self, cmd): 90 | """ 91 | run shell commands 92 | """ 93 | print(json.dumps({"method": f"{self.api}.ShellRun","parameters":[cmd]})) 94 | 95 | def close_app(self): 96 | """ 97 | close launcher 98 | """ 99 | print(json.dumps({"method": f"{self.api}.CloseApp","parameters":[]})) 100 | 101 | def hide_app(self): 102 | """ 103 | hide launcher 104 | """ 105 | print(json.dumps({"method": f"{self.api}.HideApp","parameters":[]})) 106 | 107 | def show_app(self): 108 | """ 109 | show launcher 110 | """ 111 | print(json.dumps({"method": f"{self.api}.ShowApp","parameters":[]})) 112 | 113 | def show_msg(self, title, sub_title, 
ico_path=""): 114 | """ 115 | show messagebox 116 | """ 117 | print(json.dumps({"method": f"{self.api}.ShowMsg","parameters":[title,sub_title,ico_path]})) 118 | 119 | def open_setting_dialog(self): 120 | """ 121 | open setting dialog 122 | """ 123 | self.logger.debug(json.dumps({"method": f"{self.api}.OpenSettingDialog","parameters":[]})) 124 | print(json.dumps({"method": f"{self.api}.OpenSettingDialog","parameters":[]})) 125 | 126 | def start_loadingbar(self): 127 | """ 128 | start loading animation in wox 129 | """ 130 | print(json.dumps({"method": f"{self.api}.StartLoadingBar","parameters":[]})) 131 | 132 | def stop_loadingbar(self): 133 | """ 134 | stop loading animation in wox 135 | """ 136 | print(json.dumps({"method": f"{self.api}.StopLoadingBar","parameters":[]})) 137 | 138 | def reload_plugins(self): 139 | """ 140 | reload all launcher plugins 141 | """ 142 | print(json.dumps({"method": f"{self.api}.ReloadPlugins","parameters":[]})) 143 | -------------------------------------------------------------------------------- /lib/urllib3/contrib/ntlmpool.py: -------------------------------------------------------------------------------- 1 | """ 2 | NTLM authenticating pool, contributed by erikcederstran 3 | 4 | Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 5 | """ 6 | from __future__ import absolute_import 7 | 8 | import warnings 9 | from logging import getLogger 10 | 11 | from ntlm import ntlm 12 | 13 | from .. import HTTPSConnectionPool 14 | from ..packages.six.moves.http_client import HTTPSConnection 15 | 16 | warnings.warn( 17 | "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed " 18 | "in urllib3 v2.0 release, urllib3 is not able to support it properly due " 19 | "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. " 20 | "If you are a user of this module please comment in the mentioned issue.", 21 | DeprecationWarning, 22 | ) 23 | 24 | log = getLogger(__name__) 25 | 26 | 27 | class NTLMConnectionPool(HTTPSConnectionPool): 28 | """ 29 | Implements an NTLM authentication version of an urllib3 connection pool 30 | """ 31 | 32 | scheme = "https" 33 | 34 | def __init__(self, user, pw, authurl, *args, **kwargs): 35 | """ 36 | authurl is a random URL on the server that is protected by NTLM. 37 | user is the Windows user, probably in the DOMAIN\\username format. 38 | pw is the password for the user. 39 | """ 40 | super(NTLMConnectionPool, self).__init__(*args, **kwargs) 41 | self.authurl = authurl 42 | self.rawuser = user 43 | user_parts = user.split("\\", 1) 44 | self.domain = user_parts[0].upper() 45 | self.user = user_parts[1] 46 | self.pw = pw 47 | 48 | def _new_conn(self): 49 | # Performs the NTLM handshake that secures the connection. The socket 50 | # must be kept open while requests are performed. 51 | self.num_connections += 1 52 | log.debug( 53 | "Starting NTLM HTTPS connection no. 
%d: https://%s%s", 54 | self.num_connections, 55 | self.host, 56 | self.authurl, 57 | ) 58 | 59 | headers = {"Connection": "Keep-Alive"} 60 | req_header = "Authorization" 61 | resp_header = "www-authenticate" 62 | 63 | conn = HTTPSConnection(host=self.host, port=self.port) 64 | 65 | # Send negotiation message 66 | headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE( 67 | self.rawuser 68 | ) 69 | log.debug("Request headers: %s", headers) 70 | conn.request("GET", self.authurl, None, headers) 71 | res = conn.getresponse() 72 | reshdr = dict(res.headers) 73 | log.debug("Response status: %s %s", res.status, res.reason) 74 | log.debug("Response headers: %s", reshdr) 75 | log.debug("Response data: %s [...]", res.read(100)) 76 | 77 | # Remove the reference to the socket, so that it can not be closed by 78 | # the response object (we want to keep the socket open) 79 | res.fp = None 80 | 81 | # Server should respond with a challenge message 82 | auth_header_values = reshdr[resp_header].split(", ") 83 | auth_header_value = None 84 | for s in auth_header_values: 85 | if s[:5] == "NTLM ": 86 | auth_header_value = s[5:] 87 | if auth_header_value is None: 88 | raise Exception( 89 | "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header]) 90 | ) 91 | 92 | # Send authentication message 93 | ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE( 94 | auth_header_value 95 | ) 96 | auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE( 97 | ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags 98 | ) 99 | headers[req_header] = "NTLM %s" % auth_msg 100 | log.debug("Request headers: %s", headers) 101 | conn.request("GET", self.authurl, None, headers) 102 | res = conn.getresponse() 103 | log.debug("Response status: %s %s", res.status, res.reason) 104 | log.debug("Response headers: %s", dict(res.headers)) 105 | log.debug("Response data: %s [...]", res.read()[:100]) 106 | if res.status != 200: 107 | if res.status == 401: 108 | raise Exception("Server rejected request: wrong username or password") 109 | raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) 110 | 111 | res.fp = None 112 | log.debug("Connection established") 113 | return conn 114 | 115 | def urlopen( 116 | self, 117 | method, 118 | url, 119 | body=None, 120 | headers=None, 121 | retries=3, 122 | redirect=True, 123 | assert_same_host=True, 124 | ): 125 | if headers is None: 126 | headers = {} 127 | headers["Connection"] = "Keep-Alive" 128 | return super(NTLMConnectionPool, self).urlopen( 129 | method, url, body, headers, retries, redirect, assert_same_host 130 | ) 131 | -------------------------------------------------------------------------------- /lib/requests-2.28.1.dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: requests 3 | Version: 2.28.1 4 | Summary: Python HTTP for Humans. 
5 | Home-page: https://requests.readthedocs.io 6 | Author: Kenneth Reitz 7 | Author-email: me@kennethreitz.org 8 | License: Apache 2.0 9 | Project-URL: Documentation, https://requests.readthedocs.io 10 | Project-URL: Source, https://github.com/psf/requests 11 | Platform: UNKNOWN 12 | Classifier: Development Status :: 5 - Production/Stable 13 | Classifier: Environment :: Web Environment 14 | Classifier: Intended Audience :: Developers 15 | Classifier: License :: OSI Approved :: Apache Software License 16 | Classifier: Natural Language :: English 17 | Classifier: Operating System :: OS Independent 18 | Classifier: Programming Language :: Python 19 | Classifier: Programming Language :: Python :: 3 20 | Classifier: Programming Language :: Python :: 3.7 21 | Classifier: Programming Language :: Python :: 3.8 22 | Classifier: Programming Language :: Python :: 3.9 23 | Classifier: Programming Language :: Python :: 3.10 24 | Classifier: Programming Language :: Python :: 3.11 25 | Classifier: Programming Language :: Python :: 3 :: Only 26 | Classifier: Programming Language :: Python :: Implementation :: CPython 27 | Classifier: Programming Language :: Python :: Implementation :: PyPy 28 | Classifier: Topic :: Internet :: WWW/HTTP 29 | Classifier: Topic :: Software Development :: Libraries 30 | Requires-Python: >=3.7, <4 31 | Description-Content-Type: text/markdown 32 | License-File: LICENSE 33 | Requires-Dist: charset-normalizer (<3,>=2) 34 | Requires-Dist: idna (<4,>=2.5) 35 | Requires-Dist: urllib3 (<1.27,>=1.21.1) 36 | Requires-Dist: certifi (>=2017.4.17) 37 | Provides-Extra: security 38 | Provides-Extra: socks 39 | Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks' 40 | Provides-Extra: use_chardet_on_py3 41 | Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3' 42 | 43 | # Requests 44 | 45 | **Requests** is a simple, yet elegant, HTTP library. 46 | 47 | ```python 48 | >>> import requests 49 | >>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass')) 50 | >>> r.status_code 51 | 200 52 | >>> r.headers['content-type'] 53 | 'application/json; charset=utf8' 54 | >>> r.encoding 55 | 'utf-8' 56 | >>> r.text 57 | '{"authenticated": true, ...' 58 | >>> r.json() 59 | {'authenticated': True, ...} 60 | ``` 61 | 62 | Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! 63 | 64 | Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code. 65 | 66 | [![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests) 67 | [![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests) 68 | [![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors) 69 | 70 | ## Installing Requests and Supported Versions 71 | 72 | Requests is available on PyPI: 73 | 74 | ```console 75 | $ python -m pip install requests 76 | ``` 77 | 78 | Requests officially supports Python 3.7+. 
79 | 80 | ## Supported Features & Best–Practices 81 | 82 | Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. 83 | 84 | - Keep-Alive & Connection Pooling 85 | - International Domains and URLs 86 | - Sessions with Cookie Persistence 87 | - Browser-style TLS/SSL Verification 88 | - Basic & Digest Authentication 89 | - Familiar `dict`–like Cookies 90 | - Automatic Content Decompression and Decoding 91 | - Multi-part File Uploads 92 | - SOCKS Proxy Support 93 | - Connection Timeouts 94 | - Streaming Downloads 95 | - Automatic honoring of `.netrc` 96 | - Chunked HTTP Requests 97 | 98 | ## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io) 99 | 100 | [![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io) 101 | 102 | ## Cloning the repository 103 | 104 | When cloning the Requests repository, you may need to add the `-c 105 | fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see 106 | [this issue](https://github.com/psf/requests/issues/2690) for more background): 107 | 108 | ```shell 109 | git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git 110 | ``` 111 | 112 | You can also apply this setting to your global Git config: 113 | 114 | ```shell 115 | git config --global fetch.fsck.badTimezone ignore 116 | ``` 117 | 118 | --- 119 | 120 | [![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf) 121 | 122 | 123 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Flow Launcher ChatGPT Plugin 2 | This plugin allows you to use OpenAI's ChatGPT models (gpt-3.5-turbo and gpt-4) with [Flow Launcher](https://www.flowlauncher.com/). 3 | 4 | ![Demo video of the Flow Launcher ChatGPT Plugin](https://i.imgur.com/WQwNY7y.gif) 5 | 6 | ## Features 7 | - 💡 Set which chat model you want to use (gpt-3.5-turbo or gpt-4) 8 | - 📝 Use keywords to select if you want a short, long or standard answer 9 | - 💬 Add custom keywords and system prompts to change the style and content of the output 10 | - 🗃️ Copy the answer or open it in a new text file 11 | - ✋ Activate the query via a custom "stop keyword" 12 | 13 | ## Prerequisites 14 | 1. An account at OpenAI. 15 | 2. A payment method configured in your OpenAI profile [here](https://platform.openai.com/account/billing/payment-methods). 16 | 3. An API key for OpenAI that can be retrieved [here](https://platform.openai.com/account/api-keys). 17 | 18 | ## Installation 19 | 1. Download and install [Flow Launcher](https://www.flowlauncher.com/). 20 | 2. Launch Flow Launcher, then enter `Settings` to open its settings. 21 | 3. Go to the `Plugin Store` module. 22 | 4. Search for `ChatGPT`. 23 | 5. Click `ChatGPT` and then `Install`. 24 | 6. `Flow Launcher` should automatically restart. If not, manually restart `Flow Launcher`. 25 | 7. Go to the `Plugins` module in Flow Launcher. 26 | 8. The ChatGPT plugin should be visible. Select it. 27 | 9. Paste your OpenAI API key in the API Key field. 28 | 10. Adjust the setting (see below) to your own liking. 29 | 11. Run the 'Save Settings' command in Flow Launcher. 30 | 31 | ## Usage 32 | ### Basic 33 | 1. 
Activate by using the `ai` keyword. 34 | 2. Type any prompt and add the stop keyword at the end (default: `||`). 35 | 3. Wait until the list is updated. 36 | 4. Copy the content or open it in a new text file. 37 | 38 | ### Using system prompts 39 | System prompts are the messages sent to ChatGPT to set the behavior of its responses. A system prompt is activated by adding its Keyword at the start of your prompt. When no Keyword is found, the default system prompt will be used (see below). 40 | 41 | By default, the plugin contains the following system prompts: 42 | |Keyword | System Prompt | 43 | |---------|---------------| 44 | |normal|You are an all-knowing AI bot.| 45 | |short|You are an all-knowing AI bot. All your answers are short, to the point, and don't give any additional context.| 46 | |long|You are an all-knowing AI bot. All your answers are in-depth and give both a step-by-step explanation how you came to that answer, as well as references to the resources you used.| 47 | 48 | The different outputs for the prompt "Test" are as follows: 49 | |Keyword| Output| 50 | |--------|-------| 51 | |normal|I'm here to help. How may I assist you with your test?| 52 | |short|Passed.| 53 | |long|As an all-knowing AI bot, when I receive the input "test," I determine whether you are asking me to provide a test-related response or evaluate my knowledge and capabilities. Here, your query is brief and ambiguous, but I will provide an example of how I process and respond to questions. Step 1: Analyze the input: I examine the key term "test" and search for relevant context or additional details to understand your intention. Step 2: Generate a response: Based on your input, I try to cover the evident possibilities in my reply, thus accommodating your potential purpose. This includes testing my understanding, problem-solving capabilities, or demonstrating my knowledge on a specific topic. Step 3: Retrieve relevant resources: Since you did not provide specific details with your query, I cannot include references or resources that support my response. However, a more contextually-rich question will allow me to conduct comprehensive research and cite appropriate resources. If you can provide additional information or clarify the context for your query, I will be able to generate an in-depth response based on your request. 54 | 55 | ## Adding your own system prompts 56 | You can add your own prompts in the following way: 57 | 1. Open Flow Launcher. 58 | 2. Type `Settings`. 59 | 3. Go to Plugins -> ChatGPT. 60 | 4. Click the small folder icon. 61 | 5. In the folder that opens, open `system_messages.csv`. 62 | 6. In the first column, add a new Keyword (without spaces). 63 | 7. In the second column, add the System Prompt that you would like to trigger with that Keyword. 64 | 8. Save the file. 65 | 66 | Check out [this GitHub page](https://github.com/f/awesome-chatgpt-prompts) for some awesome prompts. 67 | 68 | ## Settings 69 | |Setting|Description|Default value| 70 | |-------|-----------|-------------| 71 | |Action keyword|Keyword to type to activate this plugin|_ai_| 72 | |API Key|API Key to use with OpenAI's APIs. Can be found [here](https://platform.openai.com/account/api-keys).|_none_| 73 | |Model|The ChatGPT model version that will be used to call the API.
Note: you need access to the model to be able to use it.|_gpt-3.5-turbo_| 74 | |Prompt stop|Characters at the end of the sentence that will trigger the search| || | 75 | |Default system prompt|The default keyword that will be used to lookup a System Prompt when no specific prompt has been given.| _normal_ | 76 | |Custom URL|Custom OpenAI Format API endpoint|_https://api.openai.com/v1/chat/completions_| 77 | 78 | # Backlog 79 | * Ability to take into account the context of the previous prompts. 80 | -------------------------------------------------------------------------------- /lib/urllib3/util/connection.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import socket 4 | 5 | from ..contrib import _appengine_environ 6 | from ..exceptions import LocationParseError 7 | from ..packages import six 8 | from .wait import NoWayToWaitForSocketError, wait_for_read 9 | 10 | 11 | def is_connection_dropped(conn): # Platform-specific 12 | """ 13 | Returns True if the connection is dropped and should be closed. 14 | 15 | :param conn: 16 | :class:`http.client.HTTPConnection` object. 17 | 18 | Note: For platforms like AppEngine, this will always return ``False`` to 19 | let the platform handle connection recycling transparently for us. 20 | """ 21 | sock = getattr(conn, "sock", False) 22 | if sock is False: # Platform-specific: AppEngine 23 | return False 24 | if sock is None: # Connection already closed (such as by httplib). 25 | return True 26 | try: 27 | # Returns True if readable, which here means it's been dropped 28 | return wait_for_read(sock, timeout=0.0) 29 | except NoWayToWaitForSocketError: # Platform-specific: AppEngine 30 | return False 31 | 32 | 33 | # This function is copied from socket.py in the Python 2.7 standard 34 | # library test suite. Added to its signature is only `socket_options`. 35 | # One additional modification is that we avoid binding to IPv6 servers 36 | # discovered in DNS if the system doesn't have IPv6 functionality. 37 | def create_connection( 38 | address, 39 | timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 40 | source_address=None, 41 | socket_options=None, 42 | ): 43 | """Connect to *address* and return the socket object. 44 | 45 | Convenience function. Connect to *address* (a 2-tuple ``(host, 46 | port)``) and return the socket object. Passing the optional 47 | *timeout* parameter will set the timeout on the socket instance 48 | before attempting to connect. If no *timeout* is supplied, the 49 | global default timeout setting returned by :func:`socket.getdefaulttimeout` 50 | is used. If *source_address* is set it must be a tuple of (host, port) 51 | for the socket to bind as a source address before making the connection. 52 | An host of '' or port 0 tells the OS to use the default. 53 | """ 54 | 55 | host, port = address 56 | if host.startswith("["): 57 | host = host.strip("[]") 58 | err = None 59 | 60 | # Using the value from allowed_gai_family() in the context of getaddrinfo lets 61 | # us select whether to work with IPv4 DNS records, IPv6 records, or both. 62 | # The original create_connection function always returns all records. 
63 | family = allowed_gai_family() 64 | 65 | try: 66 | host.encode("idna") 67 | except UnicodeError: 68 | return six.raise_from( 69 | LocationParseError(u"'%s', label empty or too long" % host), None 70 | ) 71 | 72 | for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): 73 | af, socktype, proto, canonname, sa = res 74 | sock = None 75 | try: 76 | sock = socket.socket(af, socktype, proto) 77 | 78 | # If provided, set socket level options before connecting. 79 | _set_socket_options(sock, socket_options) 80 | 81 | if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: 82 | sock.settimeout(timeout) 83 | if source_address: 84 | sock.bind(source_address) 85 | sock.connect(sa) 86 | return sock 87 | 88 | except socket.error as e: 89 | err = e 90 | if sock is not None: 91 | sock.close() 92 | sock = None 93 | 94 | if err is not None: 95 | raise err 96 | 97 | raise socket.error("getaddrinfo returns an empty list") 98 | 99 | 100 | def _set_socket_options(sock, options): 101 | if options is None: 102 | return 103 | 104 | for opt in options: 105 | sock.setsockopt(*opt) 106 | 107 | 108 | def allowed_gai_family(): 109 | """This function is designed to work in the context of 110 | getaddrinfo, where family=socket.AF_UNSPEC is the default and 111 | will perform a DNS search for both IPv6 and IPv4 records.""" 112 | 113 | family = socket.AF_INET 114 | if HAS_IPV6: 115 | family = socket.AF_UNSPEC 116 | return family 117 | 118 | 119 | def _has_ipv6(host): 120 | """Returns True if the system can bind an IPv6 address.""" 121 | sock = None 122 | has_ipv6 = False 123 | 124 | # App Engine doesn't support IPV6 sockets and actually has a quota on the 125 | # number of sockets that can be used, so just early out here instead of 126 | # creating a socket needlessly. 127 | # See https://github.com/urllib3/urllib3/issues/1446 128 | if _appengine_environ.is_appengine_sandbox(): 129 | return False 130 | 131 | if socket.has_ipv6: 132 | # has_ipv6 returns true if cPython was compiled with IPv6 support. 133 | # It does not tell us if the system has IPv6 support enabled. To 134 | # determine that we must bind to an IPv6 address. 135 | # https://github.com/urllib3/urllib3/pull/611 136 | # https://bugs.python.org/issue658327 137 | try: 138 | sock = socket.socket(socket.AF_INET6) 139 | sock.bind((host, 0)) 140 | has_ipv6 = True 141 | except Exception: 142 | pass 143 | 144 | if sock: 145 | sock.close() 146 | return has_ipv6 147 | 148 | 149 | HAS_IPV6 = _has_ipv6("::1") 150 | -------------------------------------------------------------------------------- /lib/requests/__init__.py: -------------------------------------------------------------------------------- 1 | # __ 2 | # /__) _ _ _ _ _/ _ 3 | # / ( (- (/ (/ (- _) / _) 4 | # / 5 | 6 | """ 7 | Requests HTTP Library 8 | ~~~~~~~~~~~~~~~~~~~~~ 9 | 10 | Requests is an HTTP library, written in Python, for human beings. 11 | Basic GET usage: 12 | 13 | >>> import requests 14 | >>> r = requests.get('https://www.python.org') 15 | >>> r.status_code 16 | 200 17 | >>> b'Python is a programming language' in r.content 18 | True 19 | 20 | ... or POST: 21 | 22 | >>> payload = dict(key1='value1', key2='value2') 23 | >>> r = requests.post('https://httpbin.org/post', data=payload) 24 | >>> print(r.text) 25 | { 26 | ... 27 | "form": { 28 | "key1": "value1", 29 | "key2": "value2" 30 | }, 31 | ... 32 | } 33 | 34 | The other HTTP methods are supported - see `requests.api`. Full documentation 35 | is at . 36 | 37 | :copyright: (c) 2017 by Kenneth Reitz. 
38 | :license: Apache 2.0, see LICENSE for more details. 39 | """ 40 | 41 | import warnings 42 | 43 | import urllib3 44 | 45 | from .exceptions import RequestsDependencyWarning 46 | 47 | try: 48 | from charset_normalizer import __version__ as charset_normalizer_version 49 | except ImportError: 50 | charset_normalizer_version = None 51 | 52 | try: 53 | from chardet import __version__ as chardet_version 54 | except ImportError: 55 | chardet_version = None 56 | 57 | 58 | def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): 59 | urllib3_version = urllib3_version.split(".") 60 | assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. 61 | 62 | # Sometimes, urllib3 only reports its version as 16.1. 63 | if len(urllib3_version) == 2: 64 | urllib3_version.append("0") 65 | 66 | # Check urllib3 for compatibility. 67 | major, minor, patch = urllib3_version # noqa: F811 68 | major, minor, patch = int(major), int(minor), int(patch) 69 | # urllib3 >= 1.21.1, <= 1.26 70 | assert major == 1 71 | assert minor >= 21 72 | assert minor <= 26 73 | 74 | # Check charset_normalizer for compatibility. 75 | if chardet_version: 76 | major, minor, patch = chardet_version.split(".")[:3] 77 | major, minor, patch = int(major), int(minor), int(patch) 78 | # chardet_version >= 3.0.2, < 6.0.0 79 | assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) 80 | elif charset_normalizer_version: 81 | major, minor, patch = charset_normalizer_version.split(".")[:3] 82 | major, minor, patch = int(major), int(minor), int(patch) 83 | # charset_normalizer >= 2.0.0 < 3.0.0 84 | assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) 85 | else: 86 | raise Exception("You need either charset_normalizer or chardet installed") 87 | 88 | 89 | def _check_cryptography(cryptography_version): 90 | # cryptography < 1.3.4 91 | try: 92 | cryptography_version = list(map(int, cryptography_version.split("."))) 93 | except ValueError: 94 | return 95 | 96 | if cryptography_version < [1, 3, 4]: 97 | warning = "Old version of cryptography ({}) may cause slowdown.".format( 98 | cryptography_version 99 | ) 100 | warnings.warn(warning, RequestsDependencyWarning) 101 | 102 | 103 | # Check imported dependencies for compatibility. 104 | try: 105 | check_compatibility( 106 | urllib3.__version__, chardet_version, charset_normalizer_version 107 | ) 108 | except (AssertionError, ValueError): 109 | warnings.warn( 110 | "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " 111 | "version!".format( 112 | urllib3.__version__, chardet_version, charset_normalizer_version 113 | ), 114 | RequestsDependencyWarning, 115 | ) 116 | 117 | # Attempt to enable urllib3's fallback for SNI support 118 | # if the standard library doesn't support SNI or the 119 | # 'ssl' library isn't available. 120 | try: 121 | try: 122 | import ssl 123 | except ImportError: 124 | ssl = None 125 | 126 | if not getattr(ssl, "HAS_SNI", False): 127 | from urllib3.contrib import pyopenssl 128 | 129 | pyopenssl.inject_into_urllib3() 130 | 131 | # Check cryptography version 132 | from cryptography import __version__ as cryptography_version 133 | 134 | _check_cryptography(cryptography_version) 135 | except ImportError: 136 | pass 137 | 138 | # urllib3's DependencyWarnings should be silenced. 139 | from urllib3.exceptions import DependencyWarning 140 | 141 | warnings.simplefilter("ignore", DependencyWarning) 142 | 143 | # Set default logging handler to avoid "No handler found" warnings. 
144 | import logging 145 | from logging import NullHandler 146 | 147 | from . import packages, utils 148 | from .__version__ import ( 149 | __author__, 150 | __author_email__, 151 | __build__, 152 | __cake__, 153 | __copyright__, 154 | __description__, 155 | __license__, 156 | __title__, 157 | __url__, 158 | __version__, 159 | ) 160 | from .api import delete, get, head, options, patch, post, put, request 161 | from .exceptions import ( 162 | ConnectionError, 163 | ConnectTimeout, 164 | FileModeWarning, 165 | HTTPError, 166 | JSONDecodeError, 167 | ReadTimeout, 168 | RequestException, 169 | Timeout, 170 | TooManyRedirects, 171 | URLRequired, 172 | ) 173 | from .models import PreparedRequest, Request, Response 174 | from .sessions import Session, session 175 | from .status_codes import codes 176 | 177 | logging.getLogger(__name__).addHandler(NullHandler()) 178 | 179 | # FileModeWarnings go off per the default. 180 | warnings.simplefilter("default", FileModeWarning, append=True) 181 | -------------------------------------------------------------------------------- /lib/urllib3-1.26.15.dist-info/RECORD: -------------------------------------------------------------------------------- 1 | urllib3-1.26.15.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 2 | urllib3-1.26.15.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115 3 | urllib3-1.26.15.dist-info/METADATA,sha256=blT5BzPICGt1qpMvC6GdfIBUNNMobmh1bXPyuOQmq30,48125 4 | urllib3-1.26.15.dist-info/RECORD,, 5 | urllib3-1.26.15.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110 6 | urllib3-1.26.15.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8 7 | urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333 8 | urllib3/__pycache__/__init__.cpython-310.pyc,, 9 | urllib3/__pycache__/_collections.cpython-310.pyc,, 10 | urllib3/__pycache__/_version.cpython-310.pyc,, 11 | urllib3/__pycache__/connection.cpython-310.pyc,, 12 | urllib3/__pycache__/connectionpool.cpython-310.pyc,, 13 | urllib3/__pycache__/exceptions.cpython-310.pyc,, 14 | urllib3/__pycache__/fields.cpython-310.pyc,, 15 | urllib3/__pycache__/filepost.cpython-310.pyc,, 16 | urllib3/__pycache__/poolmanager.cpython-310.pyc,, 17 | urllib3/__pycache__/request.cpython-310.pyc,, 18 | urllib3/__pycache__/response.cpython-310.pyc,, 19 | urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 20 | urllib3/_version.py,sha256=vFwhFPO1DTzD8xawsdSDwriGSheS7LurJQL9fSgM_IM,64 21 | urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300 22 | urllib3/connectionpool.py,sha256=u7I7TzJTsicVoNjGeZkCD5LANp_GCeDNBwXZoGHHVLo,39128 23 | urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 24 | urllib3/contrib/__pycache__/__init__.cpython-310.pyc,, 25 | urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc,, 26 | urllib3/contrib/__pycache__/appengine.cpython-310.pyc,, 27 | urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc,, 28 | urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc,, 29 | urllib3/contrib/__pycache__/securetransport.cpython-310.pyc,, 30 | urllib3/contrib/__pycache__/socks.cpython-310.pyc,, 31 | urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 32 | urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 33 | urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc,, 34 | 
urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc,, 35 | urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc,, 36 | urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 37 | urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 38 | urllib3/contrib/appengine.py,sha256=6IBW6lPOoVUxASPwtn6IH1AATe5DK3lLJCfwyWlLKAE,11012 39 | urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528 40 | urllib3/contrib/pyopenssl.py,sha256=4AJAlo9NmjWofY4dJwRa4kbZuRuHfNJxu8Pv6yQk1ss,17055 41 | urllib3/contrib/securetransport.py,sha256=QOhVbWrFQTKbmV-vtyG69amekkKVxXkdjk9oymaO0Ag,34416 42 | urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 43 | urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 44 | urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 45 | urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 46 | urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 47 | urllib3/packages/__pycache__/__init__.cpython-310.pyc,, 48 | urllib3/packages/__pycache__/six.cpython-310.pyc,, 49 | urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 50 | urllib3/packages/backports/__pycache__/__init__.cpython-310.pyc,, 51 | urllib3/packages/backports/__pycache__/makefile.cpython-310.pyc,, 52 | urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 53 | urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665 54 | urllib3/poolmanager.py,sha256=0KOOJECoeLYVjUHvv-0h4Oq3FFQQ2yb-Fnjkbj8gJO0,19786 55 | urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985 56 | urllib3/response.py,sha256=UPgLmnHj4z71ZnH8ivYOyncATifTOw9FQukUqDnckCc,30761 57 | urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 58 | urllib3/util/__pycache__/__init__.cpython-310.pyc,, 59 | urllib3/util/__pycache__/connection.cpython-310.pyc,, 60 | urllib3/util/__pycache__/proxy.cpython-310.pyc,, 61 | urllib3/util/__pycache__/queue.cpython-310.pyc,, 62 | urllib3/util/__pycache__/request.cpython-310.pyc,, 63 | urllib3/util/__pycache__/response.cpython-310.pyc,, 64 | urllib3/util/__pycache__/retry.cpython-310.pyc,, 65 | urllib3/util/__pycache__/ssl_.cpython-310.pyc,, 66 | urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc,, 67 | urllib3/util/__pycache__/ssltransport.cpython-310.pyc,, 68 | urllib3/util/__pycache__/timeout.cpython-310.pyc,, 69 | urllib3/util/__pycache__/url.cpython-310.pyc,, 70 | urllib3/util/__pycache__/wait.cpython-310.pyc,, 71 | urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 72 | urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 73 | urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 74 | urllib3/util/request.py,sha256=fWiAaa8pwdLLIqoTLBxCC2e4ed80muzKU3e3HWWTzFQ,4225 75 | urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 76 | urllib3/util/retry.py,sha256=4laWh0HpwGijLiBmdBIYtbhYekQnNzzhx2W9uys0RHA,22003 77 | urllib3/util/ssl_.py,sha256=c0sYiSC6272r6uPkxQpo5rYPP9QC1eR6oI7004gYqZo,17165 78 | urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758 79 | urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 80 | 
urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168 81 | urllib3/util/url.py,sha256=kMxL1k0d-aQm_iZDw_zMmnyYyjrIA_DbsMy3cm3V55M,14279 82 | urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403 83 | -------------------------------------------------------------------------------- /lib/urllib3/util/wait.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import select 3 | import sys 4 | from functools import partial 5 | 6 | try: 7 | from time import monotonic 8 | except ImportError: 9 | from time import time as monotonic 10 | 11 | __all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] 12 | 13 | 14 | class NoWayToWaitForSocketError(Exception): 15 | pass 16 | 17 | 18 | # How should we wait on sockets? 19 | # 20 | # There are two types of APIs you can use for waiting on sockets: the fancy 21 | # modern stateful APIs like epoll/kqueue, and the older stateless APIs like 22 | # select/poll. The stateful APIs are more efficient when you have a lots of 23 | # sockets to keep track of, because you can set them up once and then use them 24 | # lots of times. But we only ever want to wait on a single socket at a time 25 | # and don't want to keep track of state, so the stateless APIs are actually 26 | # more efficient. So we want to use select() or poll(). 27 | # 28 | # Now, how do we choose between select() and poll()? On traditional Unixes, 29 | # select() has a strange calling convention that makes it slow, or fail 30 | # altogether, for high-numbered file descriptors. The point of poll() is to fix 31 | # that, so on Unixes, we prefer poll(). 32 | # 33 | # On Windows, there is no poll() (or at least Python doesn't provide a wrapper 34 | # for it), but that's OK, because on Windows, select() doesn't have this 35 | # strange calling convention; plain select() works fine. 36 | # 37 | # So: on Windows we use select(), and everywhere else we use poll(). We also 38 | # fall back to select() in case poll() is somehow broken or missing. 39 | 40 | if sys.version_info >= (3, 5): 41 | # Modern Python, that retries syscalls by default 42 | def _retry_on_intr(fn, timeout): 43 | return fn(timeout) 44 | 45 | else: 46 | # Old and broken Pythons. 47 | def _retry_on_intr(fn, timeout): 48 | if timeout is None: 49 | deadline = float("inf") 50 | else: 51 | deadline = monotonic() + timeout 52 | 53 | while True: 54 | try: 55 | return fn(timeout) 56 | # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 57 | except (OSError, select.error) as e: 58 | # 'e.args[0]' incantation works for both OSError and select.error 59 | if e.args[0] != errno.EINTR: 60 | raise 61 | else: 62 | timeout = deadline - monotonic() 63 | if timeout < 0: 64 | timeout = 0 65 | if timeout == float("inf"): 66 | timeout = None 67 | continue 68 | 69 | 70 | def select_wait_for_socket(sock, read=False, write=False, timeout=None): 71 | if not read and not write: 72 | raise RuntimeError("must specify at least one of read=True, write=True") 73 | rcheck = [] 74 | wcheck = [] 75 | if read: 76 | rcheck.append(sock) 77 | if write: 78 | wcheck.append(sock) 79 | # When doing a non-blocking connect, most systems signal success by 80 | # marking the socket writable. Windows, though, signals success by marked 81 | # it as "exceptional". We paper over the difference by checking the write 82 | # sockets for both conditions. (The stdlib selectors module does the same 83 | # thing.) 
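# (select()'s third positional argument is the "exceptional conditions" list, which is
# why wcheck is passed twice in the call below: a non-blocking connect that completes on
# Windows shows up in the exceptional set rather than the writable set.)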
84 | fn = partial(select.select, rcheck, wcheck, wcheck) 85 | rready, wready, xready = _retry_on_intr(fn, timeout) 86 | return bool(rready or wready or xready) 87 | 88 | 89 | def poll_wait_for_socket(sock, read=False, write=False, timeout=None): 90 | if not read and not write: 91 | raise RuntimeError("must specify at least one of read=True, write=True") 92 | mask = 0 93 | if read: 94 | mask |= select.POLLIN 95 | if write: 96 | mask |= select.POLLOUT 97 | poll_obj = select.poll() 98 | poll_obj.register(sock, mask) 99 | 100 | # For some reason, poll() takes timeout in milliseconds 101 | def do_poll(t): 102 | if t is not None: 103 | t *= 1000 104 | return poll_obj.poll(t) 105 | 106 | return bool(_retry_on_intr(do_poll, timeout)) 107 | 108 | 109 | def null_wait_for_socket(*args, **kwargs): 110 | raise NoWayToWaitForSocketError("no select-equivalent available") 111 | 112 | 113 | def _have_working_poll(): 114 | # Apparently some systems have a select.poll that fails as soon as you try 115 | # to use it, either due to strange configuration or broken monkeypatching 116 | # from libraries like eventlet/greenlet. 117 | try: 118 | poll_obj = select.poll() 119 | _retry_on_intr(poll_obj.poll, 0) 120 | except (AttributeError, OSError): 121 | return False 122 | else: 123 | return True 124 | 125 | 126 | def wait_for_socket(*args, **kwargs): 127 | # We delay choosing which implementation to use until the first time we're 128 | # called. We could do it at import time, but then we might make the wrong 129 | # decision if someone goes wild with monkeypatching select.poll after 130 | # we're imported. 131 | global wait_for_socket 132 | if _have_working_poll(): 133 | wait_for_socket = poll_wait_for_socket 134 | elif hasattr(select, "select"): 135 | wait_for_socket = select_wait_for_socket 136 | else: # Platform-specific: Appengine. 137 | wait_for_socket = null_wait_for_socket 138 | return wait_for_socket(*args, **kwargs) 139 | 140 | 141 | def wait_for_read(sock, timeout=None): 142 | """Waits for reading to be available on a given socket. 143 | Returns True if the socket is readable, or False if the timeout expired. 144 | """ 145 | return wait_for_socket(sock, read=True, timeout=timeout) 146 | 147 | 148 | def wait_for_write(sock, timeout=None): 149 | """Waits for writing to be available on a given socket. 150 | Returns True if the socket is readable, or False if the timeout expired. 151 | """ 152 | return wait_for_socket(sock, write=True, timeout=timeout) 153 | -------------------------------------------------------------------------------- /lib/urllib3/util/ssl_match_hostname.py: -------------------------------------------------------------------------------- 1 | """The match_hostname() function from Python 3.3.3, essential when using SSL.""" 2 | 3 | # Note: This file is under the PSF license as the code comes from the python 4 | # stdlib. http://docs.python.org/3/license.html 5 | 6 | import re 7 | import sys 8 | 9 | # ipaddress has been backported to 2.6+ in pypi. If it is installed on the 10 | # system, use it to handle IPAddress ServerAltnames (this was added in 11 | # python-3.5) otherwise only do DNS matching. This allows 12 | # util.ssl_match_hostname to continue to be used in Python 2.7. 
13 | try: 14 | import ipaddress 15 | except ImportError: 16 | ipaddress = None 17 | 18 | __version__ = "3.5.0.1" 19 | 20 | 21 | class CertificateError(ValueError): 22 | pass 23 | 24 | 25 | def _dnsname_match(dn, hostname, max_wildcards=1): 26 | """Matching according to RFC 6125, section 6.4.3 27 | 28 | http://tools.ietf.org/html/rfc6125#section-6.4.3 29 | """ 30 | pats = [] 31 | if not dn: 32 | return False 33 | 34 | # Ported from python3-syntax: 35 | # leftmost, *remainder = dn.split(r'.') 36 | parts = dn.split(r".") 37 | leftmost = parts[0] 38 | remainder = parts[1:] 39 | 40 | wildcards = leftmost.count("*") 41 | if wildcards > max_wildcards: 42 | # Issue #17980: avoid denials of service by refusing more 43 | # than one wildcard per fragment. A survey of established 44 | # policy among SSL implementations showed it to be a 45 | # reasonable choice. 46 | raise CertificateError( 47 | "too many wildcards in certificate DNS name: " + repr(dn) 48 | ) 49 | 50 | # speed up common case w/o wildcards 51 | if not wildcards: 52 | return dn.lower() == hostname.lower() 53 | 54 | # RFC 6125, section 6.4.3, subitem 1. 55 | # The client SHOULD NOT attempt to match a presented identifier in which 56 | # the wildcard character comprises a label other than the left-most label. 57 | if leftmost == "*": 58 | # When '*' is a fragment by itself, it matches a non-empty dotless 59 | # fragment. 60 | pats.append("[^.]+") 61 | elif leftmost.startswith("xn--") or hostname.startswith("xn--"): 62 | # RFC 6125, section 6.4.3, subitem 3. 63 | # The client SHOULD NOT attempt to match a presented identifier 64 | # where the wildcard character is embedded within an A-label or 65 | # U-label of an internationalized domain name. 66 | pats.append(re.escape(leftmost)) 67 | else: 68 | # Otherwise, '*' matches any dotless string, e.g. www* 69 | pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) 70 | 71 | # add the remaining fragments, ignore any wildcards 72 | for frag in remainder: 73 | pats.append(re.escape(frag)) 74 | 75 | pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) 76 | return pat.match(hostname) 77 | 78 | 79 | def _to_unicode(obj): 80 | if isinstance(obj, str) and sys.version_info < (3,): 81 | # ignored flake8 # F821 to support python 2.7 function 82 | obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821 83 | return obj 84 | 85 | 86 | def _ipaddress_match(ipname, host_ip): 87 | """Exact matching of IP addresses. 88 | 89 | RFC 6125 explicitly doesn't define an algorithm for this 90 | (section 1.7.2 - "Out of Scope"). 91 | """ 92 | # OpenSSL may add a trailing newline to a subjectAltName's IP address 93 | # Divergence from upstream: ipaddress can't handle byte str 94 | ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) 95 | return ip == host_ip 96 | 97 | 98 | def match_hostname(cert, hostname): 99 | """Verify that *cert* (in decoded format as returned by 100 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 101 | rules are followed, but IP addresses are not accepted for *hostname*. 102 | 103 | CertificateError is raised on failure. On success, the function 104 | returns nothing. 
105 | """ 106 | if not cert: 107 | raise ValueError( 108 | "empty or no certificate, match_hostname needs a " 109 | "SSL socket or SSL context with either " 110 | "CERT_OPTIONAL or CERT_REQUIRED" 111 | ) 112 | try: 113 | # Divergence from upstream: ipaddress can't handle byte str 114 | host_ip = ipaddress.ip_address(_to_unicode(hostname)) 115 | except (UnicodeError, ValueError): 116 | # ValueError: Not an IP address (common case) 117 | # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking 118 | # byte strings. addresses should be all ascii, so we consider it not 119 | # an ipaddress in this case 120 | host_ip = None 121 | except AttributeError: 122 | # Divergence from upstream: Make ipaddress library optional 123 | if ipaddress is None: 124 | host_ip = None 125 | else: # Defensive 126 | raise 127 | dnsnames = [] 128 | san = cert.get("subjectAltName", ()) 129 | for key, value in san: 130 | if key == "DNS": 131 | if host_ip is None and _dnsname_match(value, hostname): 132 | return 133 | dnsnames.append(value) 134 | elif key == "IP Address": 135 | if host_ip is not None and _ipaddress_match(value, host_ip): 136 | return 137 | dnsnames.append(value) 138 | if not dnsnames: 139 | # The subject is only checked when there is no dNSName entry 140 | # in subjectAltName 141 | for sub in cert.get("subject", ()): 142 | for key, value in sub: 143 | # XXX according to RFC 2818, the most specific Common Name 144 | # must be used. 145 | if key == "commonName": 146 | if _dnsname_match(value, hostname): 147 | return 148 | dnsnames.append(value) 149 | if len(dnsnames) > 1: 150 | raise CertificateError( 151 | "hostname %r " 152 | "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) 153 | ) 154 | elif len(dnsnames) == 1: 155 | raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) 156 | else: 157 | raise CertificateError( 158 | "no appropriate commonName or subjectAltName fields were found" 159 | ) 160 | -------------------------------------------------------------------------------- /lib/urllib3/request.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from .filepost import encode_multipart_formdata 4 | from .packages.six.moves.urllib.parse import urlencode 5 | 6 | __all__ = ["RequestMethods"] 7 | 8 | 9 | class RequestMethods(object): 10 | """ 11 | Convenience mixin for classes who implement a :meth:`urlopen` method, such 12 | as :class:`urllib3.HTTPConnectionPool` and 13 | :class:`urllib3.PoolManager`. 14 | 15 | Provides behavior for making common types of HTTP request methods and 16 | decides which type of request field encoding to use. 17 | 18 | Specifically, 19 | 20 | :meth:`.request_encode_url` is for sending requests whose fields are 21 | encoded in the URL (such as GET, HEAD, DELETE). 22 | 23 | :meth:`.request_encode_body` is for sending requests whose fields are 24 | encoded in the *body* of the request using multipart or www-form-urlencoded 25 | (such as for POST, PUT, PATCH). 26 | 27 | :meth:`.request` is for making any kind of request, it will look up the 28 | appropriate encoding format and use one of the above two methods to make 29 | the request. 30 | 31 | Initializer parameters: 32 | 33 | :param headers: 34 | Headers to include with all requests, unless other headers are given 35 | explicitly. 
36 | """ 37 | 38 | _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} 39 | 40 | def __init__(self, headers=None): 41 | self.headers = headers or {} 42 | 43 | def urlopen( 44 | self, 45 | method, 46 | url, 47 | body=None, 48 | headers=None, 49 | encode_multipart=True, 50 | multipart_boundary=None, 51 | **kw 52 | ): # Abstract 53 | raise NotImplementedError( 54 | "Classes extending RequestMethods must implement " 55 | "their own ``urlopen`` method." 56 | ) 57 | 58 | def request(self, method, url, fields=None, headers=None, **urlopen_kw): 59 | """ 60 | Make a request using :meth:`urlopen` with the appropriate encoding of 61 | ``fields`` based on the ``method`` used. 62 | 63 | This is a convenience method that requires the least amount of manual 64 | effort. It can be used in most situations, while still having the 65 | option to drop down to more specific methods when necessary, such as 66 | :meth:`request_encode_url`, :meth:`request_encode_body`, 67 | or even the lowest level :meth:`urlopen`. 68 | """ 69 | method = method.upper() 70 | 71 | urlopen_kw["request_url"] = url 72 | 73 | if method in self._encode_url_methods: 74 | return self.request_encode_url( 75 | method, url, fields=fields, headers=headers, **urlopen_kw 76 | ) 77 | else: 78 | return self.request_encode_body( 79 | method, url, fields=fields, headers=headers, **urlopen_kw 80 | ) 81 | 82 | def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw): 83 | """ 84 | Make a request using :meth:`urlopen` with the ``fields`` encoded in 85 | the url. This is useful for request methods like GET, HEAD, DELETE, etc. 86 | """ 87 | if headers is None: 88 | headers = self.headers 89 | 90 | extra_kw = {"headers": headers} 91 | extra_kw.update(urlopen_kw) 92 | 93 | if fields: 94 | url += "?" + urlencode(fields) 95 | 96 | return self.urlopen(method, url, **extra_kw) 97 | 98 | def request_encode_body( 99 | self, 100 | method, 101 | url, 102 | fields=None, 103 | headers=None, 104 | encode_multipart=True, 105 | multipart_boundary=None, 106 | **urlopen_kw 107 | ): 108 | """ 109 | Make a request using :meth:`urlopen` with the ``fields`` encoded in 110 | the body. This is useful for request methods like POST, PUT, PATCH, etc. 111 | 112 | When ``encode_multipart=True`` (default), then 113 | :func:`urllib3.encode_multipart_formdata` is used to encode 114 | the payload with the appropriate content type. Otherwise 115 | :func:`urllib.parse.urlencode` is used with the 116 | 'application/x-www-form-urlencoded' content type. 117 | 118 | Multipart encoding must be used when posting files, and it's reasonably 119 | safe to use it in other times too. However, it may break request 120 | signing, such as with OAuth. 121 | 122 | Supports an optional ``fields`` parameter of key/value strings AND 123 | key/filetuple. A filetuple is a (filename, data, MIME type) tuple where 124 | the MIME type is optional. For example:: 125 | 126 | fields = { 127 | 'foo': 'bar', 128 | 'fakefile': ('foofile.txt', 'contents of foofile'), 129 | 'realfile': ('barfile.txt', open('realfile').read()), 130 | 'typedfile': ('bazfile.bin', open('bazfile').read(), 131 | 'image/jpeg'), 132 | 'nonamefile': 'contents of nonamefile field', 133 | } 134 | 135 | When uploading a file, providing a filename (the first parameter of the 136 | tuple) is optional but recommended to best mimic behavior of browsers. 
137 | 138 | Note that if ``headers`` are supplied, the 'Content-Type' header will 139 | be overwritten because it depends on the dynamic random boundary string 140 | which is used to compose the body of the request. The random boundary 141 | string can be explicitly set with the ``multipart_boundary`` parameter. 142 | """ 143 | if headers is None: 144 | headers = self.headers 145 | 146 | extra_kw = {"headers": {}} 147 | 148 | if fields: 149 | if "body" in urlopen_kw: 150 | raise TypeError( 151 | "request got values for both 'fields' and 'body', can only specify one." 152 | ) 153 | 154 | if encode_multipart: 155 | body, content_type = encode_multipart_formdata( 156 | fields, boundary=multipart_boundary 157 | ) 158 | else: 159 | body, content_type = ( 160 | urlencode(fields), 161 | "application/x-www-form-urlencoded", 162 | ) 163 | 164 | extra_kw["body"] = body 165 | extra_kw["headers"] = {"Content-Type": content_type} 166 | 167 | extra_kw["headers"].update(headers) 168 | extra_kw.update(urlopen_kw) 169 | 170 | return self.urlopen(method, url, **extra_kw) 171 | -------------------------------------------------------------------------------- /lib/requests/api.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests.api 3 | ~~~~~~~~~~~~ 4 | 5 | This module implements the Requests API. 6 | 7 | :copyright: (c) 2012 by Kenneth Reitz. 8 | :license: Apache2, see LICENSE for more details. 9 | """ 10 | 11 | from . import sessions 12 | 13 | 14 | def request(method, url, **kwargs): 15 | """Constructs and sends a :class:`Request `. 16 | 17 | :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. 18 | :param url: URL for the new :class:`Request` object. 19 | :param params: (optional) Dictionary, list of tuples or bytes to send 20 | in the query string for the :class:`Request`. 21 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 22 | object to send in the body of the :class:`Request`. 23 | :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. 24 | :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. 25 | :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. 26 | :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. 27 | ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` 28 | or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string 29 | defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers 30 | to add for the file. 31 | :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. 32 | :param timeout: (optional) How many seconds to wait for the server to send data 33 | before giving up, as a float, or a :ref:`(connect timeout, read 34 | timeout) ` tuple. 35 | :type timeout: float or tuple 36 | :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. 37 | :type allow_redirects: bool 38 | :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 
39 | :param verify: (optional) Either a boolean, in which case it controls whether we verify 40 | the server's TLS certificate, or a string, in which case it must be a path 41 | to a CA bundle to use. Defaults to ``True``. 42 | :param stream: (optional) if ``False``, the response content will be immediately downloaded. 43 | :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 44 | :return: :class:`Response ` object 45 | :rtype: requests.Response 46 | 47 | Usage:: 48 | 49 | >>> import requests 50 | >>> req = requests.request('GET', 'https://httpbin.org/get') 51 | >>> req 52 | 53 | """ 54 | 55 | # By using the 'with' statement we are sure the session is closed, thus we 56 | # avoid leaving sockets open which can trigger a ResourceWarning in some 57 | # cases, and look like a memory leak in others. 58 | with sessions.Session() as session: 59 | return session.request(method=method, url=url, **kwargs) 60 | 61 | 62 | def get(url, params=None, **kwargs): 63 | r"""Sends a GET request. 64 | 65 | :param url: URL for the new :class:`Request` object. 66 | :param params: (optional) Dictionary, list of tuples or bytes to send 67 | in the query string for the :class:`Request`. 68 | :param \*\*kwargs: Optional arguments that ``request`` takes. 69 | :return: :class:`Response ` object 70 | :rtype: requests.Response 71 | """ 72 | 73 | return request("get", url, params=params, **kwargs) 74 | 75 | 76 | def options(url, **kwargs): 77 | r"""Sends an OPTIONS request. 78 | 79 | :param url: URL for the new :class:`Request` object. 80 | :param \*\*kwargs: Optional arguments that ``request`` takes. 81 | :return: :class:`Response ` object 82 | :rtype: requests.Response 83 | """ 84 | 85 | return request("options", url, **kwargs) 86 | 87 | 88 | def head(url, **kwargs): 89 | r"""Sends a HEAD request. 90 | 91 | :param url: URL for the new :class:`Request` object. 92 | :param \*\*kwargs: Optional arguments that ``request`` takes. If 93 | `allow_redirects` is not provided, it will be set to `False` (as 94 | opposed to the default :meth:`request` behavior). 95 | :return: :class:`Response ` object 96 | :rtype: requests.Response 97 | """ 98 | 99 | kwargs.setdefault("allow_redirects", False) 100 | return request("head", url, **kwargs) 101 | 102 | 103 | def post(url, data=None, json=None, **kwargs): 104 | r"""Sends a POST request. 105 | 106 | :param url: URL for the new :class:`Request` object. 107 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 108 | object to send in the body of the :class:`Request`. 109 | :param json: (optional) json data to send in the body of the :class:`Request`. 110 | :param \*\*kwargs: Optional arguments that ``request`` takes. 111 | :return: :class:`Response ` object 112 | :rtype: requests.Response 113 | """ 114 | 115 | return request("post", url, data=data, json=json, **kwargs) 116 | 117 | 118 | def put(url, data=None, **kwargs): 119 | r"""Sends a PUT request. 120 | 121 | :param url: URL for the new :class:`Request` object. 122 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 123 | object to send in the body of the :class:`Request`. 124 | :param json: (optional) json data to send in the body of the :class:`Request`. 125 | :param \*\*kwargs: Optional arguments that ``request`` takes. 
126 | :return: :class:`Response ` object 127 | :rtype: requests.Response 128 | """ 129 | 130 | return request("put", url, data=data, **kwargs) 131 | 132 | 133 | def patch(url, data=None, **kwargs): 134 | r"""Sends a PATCH request. 135 | 136 | :param url: URL for the new :class:`Request` object. 137 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 138 | object to send in the body of the :class:`Request`. 139 | :param json: (optional) json data to send in the body of the :class:`Request`. 140 | :param \*\*kwargs: Optional arguments that ``request`` takes. 141 | :return: :class:`Response ` object 142 | :rtype: requests.Response 143 | """ 144 | 145 | return request("patch", url, data=data, **kwargs) 146 | 147 | 148 | def delete(url, **kwargs): 149 | r"""Sends a DELETE request. 150 | 151 | :param url: URL for the new :class:`Request` object. 152 | :param \*\*kwargs: Optional arguments that ``request`` takes. 153 | :return: :class:`Response ` object 154 | :rtype: requests.Response 155 | """ 156 | 157 | return request("delete", url, **kwargs) 158 | -------------------------------------------------------------------------------- /lib/urllib3/util/ssltransport.py: -------------------------------------------------------------------------------- 1 | import io 2 | import socket 3 | import ssl 4 | 5 | from ..exceptions import ProxySchemeUnsupported 6 | from ..packages import six 7 | 8 | SSL_BLOCKSIZE = 16384 9 | 10 | 11 | class SSLTransport: 12 | """ 13 | The SSLTransport wraps an existing socket and establishes an SSL connection. 14 | 15 | Contrary to Python's implementation of SSLSocket, it allows you to chain 16 | multiple TLS connections together. It's particularly useful if you need to 17 | implement TLS within TLS. 18 | 19 | The class supports most of the socket API operations. 20 | """ 21 | 22 | @staticmethod 23 | def _validate_ssl_context_for_tls_in_tls(ssl_context): 24 | """ 25 | Raises a ProxySchemeUnsupported if the provided ssl_context can't be used 26 | for TLS in TLS. 27 | 28 | The only requirement is that the ssl_context provides the 'wrap_bio' 29 | methods. 30 | """ 31 | 32 | if not hasattr(ssl_context, "wrap_bio"): 33 | if six.PY2: 34 | raise ProxySchemeUnsupported( 35 | "TLS in TLS requires SSLContext.wrap_bio() which isn't " 36 | "supported on Python 2" 37 | ) 38 | else: 39 | raise ProxySchemeUnsupported( 40 | "TLS in TLS requires SSLContext.wrap_bio() which isn't " 41 | "available on non-native SSLContext" 42 | ) 43 | 44 | def __init__( 45 | self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True 46 | ): 47 | """ 48 | Create an SSLTransport around socket using the provided ssl_context. 49 | """ 50 | self.incoming = ssl.MemoryBIO() 51 | self.outgoing = ssl.MemoryBIO() 52 | 53 | self.suppress_ragged_eofs = suppress_ragged_eofs 54 | self.socket = socket 55 | 56 | self.sslobj = ssl_context.wrap_bio( 57 | self.incoming, self.outgoing, server_hostname=server_hostname 58 | ) 59 | 60 | # Perform initial handshake. 
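# (_ssl_io_loop, defined further down in this class, drives the handshake: it retries
# do_handshake while flushing the outgoing MemoryBIO to the wrapped socket and feeding
# received bytes into the incoming MemoryBIO whenever the SSL object reports
# SSL_ERROR_WANT_READ / SSL_ERROR_WANT_WRITE.)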
61 | self._ssl_io_loop(self.sslobj.do_handshake) 62 | 63 | def __enter__(self): 64 | return self 65 | 66 | def __exit__(self, *_): 67 | self.close() 68 | 69 | def fileno(self): 70 | return self.socket.fileno() 71 | 72 | def read(self, len=1024, buffer=None): 73 | return self._wrap_ssl_read(len, buffer) 74 | 75 | def recv(self, len=1024, flags=0): 76 | if flags != 0: 77 | raise ValueError("non-zero flags not allowed in calls to recv") 78 | return self._wrap_ssl_read(len) 79 | 80 | def recv_into(self, buffer, nbytes=None, flags=0): 81 | if flags != 0: 82 | raise ValueError("non-zero flags not allowed in calls to recv_into") 83 | if buffer and (nbytes is None): 84 | nbytes = len(buffer) 85 | elif nbytes is None: 86 | nbytes = 1024 87 | return self.read(nbytes, buffer) 88 | 89 | def sendall(self, data, flags=0): 90 | if flags != 0: 91 | raise ValueError("non-zero flags not allowed in calls to sendall") 92 | count = 0 93 | with memoryview(data) as view, view.cast("B") as byte_view: 94 | amount = len(byte_view) 95 | while count < amount: 96 | v = self.send(byte_view[count:]) 97 | count += v 98 | 99 | def send(self, data, flags=0): 100 | if flags != 0: 101 | raise ValueError("non-zero flags not allowed in calls to send") 102 | response = self._ssl_io_loop(self.sslobj.write, data) 103 | return response 104 | 105 | def makefile( 106 | self, mode="r", buffering=None, encoding=None, errors=None, newline=None 107 | ): 108 | """ 109 | Python's httpclient uses makefile and buffered io when reading HTTP 110 | messages and we need to support it. 111 | 112 | This is unfortunately a copy and paste of socket.py makefile with small 113 | changes to point to the socket directly. 114 | """ 115 | if not set(mode) <= {"r", "w", "b"}: 116 | raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) 117 | 118 | writing = "w" in mode 119 | reading = "r" in mode or not writing 120 | assert reading or writing 121 | binary = "b" in mode 122 | rawmode = "" 123 | if reading: 124 | rawmode += "r" 125 | if writing: 126 | rawmode += "w" 127 | raw = socket.SocketIO(self, rawmode) 128 | self.socket._io_refs += 1 129 | if buffering is None: 130 | buffering = -1 131 | if buffering < 0: 132 | buffering = io.DEFAULT_BUFFER_SIZE 133 | if buffering == 0: 134 | if not binary: 135 | raise ValueError("unbuffered streams must be binary") 136 | return raw 137 | if reading and writing: 138 | buffer = io.BufferedRWPair(raw, raw, buffering) 139 | elif reading: 140 | buffer = io.BufferedReader(raw, buffering) 141 | else: 142 | assert writing 143 | buffer = io.BufferedWriter(raw, buffering) 144 | if binary: 145 | return buffer 146 | text = io.TextIOWrapper(buffer, encoding, errors, newline) 147 | text.mode = mode 148 | return text 149 | 150 | def unwrap(self): 151 | self._ssl_io_loop(self.sslobj.unwrap) 152 | 153 | def close(self): 154 | self.socket.close() 155 | 156 | def getpeercert(self, binary_form=False): 157 | return self.sslobj.getpeercert(binary_form) 158 | 159 | def version(self): 160 | return self.sslobj.version() 161 | 162 | def cipher(self): 163 | return self.sslobj.cipher() 164 | 165 | def selected_alpn_protocol(self): 166 | return self.sslobj.selected_alpn_protocol() 167 | 168 | def selected_npn_protocol(self): 169 | return self.sslobj.selected_npn_protocol() 170 | 171 | def shared_ciphers(self): 172 | return self.sslobj.shared_ciphers() 173 | 174 | def compression(self): 175 | return self.sslobj.compression() 176 | 177 | def settimeout(self, value): 178 | self.socket.settimeout(value) 179 | 180 | def 
gettimeout(self): 181 | return self.socket.gettimeout() 182 | 183 | def _decref_socketios(self): 184 | self.socket._decref_socketios() 185 | 186 | def _wrap_ssl_read(self, len, buffer=None): 187 | try: 188 | return self._ssl_io_loop(self.sslobj.read, len, buffer) 189 | except ssl.SSLError as e: 190 | if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: 191 | return 0 # eof, return 0. 192 | else: 193 | raise 194 | 195 | def _ssl_io_loop(self, func, *args): 196 | """Performs an I/O loop between incoming/outgoing and the socket.""" 197 | should_loop = True 198 | ret = None 199 | 200 | while should_loop: 201 | errno = None 202 | try: 203 | ret = func(*args) 204 | except ssl.SSLError as e: 205 | if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): 206 | # WANT_READ, and WANT_WRITE are expected, others are not. 207 | raise e 208 | errno = e.errno 209 | 210 | buf = self.outgoing.read() 211 | self.socket.sendall(buf) 212 | 213 | if errno is None: 214 | should_loop = False 215 | elif errno == ssl.SSL_ERROR_WANT_READ: 216 | buf = self.socket.recv(SSL_BLOCKSIZE) 217 | if buf: 218 | self.incoming.write(buf) 219 | else: 220 | self.incoming.write_eof() 221 | return ret 222 | -------------------------------------------------------------------------------- /lib/urllib3/contrib/socks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | This module contains provisional support for SOCKS proxies from within 4 | urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and 5 | SOCKS5. To enable its functionality, either install PySocks or install this 6 | module with the ``socks`` extra. 7 | 8 | The SOCKS implementation supports the full range of urllib3 features. It also 9 | supports the following SOCKS features: 10 | 11 | - SOCKS4A (``proxy_url='socks4a://...``) 12 | - SOCKS4 (``proxy_url='socks4://...``) 13 | - SOCKS5 with remote DNS (``proxy_url='socks5h://...``) 14 | - SOCKS5 with local DNS (``proxy_url='socks5://...``) 15 | - Usernames and passwords for the SOCKS proxy 16 | 17 | .. note:: 18 | It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in 19 | your ``proxy_url`` to ensure that DNS resolution is done from the remote 20 | server instead of client-side when connecting to a domain name. 21 | 22 | SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 23 | supports IPv4, IPv6, and domain names. 24 | 25 | When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` 26 | will be sent as the ``userid`` section of the SOCKS request: 27 | 28 | .. code-block:: python 29 | 30 | proxy_url="socks4a://@proxy-host" 31 | 32 | When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion 33 | of the ``proxy_url`` will be sent as the username/password to authenticate 34 | with the proxy: 35 | 36 | .. code-block:: python 37 | 38 | proxy_url="socks5h://:@proxy-host" 39 | 40 | """ 41 | from __future__ import absolute_import 42 | 43 | try: 44 | import socks 45 | except ImportError: 46 | import warnings 47 | 48 | from ..exceptions import DependencyWarning 49 | 50 | warnings.warn( 51 | ( 52 | "SOCKS support in urllib3 requires the installation of optional " 53 | "dependencies: specifically, PySocks. 
For more information, see " 54 | "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies" 55 | ), 56 | DependencyWarning, 57 | ) 58 | raise 59 | 60 | from socket import error as SocketError 61 | from socket import timeout as SocketTimeout 62 | 63 | from ..connection import HTTPConnection, HTTPSConnection 64 | from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool 65 | from ..exceptions import ConnectTimeoutError, NewConnectionError 66 | from ..poolmanager import PoolManager 67 | from ..util.url import parse_url 68 | 69 | try: 70 | import ssl 71 | except ImportError: 72 | ssl = None 73 | 74 | 75 | class SOCKSConnection(HTTPConnection): 76 | """ 77 | A plain-text HTTP connection that connects via a SOCKS proxy. 78 | """ 79 | 80 | def __init__(self, *args, **kwargs): 81 | self._socks_options = kwargs.pop("_socks_options") 82 | super(SOCKSConnection, self).__init__(*args, **kwargs) 83 | 84 | def _new_conn(self): 85 | """ 86 | Establish a new connection via the SOCKS proxy. 87 | """ 88 | extra_kw = {} 89 | if self.source_address: 90 | extra_kw["source_address"] = self.source_address 91 | 92 | if self.socket_options: 93 | extra_kw["socket_options"] = self.socket_options 94 | 95 | try: 96 | conn = socks.create_connection( 97 | (self.host, self.port), 98 | proxy_type=self._socks_options["socks_version"], 99 | proxy_addr=self._socks_options["proxy_host"], 100 | proxy_port=self._socks_options["proxy_port"], 101 | proxy_username=self._socks_options["username"], 102 | proxy_password=self._socks_options["password"], 103 | proxy_rdns=self._socks_options["rdns"], 104 | timeout=self.timeout, 105 | **extra_kw 106 | ) 107 | 108 | except SocketTimeout: 109 | raise ConnectTimeoutError( 110 | self, 111 | "Connection to %s timed out. (connect timeout=%s)" 112 | % (self.host, self.timeout), 113 | ) 114 | 115 | except socks.ProxyError as e: 116 | # This is fragile as hell, but it seems to be the only way to raise 117 | # useful errors here. 118 | if e.socket_err: 119 | error = e.socket_err 120 | if isinstance(error, SocketTimeout): 121 | raise ConnectTimeoutError( 122 | self, 123 | "Connection to %s timed out. (connect timeout=%s)" 124 | % (self.host, self.timeout), 125 | ) 126 | else: 127 | raise NewConnectionError( 128 | self, "Failed to establish a new connection: %s" % error 129 | ) 130 | else: 131 | raise NewConnectionError( 132 | self, "Failed to establish a new connection: %s" % e 133 | ) 134 | 135 | except SocketError as e: # Defensive: PySocks should catch all these. 136 | raise NewConnectionError( 137 | self, "Failed to establish a new connection: %s" % e 138 | ) 139 | 140 | return conn 141 | 142 | 143 | # We don't need to duplicate the Verified/Unverified distinction from 144 | # urllib3/connection.py here because the HTTPSConnection will already have been 145 | # correctly set to either the Verified or Unverified form by that module. This 146 | # means the SOCKSHTTPSConnection will automatically be the correct type. 147 | class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): 148 | pass 149 | 150 | 151 | class SOCKSHTTPConnectionPool(HTTPConnectionPool): 152 | ConnectionCls = SOCKSConnection 153 | 154 | 155 | class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): 156 | ConnectionCls = SOCKSHTTPSConnection 157 | 158 | 159 | class SOCKSProxyManager(PoolManager): 160 | """ 161 | A version of the urllib3 ProxyManager that routes connections via the 162 | defined SOCKS proxy. 
163 | """ 164 | 165 | pool_classes_by_scheme = { 166 | "http": SOCKSHTTPConnectionPool, 167 | "https": SOCKSHTTPSConnectionPool, 168 | } 169 | 170 | def __init__( 171 | self, 172 | proxy_url, 173 | username=None, 174 | password=None, 175 | num_pools=10, 176 | headers=None, 177 | **connection_pool_kw 178 | ): 179 | parsed = parse_url(proxy_url) 180 | 181 | if username is None and password is None and parsed.auth is not None: 182 | split = parsed.auth.split(":") 183 | if len(split) == 2: 184 | username, password = split 185 | if parsed.scheme == "socks5": 186 | socks_version = socks.PROXY_TYPE_SOCKS5 187 | rdns = False 188 | elif parsed.scheme == "socks5h": 189 | socks_version = socks.PROXY_TYPE_SOCKS5 190 | rdns = True 191 | elif parsed.scheme == "socks4": 192 | socks_version = socks.PROXY_TYPE_SOCKS4 193 | rdns = False 194 | elif parsed.scheme == "socks4a": 195 | socks_version = socks.PROXY_TYPE_SOCKS4 196 | rdns = True 197 | else: 198 | raise ValueError("Unable to determine SOCKS version from %s" % proxy_url) 199 | 200 | self.proxy_url = proxy_url 201 | 202 | socks_options = { 203 | "socks_version": socks_version, 204 | "proxy_host": parsed.host, 205 | "proxy_port": parsed.port, 206 | "username": username, 207 | "password": password, 208 | "rdns": rdns, 209 | } 210 | connection_pool_kw["_socks_options"] = socks_options 211 | 212 | super(SOCKSProxyManager, self).__init__( 213 | num_pools, headers, **connection_pool_kw 214 | ) 215 | 216 | self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme 217 | -------------------------------------------------------------------------------- /lib/flox/string_matcher.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from typing import List 3 | 4 | SPACE_CHAR: str = ' ' 5 | QUERY_SEARCH_PRECISION = { 6 | 'Regular': 50, 7 | 'Low': 20, 8 | 'None': 0 9 | } 10 | DEFAULT_QUERY_SEARCH_PRECISION = QUERY_SEARCH_PRECISION['Regular'] 11 | 12 | """ 13 | This is a python copy of Flow Launcher's string matcher. 14 | I take no credit for the algorithm, I just translated it to python. 
15 | """ 16 | 17 | 18 | @dataclass 19 | class MatchData: 20 | """Match data""" 21 | matched: bool 22 | score_cutoff: int 23 | index_list: List[int] = field(default_factory=list) 24 | score: int = 0 25 | 26 | 27 | def string_matcher(query: str, text: str, ignore_case: bool = True, query_search_precision: int = DEFAULT_QUERY_SEARCH_PRECISION) -> MatchData: 28 | """Compare query to text""" 29 | if not text or not query: 30 | return MatchData(False, query_search_precision) 31 | 32 | query = query.strip() 33 | 34 | current_acronym_query_index = 0 35 | acronym_match_data: List[int] = [] 36 | acronyms_total_count: int = 0 37 | acronyms_matched: int = 0 38 | 39 | full_text_lower: str = text.lower() if ignore_case else text 40 | query_lower: str = query.lower() if ignore_case else query 41 | 42 | query_substrings: List[str] = query_lower.split(' ') 43 | current_query_substring_index: int = 0 44 | current_query_substring = query_substrings[current_query_substring_index] 45 | current_query_substring_char_index = 0 46 | 47 | first_match_index = -1 48 | first_match_index_in_word = -1 49 | last_match_index = 0 50 | all_query_substrings_matched: bool = False 51 | match_found_in_previous_loop: bool = False 52 | all_substrings_contained_in_text: bool = True 53 | 54 | index_list: List[int] = [] 55 | space_indices: List[int] = [] 56 | for text_index in range(len(full_text_lower)): 57 | if current_acronym_query_index >= len(query_lower) and acronyms_matched == len(query_lower): 58 | 59 | if is_acronym_count(full_text_lower, text_index): 60 | acronyms_total_count += 1 61 | continue 62 | 63 | if current_acronym_query_index >= len(query_lower) or current_acronym_query_index >= len(query_lower) and all_query_substrings_matched: 64 | break 65 | 66 | if full_text_lower[text_index] == SPACE_CHAR and current_query_substring_char_index == 0: 67 | space_indices.append(text_index) 68 | 69 | if is_acronym(text, text_index): 70 | if full_text_lower[text_index] == query_lower[current_acronym_query_index]: 71 | acronym_match_data.append(text_index) 72 | acronyms_matched += 1 73 | current_acronym_query_index += 1 74 | 75 | if is_acronym_count(text, text_index): 76 | acronyms_total_count += 1 77 | 78 | if all_query_substrings_matched or full_text_lower[text_index] != current_query_substring[current_query_substring_char_index]: 79 | match_found_in_previous_loop = False 80 | continue 81 | 82 | if first_match_index < 0: 83 | first_match_index = text_index 84 | 85 | if current_query_substring_char_index == 0: 86 | match_found_in_previous_loop = True 87 | first_match_index_in_word = text_index 88 | elif not match_found_in_previous_loop: 89 | start_index_to_verify = text_index - current_query_substring_char_index 90 | 91 | if all_previous_chars_matched(start_index_to_verify, current_query_substring_char_index, full_text_lower, current_query_substring): 92 | match_found_in_previous_loop = True 93 | first_match_index_in_word = start_index_to_verify if current_query_substring_index == 0 else first_match_index 94 | 95 | index_list = get_updated_index_list( 96 | start_index_to_verify, current_query_substring_char_index, first_match_index_in_word, index_list) 97 | 98 | last_match_index = text_index + 1 99 | index_list.append(text_index) 100 | 101 | current_query_substring_char_index += 1 102 | 103 | if current_query_substring_char_index == len(current_query_substring): 104 | all_substrings_contained_in_text = match_found_in_previous_loop and all_substrings_contained_in_text 105 | 106 | current_query_substring_index += 1 107 | 108 | 
all_query_substrings_matched = all_query_substrings_matched_func( 109 | current_query_substring_index, len(query_substrings)) 110 | 111 | if all_query_substrings_matched: 112 | continue 113 | 114 | current_query_substring = query_substrings[current_query_substring_index] 115 | current_query_substring_char_index = 0 116 | 117 | if acronyms_matched > 0 and acronyms_matched == len(query): 118 | acronyms_score: int = acronyms_matched * 100 / acronyms_total_count 119 | 120 | if acronyms_score >= query_search_precision: 121 | return MatchData(True, query_search_precision, acronym_match_data, acronyms_score) 122 | 123 | if all_query_substrings_matched: 124 | 125 | nearest_space_index = calculate_closest_space_index( 126 | space_indices, first_match_index) 127 | 128 | score = calculate_search_score(query, text, first_match_index - nearest_space_index - 1, 129 | space_indices, last_match_index - first_match_index, all_substrings_contained_in_text) 130 | 131 | return MatchData(True, query_search_precision, index_list, score) 132 | 133 | return MatchData(False, query_search_precision) 134 | 135 | 136 | def calculate_search_score(query: str, text: str, first_index: int, space_indices: List[int], match_length: int, all_substrings_contained_in_text: bool): 137 | score = 100 * (len(query) + 1) / ((1 + first_index) + (match_length + 1)) 138 | 139 | if first_index == 0 and all_substrings_contained_in_text: 140 | score -= len(space_indices) 141 | 142 | if (len(text) - len(query)) < 5: 143 | score += 20 144 | elif (len(text) - len(query)) < 10: 145 | score += 10 146 | 147 | if all_substrings_contained_in_text: 148 | count: int = len(query.replace(' ', '')) 149 | threshold: int = 4 150 | if count <= threshold: 151 | score += count * 10 152 | else: 153 | score += threshold * 10 + (count - threshold) * 5 154 | 155 | return score 156 | 157 | 158 | def get_updated_index_list(start_index_to_verify: int, current_query_substring_char_index: int, first_matched_index_in_word: int, index_list: List[int]): 159 | updated_list: List[int] = [] 160 | 161 | for idx, item in enumerate(index_list): 162 | if item >= first_matched_index_in_word: 163 | index_list.pop(idx) 164 | 165 | updated_list.extend(index_list) 166 | 167 | for i in range(current_query_substring_char_index): 168 | updated_list.append(start_index_to_verify + i) 169 | 170 | return updated_list 171 | 172 | 173 | def all_query_substrings_matched_func(current_query_substring_index: int, query_substrings_length: int) -> bool: 174 | return current_query_substring_index >= query_substrings_length 175 | 176 | 177 | def all_previous_chars_matched(start_index_to_verify: int, current_query_substring_char_index: int, full_text_lower: str, current_query_substring: str) -> bool: 178 | all_match = True 179 | for i in range(current_query_substring_char_index): 180 | if full_text_lower[start_index_to_verify + i] != current_query_substring[i]: 181 | all_match = False 182 | 183 | return all_match 184 | 185 | 186 | def is_acronym(text: str, text_index: int) -> bool: 187 | if is_acronym_char(text, text_index) or is_acronym_number(text, text_index): 188 | return True 189 | return False 190 | 191 | 192 | def is_acronym_count(text: str, text_index: int) -> bool: 193 | if is_acronym_char(text, text_index): 194 | return True 195 | if is_acronym_number(text, text_index): 196 | return text_index == 0 or text[text_index - 1] == SPACE_CHAR 197 | 198 | return False 199 | 200 | 201 | def is_acronym_char(text: str, text_index: int) -> bool: 202 | return text[text_index].isupper() or text_index 
== 0 or text[text_index - 1] == SPACE_CHAR 203 | 204 | 205 | def is_acronym_number(text: str, text_index: int) -> bool: 206 | return text[text_index].isdigit() 207 | 208 | 209 | def calculate_closest_space_index(space_indices: List[int], first_match_index: int) -> int: 210 | 211 | closest_space_index = -1 212 | 213 | for i in space_indices: 214 | if i < first_match_index: 215 | closest_space_index = i 216 | else: 217 | break 218 | 219 | return closest_space_index 220 | --------------------------------------------------------------------------------
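A minimal usage sketch for the string matcher above (lib/flox/string_matcher.py), assuming the plugin's vendored lib directory has been put on sys.path; the query and candidate strings below are illustrative only and are not taken from the plugin:

import sys
sys.path.insert(0, "lib")  # assumption: run from the plugin root, where the vendored packages live under lib/

from flox.string_matcher import QUERY_SEARCH_PRECISION, string_matcher

# Score a typed query against a candidate result title.
match = string_matcher(
    "fl lau",
    "Flow Launcher",
    query_search_precision=QUERY_SEARCH_PRECISION["Regular"],
)

if match.matched:
    # MatchData.score is the fuzzy-match score; MatchData.index_list holds the indices
    # of the matched characters, which can be used to highlight them in the result title.
    print(match.score, match.index_list)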