├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml ├── dependabot.yml ├── mergify.yml └── workflows │ ├── dependabot-auto-merge.yml │ ├── pypi-publish.yml │ ├── update-dependencies.yml │ └── update-help-image.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── YetAnotherPicSearch ├── __init__.py ├── __main__.py ├── cache.py ├── config.py ├── data_source │ ├── __init__.py │ ├── ascii2d.py │ ├── baidu.py │ ├── ehentai.py │ ├── google.py │ ├── iqdb.py │ ├── nhentai.py │ ├── saucenao.py │ ├── whatanime.py │ └── yandex.py ├── nhentai_model.py ├── registry.py ├── res │ └── usage.jpg └── utils.py ├── docs ├── images │ ├── image01.jpg │ ├── image02.jpg │ └── image03.jpg └── usage.md ├── pdm.lock ├── pyproject.toml └── scripts ├── .gitignore ├── pdm.lock ├── pyproject.toml └── update_help_image ├── __init__.py └── __main__.py /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: 问题汇报 2 | description: 汇报错误或意外行为 3 | labels: [ bug ] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | 在提 issue 前请确保你仔细阅读过文档,搜索过现有的 issue ,并确保你使用的是最新的 YetAnotherPicSearch 和 PicImageSearch 。 9 | 10 | 在启动本项目出现错误时,你可以无视堆栈 (stack) 相关的信息,但错误信息 (Error: xxxxx) 请认真看看,自己稍微翻译翻译就知道大体意思,大部分问题你都可以在本项目文档或搜索引擎中找到解答。 11 | 12 | **这些 issue 不会被受理:** 13 | 14 | 1. 文档中有提到的 15 | 2. 使用的 YetAnotherPicSearch 和 PicImageSearch 不是最新的 16 | 3. 大部分“连接失败”问题,请自己排查配置等原因 17 | 18 | - type: textarea 19 | id: environment 20 | attributes: 21 | label: YetAnotherPicSearch 、 PicImageSearch 、 go-cqhttp 、 nonebot 、 Python 版本及操作系统 22 | placeholder: 如果 YetAnotherPicSearch 和 PicImageSearch 不是最新版本还发 issue ,会被直接关闭。 23 | validations: 24 | required: true 25 | 26 | - type: textarea 27 | id: installed_packages 28 | attributes: 29 | label: 列出安装的 Python 包 30 | placeholder: 把 pip freeze -l 执行的结果贴上来。 31 | description: 如果你不是通过 pip 或 nb-cli 下载的 YetAnotherPicSearch ,请把你安装的 Python 包列出来。 32 | validations: 33 | required: false 34 | 35 | - type: textarea 36 | id: reproduction 37 | attributes: 38 | label: 如何复现 39 | placeholder: 是否稳定复现,是的话说明步骤。 40 | validations: 41 | required: true 42 | 43 | - type: textarea 44 | id: expectation 45 | attributes: 46 | label: 期望行为 47 | placeholder: 期望的正常行为是怎么样的? 48 | validations: 49 | required: false 50 | 51 | - type: textarea 52 | id: actuality 53 | attributes: 54 | label: 实际行为 55 | placeholder: 实际的行为是怎么样的? 56 | description: | 57 | 请提供相关的截图、日志、配置、地址(issue 内上传图片或文件可直接粘贴)等信息: 58 | - 问题相关截图,例如机器人回复异常时的聊天截图 59 | - 问题日志,可截图,可粘贴文本,可上传日志文件,如粘贴文本请务必用 markdown 代码块包裹,**记得脱敏处理,删除 QQ 号等敏感信息** 60 | - 可能与 bug 有关的配置内容,如果不确定则不必提供 61 | - 出问题的图片 62 | validations: 63 | required: true 64 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: 功能需求 2 | description: 希望添加一个功能 3 | labels: [ enhancement ] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | 提新功能需求之前,请确保你使用的是最新的 YetAnotherPicSearch ,并仔细阅读文档,避免发生“提出的需求是已经实现了的”这种令人哭笑不得的情况。 9 | 10 | - type: textarea 11 | id: feature 12 | attributes: 13 | label: 功能需求 14 | placeholder: 希望添加的功能是什么? 
15 | validations: 16 | required: true 17 | 18 | - type: textarea 19 | id: reason 20 | attributes: 21 | label: 理由 22 | placeholder: 为什么要添加这个功能? 23 | validations: 24 | required: true 25 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /.github/mergify.yml: -------------------------------------------------------------------------------- 1 | pull_request_rules: 2 | - name: "Auto merge bot PRs" 3 | conditions: 4 | - or: 5 | - "author = github-actions[bot]" 6 | - "author = pre-commit-ci[bot]" 7 | - not: "conflict" 8 | actions: 9 | merge: 10 | method: squash 11 | -------------------------------------------------------------------------------- /.github/workflows/dependabot-auto-merge.yml: -------------------------------------------------------------------------------- 1 | name: Dependabot auto-merge 2 | on: pull_request 3 | 4 | permissions: 5 | contents: write 6 | pull-requests: write 7 | 8 | jobs: 9 | dependabot: 10 | runs-on: ubuntu-latest 11 | if: github.event.pull_request.user.login == 'dependabot[bot]' 12 | steps: 13 | - name: Dependabot metadata 14 | id: metadata 15 | uses: dependabot/fetch-metadata@v2 16 | with: 17 | github-token: "${{ secrets.GITHUB_TOKEN }}" 18 | - name: Enable auto-merge for Dependabot PRs 19 | if: steps.metadata.outputs.update-type == 'version-update:semver-patch' 20 | run: gh pr merge --auto --squash "$PR_URL" 21 | env: 22 | PR_URL: ${{github.event.pull_request.html_url}} 23 | GH_TOKEN: ${{secrets.GITHUB_TOKEN}} 24 | -------------------------------------------------------------------------------- /.github/workflows/pypi-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python 🐍 distributions 📦 to PyPI 2 | 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | build-n-publish: 10 | name: Use PDM to Build and publish Python 🐍 distributions 📦 to PyPI 11 | runs-on: ubuntu-latest 12 | 13 | permissions: 14 | # IMPORTANT: this permission is mandatory for trusted publishing 15 | id-token: write 16 | 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@master 20 | with: 21 | submodules: true 22 | 23 | - name: Setup PDM 24 | uses: pdm-project/setup-pdm@v4 25 | 26 | - name: Build and Publish distribution 📦 to PyPI 27 | run: pdm publish 28 | -------------------------------------------------------------------------------- /.github/workflows/update-dependencies.yml: -------------------------------------------------------------------------------- 1 | name: Update PDM Dependencies 2 | 3 | on: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | workflow_dispatch: 7 | 8 | permissions: 9 | contents: write 10 | pull-requests: write 11 | 12 | jobs: 13 | update-dependencies: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | 18 | - name: Update dependencies 19 | uses: pdm-project/update-deps-action@main 20 | -------------------------------------------------------------------------------- /.github/workflows/update-help-image.yml: -------------------------------------------------------------------------------- 1 | name: Update Help Image 2 
| 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - '.github/workflows/update-help-image.yml' 9 | - 'scripts/update_help_image/**' 10 | - 'docs/usage.md' 11 | - 'YetAnotherPicSearch/res/usage.jpg' 12 | workflow_dispatch: 13 | 14 | jobs: 15 | test: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 1 21 | submodules: true 22 | 23 | - name: Setup PDM 24 | uses: pdm-project/setup-pdm@v4 25 | with: 26 | python-version: '3.12' 27 | # cache: true 28 | 29 | - name: Install Pre-requisites 30 | run: |- 31 | sudo apt-get install -y fonts-noto 32 | 33 | - name: Run Job 34 | run: |- 35 | cd scripts 36 | pdm install -G:all 37 | pdm run update-help-image 38 | 39 | - name: Commit and Push 40 | run: |- 41 | if [[ -n "$(git status -s)" ]]; then 42 | git add . 43 | git config user.name github-actions[bot] 44 | git config user.email github-actions[bot]@users.noreply.github.com 45 | git commit -m "chore: update help image" 46 | git push 47 | else 48 | echo "No changes detected." 49 | fi 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.toptal.com/developers/gitignore/api/python,intellij+all,dotenv 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=python,intellij+all,dotenv 4 | 5 | ### dotenv ### 6 | .env 7 | .env.prod 8 | 9 | ### Intellij+all ### 10 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 11 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 12 | 13 | # User-specific stuff 14 | .idea/**/workspace.xml 15 | .idea/**/tasks.xml 16 | .idea/**/usage.statistics.xml 17 | .idea/**/dictionaries 18 | .idea/**/shelf 19 | 20 | # AWS User-specific 21 | .idea/**/aws.xml 22 | 23 | # Generated files 24 | .idea/**/contentModel.xml 25 | 26 | # Sensitive or high-churn files 27 | .idea/**/dataSources/ 28 | .idea/**/dataSources.ids 29 | .idea/**/dataSources.local.xml 30 | .idea/**/sqlDataSources.xml 31 | .idea/**/dynamic.xml 32 | .idea/**/uiDesigner.xml 33 | .idea/**/dbnavigator.xml 34 | 35 | # Gradle 36 | .idea/**/gradle.xml 37 | .idea/**/libraries 38 | 39 | # Gradle and Maven with auto-import 40 | # When using Gradle or Maven with auto-import, you should exclude module files, 41 | # since they will be recreated, and may cause churn. Uncomment if using 42 | # auto-import. 
43 | # .idea/artifacts 44 | # .idea/compiler.xml 45 | # .idea/jarRepositories.xml 46 | # .idea/modules.xml 47 | # .idea/*.iml 48 | # .idea/modules 49 | # *.iml 50 | # *.ipr 51 | 52 | # CMake 53 | cmake-build-*/ 54 | 55 | # Mongo Explorer plugin 56 | .idea/**/mongoSettings.xml 57 | 58 | # File-based project format 59 | *.iws 60 | 61 | # IntelliJ 62 | out/ 63 | 64 | # mpeltonen/sbt-idea plugin 65 | .idea_modules/ 66 | 67 | # JIRA plugin 68 | atlassian-ide-plugin.xml 69 | 70 | # Cursive Clojure plugin 71 | .idea/replstate.xml 72 | 73 | # SonarLint plugin 74 | .idea/sonarlint/ 75 | 76 | # Crashlytics plugin (for Android Studio and IntelliJ) 77 | com_crashlytics_export_strings.xml 78 | crashlytics.properties 79 | crashlytics-build.properties 80 | fabric.properties 81 | 82 | # Editor-based Rest Client 83 | .idea/httpRequests 84 | 85 | # Android studio 3.1+ serialized cache file 86 | .idea/caches/build_file_checksums.ser 87 | 88 | ### Intellij+all Patch ### 89 | # Ignores the whole .idea folder and all .iml files 90 | # See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360 91 | 92 | .idea/* 93 | 94 | # Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023 95 | 96 | *.iml 97 | modules.xml 98 | .idea/misc.xml 99 | *.ipr 100 | 101 | # Sonarlint plugin 102 | .idea/sonarlint 103 | 104 | ### Python ### 105 | # Byte-compiled / optimized / DLL files 106 | __pycache__/ 107 | *.py[cod] 108 | *$py.class 109 | 110 | # C extensions 111 | *.so 112 | 113 | # Distribution / packaging 114 | .Python 115 | build/ 116 | develop-eggs/ 117 | dist/ 118 | downloads/ 119 | eggs/ 120 | .eggs/ 121 | lib/ 122 | lib64/ 123 | parts/ 124 | sdist/ 125 | var/ 126 | wheels/ 127 | share/python-wheels/ 128 | *.egg-info/ 129 | .installed.cfg 130 | *.egg 131 | MANIFEST 132 | 133 | # PyInstaller 134 | # Usually these files are written by a python script from a template 135 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 136 | *.manifest 137 | *.spec 138 | 139 | # Installer logs 140 | pip-log.txt 141 | pip-delete-this-directory.txt 142 | 143 | # Unit test / coverage reports 144 | htmlcov/ 145 | .tox/ 146 | .nox/ 147 | .coverage 148 | .coverage.* 149 | .cache 150 | nosetests.xml 151 | coverage.xml 152 | *.cover 153 | *.py,cover 154 | .hypothesis/ 155 | .pytest_cache/ 156 | cover/ 157 | 158 | # Translations 159 | *.mo 160 | *.pot 161 | 162 | # Django stuff: 163 | *.log 164 | local_settings.py 165 | db.sqlite3 166 | db.sqlite3-journal 167 | 168 | # Flask stuff: 169 | instance/ 170 | .webassets-cache 171 | 172 | # Scrapy stuff: 173 | .scrapy 174 | 175 | # Sphinx documentation 176 | docs/_build/ 177 | 178 | # PyBuilder 179 | .pybuilder/ 180 | target/ 181 | 182 | # Jupyter Notebook 183 | .ipynb_checkpoints 184 | 185 | # IPython 186 | profile_default/ 187 | ipython_config.py 188 | 189 | # pyenv 190 | # For a library or package, you might want to ignore these files since the code is 191 | # intended to run in multiple environments; otherwise, check them in: 192 | # .python-version 193 | 194 | # pipenv 195 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 196 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 197 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 198 | # install all needed dependencies. 
199 | #Pipfile.lock 200 | 201 | # poetry 202 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 203 | # This is especially recommended for binary packages to ensure reproducibility, and is more 204 | # commonly ignored for libraries. 205 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 206 | #poetry.lock 207 | 208 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 209 | __pypackages__/ 210 | 211 | # Celery stuff 212 | celerybeat-schedule 213 | celerybeat.pid 214 | 215 | # SageMath parsed files 216 | *.sage.py 217 | 218 | # Environments 219 | .pdm-python 220 | .venv 221 | env/ 222 | venv/ 223 | ENV/ 224 | env.bak/ 225 | venv.bak/ 226 | 227 | # Spyder project settings 228 | .spyderproject 229 | .spyproject 230 | 231 | # Rope project settings 232 | .ropeproject 233 | 234 | # mkdocs documentation 235 | /site 236 | 237 | # mypy 238 | .mypy_cache/ 239 | .dmypy.json 240 | dmypy.json 241 | 242 | # Pyre type checker 243 | .pyre/ 244 | 245 | # pytype static type analyzer 246 | .pytype/ 247 | 248 | # Cython debug symbols 249 | cython_debug/ 250 | 251 | # PyCharm 252 | # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can 253 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 254 | # and can be added to the global gitignore or merged into this file. For a more nuclear 255 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 256 | #.idea/ 257 | 258 | # End of https://www.toptal.com/developers/gitignore/api/python,intellij+all,dotenv 259 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | minimum_pre_commit_version: "3.5.0" 2 | files: ^.*\.py$ 3 | repos: 4 | - repo: https://github.com/astral-sh/ruff-pre-commit 5 | rev: 'v0.11.11' 6 | hooks: 7 | # resolve COM812, format -> fix -> format 8 | - id: ruff-format 9 | types_or: [ python, pyi ] 10 | - id: ruff 11 | args: [ --fix ] 12 | types_or: [ python, pyi ] 13 | - id: ruff-format 14 | types_or: [ python, pyi ] 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. 
Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 
88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 
146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 
209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. 
This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. 
But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 
387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. 
You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. 
"Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 
564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 
628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | 5 | 6 | NoneBotPluginLogo 7 | 8 | 9 |

10 | NoneBotPluginText 11 |

12 | 13 | # YetAnotherPicSearch 14 | 15 | _✨ 基于 [NoneBot2](https://github.com/nonebot/nonebot2) 与 [PicImageSearch](https://github.com/kitUIN/PicImageSearch) 的另一个 NoneBot 搜图插件 ✨_ 16 | 17 | python 18 | 19 | pdm-managed 20 | 21 | 22 |
23 | 24 | 25 | Pydantic Version 1 Or 2 26 | 27 | 28 | license 29 | 30 | 31 | pypi 32 | 33 | 34 | pypi download 35 | 36 | 37 |
38 | 39 | 40 | NoneBot Registry 41 | 42 | 43 | Supported Adapters 44 | 45 | 46 |
47 | 48 | ## 📖 介绍 49 | 50 | 主要受到 [cq-picsearcher-bot](https://github.com/Tsuk1ko/cq-picsearcher-bot) 的启发。我只需要基础的搜图功能,于是忍不住自己也写了一个,用来搜图、搜番、搜本子。 51 | 52 | 目前支持的搜图服务: 53 | [Ascii2D](https://ascii2d.net/) | [Baidu](https://graph.baidu.com/) | [E-Hentai](https://e-hentai.org/) | [ExHentai](https://exhentai.org/) | [Google](https://www.google.com/imghp) | [Iqdb](https://iqdb.org/) | [SauceNAO](https://saucenao.com/) | [TraceMoe](https://trace.moe/) | [Yandex](https://yandex.com/images/search) 54 | 55 | ## 💿 安装 56 | 57 | 以下提到的方法 任选**其一** 即可 58 | 59 |
60 | [推荐] 使用 nb-cli 安装 61 | 在 nonebot2 项目的根目录下打开命令行, 输入以下指令即可安装 62 | 63 | ```bash 64 | nb plugin install YetAnotherPicSearch 65 | ``` 66 | 67 |
68 | 69 |
70 | 使用包管理器安装 71 | 在 nonebot2 项目的插件目录下, 打开命令行, 根据你使用的包管理器, 输入相应的安装命令 72 | 73 |
74 | pip 75 | 76 | ```bash 77 | pip install YetAnotherPicSearch 78 | ``` 79 | 80 |
81 |
82 | pdm 83 | 84 | ```bash 85 | pdm add YetAnotherPicSearch 86 | ``` 87 | 88 |
89 |
90 | poetry 91 | 92 | ```bash 93 | poetry add YetAnotherPicSearch 94 | ``` 95 | 96 |
97 |
98 | conda 99 | 100 | ```bash 101 | conda install YetAnotherPicSearch 102 | ``` 103 | 104 |
105 | 106 | 打开 nonebot2 项目根目录下的 `pyproject.toml` 文件, 在 `[tool.nonebot]` 部分的 `plugins` 项里追加写入 107 | 108 | ```toml 109 | [tool.nonebot] 110 | plugins = [ 111 | # ... 112 | "YetAnotherPicSearch" 113 | ] 114 | ``` 115 | 116 |
117 | 118 | ## ⚙️ 配置 119 | 120 | | 配置项 | 必填 | 默认值 | 说明 | 121 | | :----------------------------: | :--------------------------: | :-------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | 122 | | **通用配置** | | | | 123 | | `PROXY` | 否 | `None` | 大部分请求所使用的代理地址,如需要 socks 协议支持请额外执行 `pip install YetAnotherPicSearch[socks]` 安装 | 124 | | **数据源配置** | | | | 125 | | `SAUCENAO_API_KEY` | $${\textsf{\color{red}是}}$$ | 无 | SauceNAO 的 API KEY,在 [这里](https://saucenao.com/user.php) 注册后到 [这里](https://saucenao.com/user.php?page=search-api) 获取
如果 SauceNAO 的 API 使用触发当日上限,请同时换新的 API Key 和代理节点,仅换其中一个没有意义。 | 126 | | `ASCII2D_BASE_URL` | 否 | `https://ascii2d.net` | Ascii2D Base URL \([#139](https://github.com/lgc-NB2Dev/YetAnotherPicSearch/issues/139)\) | 127 | | `HIDE_ASCII2D_BASE_URL` | 否 | `True` | 当自定义 Ascii2D Base URL 后,是否将自定义的 URL 替换回官方 URL | 128 | | `EXHENTAI_COOKIES` | 否 | `None` | ExHentai 的 Cookies,没有的情况下自动改用 E-Hentai 搜图,获取方式请参考 [PicImageSearch 文档](https://pic-image-search.kituin.fun/wiki/picimagesearch/E-hentai/DataStructure/#cookies%E8%8E%B7%E5%8F%96) | 129 | | `NHENTAI_BASE_URL` | 否 | `https://nhentai.net` | NHentai Base URL \([#139](https://github.com/lgc-NB2Dev/YetAnotherPicSearch/issues/139)\) | 130 | | `HIDE_NHENTAI_BASE_URL` | 否 | `True` | 当自定义 NHentai Base URL 后,是否将自定义的 URL 替换回官方 URL | 131 | | `NHENTAI_USERAGENT` | 否 | `None` | 用来绕过 NHentai Cloudflare 拦截的 User Agent,配置后在 E-Hentai 标题搜索无结果时会自动调用 NHentai 标题搜索
先用配置的 `PROXY` 做代理,使用浏览器访问 NHentai 通过 CloudFlare 检测后,获取 UA 和 Cookies 填到对应配置项 | 132 | | `NHENTAI_COOKIES` | 否 | `None` | 用来绕过 NHentai Cloudflare 拦截的 Cookies,同上 | 133 | | **行为配置** | | | | 134 | | `SAUCENAO_LOW_ACC` | 否 | `60` | SauceNAO 相似度低于这个百分比将被认定为相似度过低 | 135 | | `AUTO_USE_ASCII2D` | 否 | `True` | 是否在 SauceNAO 或 IQDB 相似度过低时 / E-Hentai 无结果时 自动使用 Ascii2D 搜索 | 136 | | **交互配置** | | | | 137 | | `SEARCH_KEYWORD` | 否 | `搜图` | 触发插件功能的指令名,使用时记得带上你配置的指令头 | 138 | | `SEARCH_KEYWORD_ONLY` | 否 | `False` | 是否只响应指令消息(优先级高于 `SEARCH_IN_GROUP_ONLY_KEYWORD` 与 `SEARCH_IMMEDIATELY`) | 139 | | `SEARCH_IN_GROUP_ONLY_KEYWORD` | 否 | `True` | 是否在群聊中只响应指令消息,否则可以通过 @Bot 触发搜图模式 | 140 | | `SEARCH_IMMEDIATELY` | 否 | `True` | 私聊发送图片是否直接触发搜图,否则需要使用命令 | 141 | | `WAIT_FOR_IMAGE_TIMEOUT` | 否 | `180` | 当用户未提供图片时,提示用户提供图片的等待时间(秒) | 142 | | **消息配置** | | | | 143 | | `HIDE_IMG` | 否 | `False` | 隐藏所有搜索结果的缩略图 | 144 | | `HIDE_IMG_WHEN_LOW_ACC` | 否 | `True` | SauceNAO / IQDB 得到低相似度结果时隐藏结果缩略图 | 145 | | `HIDE_IMG_WHEN_WHATANIME_R18` | 否 | `True` | WhatAnime 得到 R18 结果时隐藏结果缩略图 | 146 | | `SAUCENAO_NSFW_HIDE_LEVEL` | 否 | `2` | 对 SauceNAO 的搜索结果进行 NSFW 判断的严格程度(依次递增),启用后自动隐藏相应的 NSFW 结果的缩略图
`0`:不判断, `1`:只判断明确的, `2`:包括可疑的, `3`:非明确为 SFW 的 | 147 | | `FORWARD_SEARCH_RESULT` | 否 | `True` | 若结果消息有多条,是否采用合并转发方式发送搜图结果(平台不支持会自动回退) | 148 | | `TO_CONFUSE_URLS` | 否 | ... | 要破坏处理的网址列表,减少风控概率(发出来的消息包含这些网址会在网址的 `://` 与 `.` 后加上空格) | 149 | | **杂项配置** | | | | 150 | | `CACHE_EXPIRE` | 否 | `3` | 消息缓存过期时间 (天) | 151 | 152 | ## 🎉 使用 153 | 154 | 使用你配置的指令(默认为 `搜图`)即可开始使用,附带或回复图片可直接触发搜图,可以一次性带多张图 155 | 更详细的使用方法请参考 [这里](https://github.com/lgc-NB2Dev/YetAnotherPicSearch/tree/main/docs/usage.md) 156 | 157 | ### 效果图 158 | 159 |

160 | 161 | 162 | 163 |

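下面给出一份最简 `.env` 配置示例,可对照上方「⚙️ 配置」一节的表格快速上手。示例中的 API Key 与代理地址均为占位的假设值,请替换为自己的实际内容;未写出的配置项将使用表中的默认值:

```properties
# 追加到 NoneBot2 项目根目录的 .env / .env.prod 中(示例值仅作占位,按需修改)

# 必填:SauceNAO 的 API KEY
SAUCENAO_API_KEY=xxxxxxxxxxxxxxxx

# 可选:代理地址,不需要代理可删除本行
PROXY=http://127.0.0.1:7890

# 可选:以下均为默认值,列出仅便于调整
SAUCENAO_LOW_ACC=60
AUTO_USE_ASCII2D=true
SEARCH_KEYWORD=搜图
FORWARD_SEARCH_RESULT=true
```

布尔值和数字直接写字面值即可,NoneBot 会自动解析;其余配置项按上表键名以同样方式追加即可。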
164 | 165 | ## 📞 联系 166 | 167 | 172 | 173 | ### LgCookie 174 | 175 | QQ:3076823485 176 | Telegram:[@lgc2333](https://t.me/lgc2333) 177 | 吹水群:[1105946125](https://jq.qq.com/?_wv=1027&k=Z3n1MpEp) 178 | 邮箱: 179 | 180 | ## 💡 鸣谢 181 | 182 | - [cq-picsearcher-bot](https://github.com/Tsuk1ko/cq-picsearcher-bot) 183 | - [PicImageSearch](https://github.com/kitUIN/PicImageSearch) 184 | - [NoneBot2](https://github.com/nonebot/nonebot2) 185 | - [go-cqhttp](https://github.com/Mrs4s/go-cqhttp) 186 | 187 | ## 💰 赞助 188 | 189 | **[赞助我](https://blog.lgc2333.top/donate)** 190 | 191 | 感谢大家的赞助!你们的赞助将是我继续创作的动力! 192 | 193 | ## ⭐ Star History 194 | 195 | [![Star History](https://starchart.cc/lgc-NB2Dev/YetAnotherPicSearch.svg)](https://starchart.cc/lgc-NB2Dev/YetAnotherPicSearch) 196 | 197 | ## 📝 更新日志 198 | 199 | ### 2.0.6 200 | 201 | - 添加配置 `NHENTAI_BASE_URL`、`HIDE_NHENTAI_BASE_URL` 202 | 203 | ### 2.0.5 204 | 205 | - feat: 添加隐藏 ascii2d 网页反向代理地址的配置及功能 by [@iona-s](https://github.com/iona-s) in [#186](https://github.com/lgc-NB2Dev/YetAnotherPicSearch/pull/186) 206 | 207 | ### 2.0.4 208 | 209 | - 兼容 HTTPX 0.28 210 | 211 | ### 2.0.3 212 | 213 | - 移除重复搜索结果并添加数量提示 214 | 215 | ### 2.0.2 216 | 217 | - 添加配置项用于自定义 Ascii2D 的 Base URL 218 | 219 | ### 2.0.1 220 | 221 | - 修复 [#137](https://github.com/lgc-NB2Dev/YetAnotherPicSearch/issues/137) 222 | - 修复文本重复的问题 223 | - 修复 ExHentai 始终显示无法使用的问题 224 | - 修复缓存消息显示问题 225 | - 添加缺失依赖 226 | 227 | ### 2.0.0 228 | 229 | 项目重构: 230 | 231 | - 使用 alconna 支持多平台,重构消息缓存 232 | - 将之前的 `搜图关键词` 改为指令;同时由于不方便判断是否回复的是 Bot 自身消息,所以阉掉了这个 233 | - 其他细节更改 234 | - 配置变动: 235 | - 新增 `SEARCH_IN_GROUP_ONLY_KEYWORD` 236 | - 新增 `WAIT_FOR_IMAGE_TIMEOUT` 237 | 238 | 以前的更新日志请在 Releases 中查看 239 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/__init__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: E402, N999 2 | 3 | from nonebot.plugin import PluginMetadata, inherit_supported_adapters, require 4 | 5 | require("nonebot_plugin_waiter") 6 | require("nonebot_plugin_alconna") 7 | 8 | from . 
import __main__ as __main__ 9 | from .config import ConfigModel, config 10 | from .data_source import load_search_func 11 | 12 | load_search_func() 13 | 14 | __version__ = "2.0.6" 15 | __plugin_meta__ = PluginMetadata( 16 | name="YetAnotherPicSearch", 17 | description="基于 NoneBot2 及 PicImageSearch 的另一个 NoneBot 搜图插件", 18 | usage=f"使用指令 `{config.search_keyword} -h` 查看帮助", 19 | type="application", 20 | homepage="https://github.com/lgc-NB2Dev/YetAnotherPicSearch", 21 | config=ConfigModel, 22 | supported_adapters=inherit_supported_adapters( 23 | "nonebot_plugin_waiter", 24 | "nonebot_plugin_alconna", 25 | ), 26 | extra={}, 27 | ) 28 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/__main__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: N999 2 | 3 | import asyncio 4 | from collections.abc import AsyncIterator 5 | from contextlib import asynccontextmanager, suppress 6 | from dataclasses import dataclass 7 | from pathlib import Path 8 | from typing import TYPE_CHECKING, NoReturn, Optional, Union, overload 9 | 10 | from cookit.loguru import logged_suppress 11 | from cookit.nonebot.alconna import RecallContext 12 | from nonebot import logger, on_command, on_message 13 | from nonebot.adapters import Bot as BaseBot, Event as BaseEvent, Message as BaseMessage 14 | from nonebot.exception import ActionFailed, FinishedException 15 | from nonebot.matcher import current_bot, current_event, current_matcher 16 | from nonebot.params import _command, _command_arg 17 | from nonebot.permission import SUPERUSER 18 | from nonebot.typing import T_State 19 | from nonebot_plugin_alconna.uniseg import ( 20 | At, 21 | CustomNode, 22 | FallbackStrategy, 23 | Image, 24 | MsgTarget, 25 | Reference, 26 | Reply, 27 | SerializeFailed, 28 | Target, 29 | Text, 30 | UniMessage, 31 | UniMsg, 32 | ) 33 | from nonebot_plugin_waiter import waiter 34 | from PicImageSearch import Network 35 | 36 | from .cache import msg_cache 37 | from .config import config 38 | from .registry import registered_search_func 39 | from .utils import get_image_from_seg, post_image_process 40 | 41 | if TYPE_CHECKING: 42 | from httpx import AsyncClient 43 | 44 | KEY_IMAGES = "images" 45 | 46 | 47 | @dataclass 48 | class SearchArgs: 49 | mode: str = "all" 50 | purge: bool = False 51 | 52 | 53 | async def extract_images(msg: UniMsg) -> list[Image]: 54 | if Reply in msg and isinstance((raw_reply := msg[Reply, 0].msg), BaseMessage): 55 | msg = await UniMessage.generate(message=raw_reply) 56 | return msg[Image] 57 | 58 | 59 | async def rule_func_search_msg( 60 | bot: BaseBot, 61 | ev: BaseEvent, 62 | state: T_State, 63 | msg: UniMsg, 64 | target: MsgTarget, 65 | ) -> bool: 66 | if target.private: 67 | images = await extract_images(msg) 68 | state[KEY_IMAGES] = images 69 | return bool(images) and config.search_immediately 70 | 71 | # 指令检测在下方做了 72 | # 不太方便做是否回复 Bot 自己消息的判断,阉了吧 73 | return (not config.search_in_group_only_keyword) and ( 74 | ev.is_tome() or any(True for x in msg if isinstance(x, At) and x.target == bot.self_id) 75 | ) 76 | 77 | 78 | async def extract_search_args() -> SearchArgs: 79 | ev = current_event.get() 80 | m = current_matcher.get() 81 | state = m.state 82 | 83 | args = SearchArgs() 84 | 85 | async def finish_with_unknown(arg: str) -> NoReturn: 86 | await m.finish( 87 | f"意外参数 {arg}\n使用指令 `{config.search_keyword} -h` 查看帮助", 88 | ) 89 | 90 | async def parse_mode(arg: str) -> bool: 91 | if arg.startswith("--") and (mode := arg[2:]) in 
registered_search_func: 92 | args.mode = mode 93 | return True 94 | return False 95 | 96 | async def is_purge(arg: str) -> bool: 97 | if arg == "--purge": 98 | args.purge = True 99 | return True 100 | return False 101 | 102 | async def send_help(arg: str) -> bool: 103 | if arg in {"-h", "--help"}: 104 | await UniMessage.image( 105 | raw=(Path(__file__).parent / "res" / "usage.jpg").read_bytes(), 106 | ).finish(reply_to=True) 107 | return True 108 | 109 | msg = _command_arg(state).extract_plain_text() if _command(state) else ev.get_plaintext() 110 | for arg in msg.strip().lower().split(): 111 | for func in (parse_mode, is_purge, send_help): 112 | if await func(arg): 113 | break 114 | else: 115 | await finish_with_unknown(arg) 116 | 117 | return args 118 | 119 | 120 | async def get_images_from_ev(msg: UniMessage) -> list[Image]: 121 | m = current_matcher.get() 122 | state = m.state 123 | 124 | images = state.get(KEY_IMAGES) or await extract_images(msg) 125 | if images: 126 | return images 127 | 128 | @waiter(waits=["message"], keep_session=True) 129 | async def wait_msg(msg: UniMsg) -> UniMsg: 130 | return msg 131 | 132 | waited_msg = await wait_msg.wait( 133 | f"请在 {config.wait_for_image_timeout} 秒内发送你要搜索的图片,发送其他内容取消搜索", 134 | ) 135 | if not waited_msg: 136 | await m.finish("操作超时,退出搜图") 137 | 138 | images = await extract_images(waited_msg) 139 | if not images: 140 | await m.finish("无效输入,退出搜图") 141 | 142 | return images 143 | 144 | 145 | @asynccontextmanager 146 | async def fail_with_msg( 147 | msg: Union[UniMessage, str], 148 | should_finish: bool = True, 149 | ) -> AsyncIterator[None]: 150 | try: 151 | yield 152 | except Exception as e: 153 | logger.exception("Error occurred") 154 | asyncio.create_task( 155 | logger.catch( 156 | (msg if isinstance(msg, UniMessage) else UniMessage(msg)).send, 157 | )(reply_to=True), 158 | ) 159 | if should_finish: 160 | raise FinishedException from e 161 | 162 | 163 | async def should_display_favorite(target: Target) -> bool: 164 | return (await SUPERUSER(current_bot.get(), current_event.get())) and target.private 165 | 166 | 167 | async def send_msgs( 168 | msgs: list[UniMessage], 169 | target: Target, 170 | index: Optional[int] = None, 171 | display_fav: bool = False, 172 | ) -> None: 173 | def pre_process_msg(m: UniMessage) -> UniMessage: 174 | if index: 175 | m = UniMessage.text(f"第 {index} 张图片的搜索结果:\n") + m 176 | 177 | should_remove: list[str] = [] 178 | if not display_fav: 179 | should_remove.append("❤️ 已收藏\n") 180 | for txt in should_remove: 181 | # alconna 的 text auto merge 害人 182 | # for seg in (x for x in m[Text] if (txt in x.text)): 183 | for seg in (x for x in m if isinstance(x, Text) and (txt in x.text)): 184 | seg.text = seg.text.replace(txt, "") 185 | 186 | return m 187 | 188 | msgs = [pre_process_msg(m) for m in msgs] 189 | msg_len = len(msgs) 190 | reply_to: Optional[str] = UniMessage.get_message_id() 191 | 192 | async def try_send() -> None: 193 | if config.forward_search_result and msg_len > 1: 194 | bot = current_bot.get() 195 | nodes = [ 196 | CustomNode( 197 | uid=bot.self_id, 198 | name=next(iter(config.nickname), "YetAnotherPicSearch"), 199 | content=x, 200 | ) 201 | for x in msgs 202 | ] 203 | with suppress(SerializeFailed, ActionFailed): 204 | await UniMessage(Reference(nodes=nodes)).send( 205 | target=target, 206 | fallback=FallbackStrategy.forbid, 207 | # reply_to=reply_to, 208 | ) 209 | return 210 | 211 | for x in msgs: 212 | await x.send( 213 | target=target, 214 | fallback=FallbackStrategy.to_text, 215 | reply_to=reply_to, 216 
| ) 217 | 218 | with suppress(ActionFailed): 219 | await try_send() 220 | return 221 | 222 | target.private = True 223 | reply_to = None 224 | with suppress(ActionFailed): 225 | await try_send() 226 | return 227 | 228 | with logged_suppress("ActionFailed", ActionFailed): 229 | await UniMessage.text( 230 | "消息发送失败了呜呜喵,似乎是被某种神秘的力量拦截了喵", 231 | ).send(reply_to=True) 232 | 233 | 234 | @overload 235 | def make_cache_key(mode: str, seg: Image, raw: bytes) -> str: ... 236 | @overload 237 | def make_cache_key( 238 | mode: str, 239 | seg: Image, 240 | raw: None = None, 241 | ) -> Optional[str]: ... 242 | def make_cache_key(mode: str, seg: Image, raw: Optional[bytes] = None) -> Optional[str]: 243 | if seg.id: 244 | adapter_name = current_bot.get().adapter.get_name() 245 | base = f"id_{adapter_name}_{seg.id}" 246 | elif raw: 247 | base = f"hash_{hash(raw):x}" 248 | else: 249 | return None 250 | return f"{hash(f'{base}_{mode}'):x}" 251 | 252 | 253 | async def handle_single_image( 254 | client: "AsyncClient", 255 | seg: Image, 256 | mode: str, 257 | purge: bool, 258 | target: Target, 259 | index: Optional[int] = None, 260 | display_fav: bool = False, 261 | ) -> None: 262 | async def fetch_image(seg: Image) -> Optional[bytes]: 263 | async with fail_with_msg( 264 | f"图片{f' {index} ' if index else ''}下载失败", 265 | should_finish=False, 266 | ): 267 | return await get_image_from_seg(seg) 268 | 269 | # lazy fetch file if cache does not exist 270 | file = None 271 | cache_key = make_cache_key(mode, seg) 272 | if not cache_key: 273 | if not (file := await fetch_image(seg)): 274 | return 275 | cache_key = make_cache_key(mode, seg, file) 276 | 277 | if (not purge) and (cached_msgs := msg_cache.get(cache_key)): 278 | msgs = [(UniMessage.text("[缓存] ") + x) for x in cached_msgs] 279 | await send_msgs(msgs, target, index, display_fav) 280 | return 281 | 282 | # lazy fetch 283 | if (not file) and (not (file := await fetch_image(seg))): 284 | return 285 | file = post_image_process(file) 286 | 287 | search_results: list[UniMessage] = [] 288 | search_func = registered_search_func[mode].func 289 | 290 | while search_func is not None: 291 | result = await search_func(file, client, mode) 292 | messages, search_func = result if isinstance(result, tuple) else (result, None) 293 | search_results.extend([msg.copy() for msg in messages]) 294 | await send_msgs(messages, target, index, display_fav) 295 | 296 | msg_cache[cache_key] = search_results 297 | 298 | 299 | async def search_handler(msg: UniMsg, target: MsgTarget) -> None: 300 | arg = await extract_search_args() 301 | images = await get_images_from_ev(msg) 302 | 303 | async with RecallContext() as recall: 304 | await recall.send("正在进行搜索,请稍候", reply_to=True) 305 | 306 | display_fav = await should_display_favorite(target) 307 | network = ( 308 | Network(proxies=config.proxy, cookies=config.exhentai_cookies, timeout=60) 309 | if arg.mode == "ex" 310 | else Network(proxies=config.proxy) 311 | ) 312 | multiple_images = len(images) > 1 313 | async with network as client: 314 | for index, seg in enumerate(images, 1): 315 | async with fail_with_msg( 316 | f"第 {index} 张图搜索失败", 317 | should_finish=False, 318 | ): 319 | await handle_single_image( 320 | client, 321 | seg, 322 | arg.mode, 323 | arg.purge, 324 | target, 325 | index if multiple_images else None, 326 | display_fav, 327 | ) 328 | 329 | 330 | matcher_search_cmd = on_command(config.search_keyword, priority=1, block=True) 331 | matcher_search_cmd.handle()(search_handler) 332 | if not config.search_keyword_only: 333 | 
matcher_search_msg = on_message(rule=rule_func_search_msg, priority=2, block=True) 334 | matcher_search_msg.handle()(search_handler) 335 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/cache.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterator, MutableMapping 2 | from pathlib import Path 3 | from typing import Any, Optional, cast 4 | from typing_extensions import override 5 | 6 | import msgpack 7 | from cookit import FileCacheManager 8 | from cookit.loguru.common import logged_suppress 9 | from nonebot_plugin_alconna.uniseg import UniMessage 10 | 11 | from .config import config 12 | 13 | # dumper and validators just works with simple standard UniMessages 14 | # but it's enough for this plugin, maybe ¯\_(ツ)_/¯ 15 | 16 | 17 | class MessageCacheManager(MutableMapping[str, Optional[list[UniMessage]]]): 18 | def __init__( 19 | self, 20 | cache_dir: Path, 21 | max_size: Optional[int] = None, 22 | ttl: Optional[int] = None, 23 | ) -> None: 24 | super().__init__() 25 | self.cache = FileCacheManager(cache_dir, max_size=max_size, ttl=ttl) 26 | 27 | @override 28 | def __getitem__(self, key: str) -> Optional[list[UniMessage]]: 29 | data = self.cache[key] 30 | with logged_suppress("Failed to read message cache"): 31 | unpacked: list[list[dict[str, Any]]] = msgpack.unpackb(data) 32 | return [UniMessage.load(x) for x in unpacked] 33 | 34 | @override 35 | def __setitem__(self, key: str, value: Optional[list[UniMessage]]) -> None: 36 | if not value: 37 | raise ValueError("value cannot be empty") 38 | data = None 39 | with logged_suppress("Failed to dump message cache"): 40 | dumped = [x.dump(media_save_dir=False) for x in value] 41 | data = cast("bytes", msgpack.packb(dumped)) 42 | if data: 43 | self.cache[key] = data 44 | 45 | @override 46 | def __delitem__(self, key: str) -> None: 47 | self.cache.__delitem__(key) 48 | 49 | @override 50 | def __iter__(self) -> Iterator[str]: 51 | return self.cache.__iter__() 52 | 53 | @override 54 | def __len__(self) -> int: 55 | return self.cache.__len__() 56 | 57 | @override 58 | def __contains__(self, key: Any) -> bool: 59 | return self.cache.__contains__(key) 60 | 61 | 62 | CACHE_DIR = Path.cwd() / "data" / "YetAnotherPicSearch" / "cache" 63 | msg_cache = MessageCacheManager( 64 | CACHE_DIR, 65 | max_size=config.cache_expire * 100, 66 | ttl=config.cache_expire * 24 * 60 * 60, 67 | ) 68 | 69 | # delete old cache 70 | for _it in (x for x in Path.cwd().glob("pic_search_cache*") if x.is_file()): 71 | _it.unlink() 72 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/config.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Optional 2 | 3 | from cookit.pyd import field_validator 4 | from nonebot import get_plugin_config 5 | from pydantic import BaseModel, Field, HttpUrl 6 | 7 | 8 | class ConfigModel(BaseModel): 9 | nickname: set[str] 10 | 11 | proxy: Optional[str] = None 12 | 13 | saucenao_api_key: str 14 | ascii2d_base_url: Annotated[str, HttpUrl] = "https://ascii2d.net" 15 | hide_ascii2d_base_url: bool = True 16 | exhentai_cookies: Optional[str] = None 17 | nhentai_useragent: Optional[str] = None 18 | nhentai_cookies: Optional[str] = None 19 | nhentai_base_url: Annotated[str, HttpUrl] = "https://nhentai.net" 20 | hide_nhentai_base_url: bool = True 21 | 22 | saucenao_low_acc: int = 60 23 | auto_use_ascii2d: bool = True 24 | 25 
| search_keyword: str = "搜图" 26 | search_keyword_only: bool = False 27 | search_in_group_only_keyword: bool = True 28 | search_immediately: bool = True 29 | wait_for_image_timeout: int = 180 30 | 31 | hide_img: bool = False 32 | hide_img_when_low_acc: bool = True 33 | hide_img_when_whatanime_r18: bool = True 34 | saucenao_nsfw_hide_level: int = Field(2, ge=0, le=3) 35 | forward_search_result: bool = True 36 | to_confuse_urls: list[str] = [ 37 | "ascii2d.net", 38 | "danbooru.donmai.us", 39 | "konachan.com", 40 | "pixiv.net", 41 | "saucenao.com", 42 | "yandex.com", 43 | ] 44 | 45 | cache_expire: int = 3 46 | 47 | @field_validator("saucenao_api_key", mode="before") 48 | def saucenao_api_key_validator(cls, v: str) -> str: # noqa: N805 49 | if not v: 50 | raise ValueError("请配置 SAUCENAO_API_KEY 否则无法正常使用搜图功能!") 51 | return v 52 | 53 | @field_validator("ascii2d_base_url", mode="after") 54 | def ascii2d_base_url_validator(cls, v: str) -> str: # noqa: N805 55 | return v.rstrip("/") 56 | 57 | @field_validator("proxy", mode="before") 58 | def proxy_validator(cls, v: Optional[str]) -> Optional[str]: # noqa: N805 59 | if isinstance(v, str) and v.startswith("socks://"): 60 | raise ValueError( 61 | '请修改代理地址为 "socks5://" 或 "socks4://" 的格式,具体取决于你的代理协议!', 62 | ) 63 | return v 64 | 65 | 66 | config = get_plugin_config(ConfigModel) 67 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/__init__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: N999 2 | 3 | 4 | def load_search_func() -> None: 5 | from pathlib import Path 6 | 7 | from cookit import auto_import 8 | 9 | auto_import(Path(__file__).parent, __package__) 10 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/ascii2d.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import re 3 | from typing import cast 4 | from urllib.parse import unquote 5 | 6 | from cookit import flatten 7 | from cookit.loguru import logged_suppress 8 | from httpx import AsyncClient, HTTPStatusError 9 | from nonebot_plugin_alconna.uniseg import UniMessage 10 | from PicImageSearch import Ascii2D 11 | from PicImageSearch.model import Ascii2DResponse 12 | 13 | from ..config import config 14 | from ..registry import SearchFunctionReturnType, search_function 15 | from ..utils import async_lock, combine_message, get_image_bytes_by_url, shorten_url 16 | 17 | 18 | @search_function("a2d") 19 | @async_lock() 20 | async def ascii2d_search( 21 | file: bytes, 22 | client: AsyncClient, 23 | _: str, 24 | ) -> SearchFunctionReturnType: 25 | ascii2d_color = Ascii2D(base_url=config.ascii2d_base_url, client=client) 26 | color_res = await ascii2d_color.search(file=file) 27 | if not color_res.raw: 28 | return [UniMessage.text("Ascii2D 暂时无法使用")] 29 | 30 | resp_text, resp_url, _ = await ascii2d_color.get( 31 | re.sub(r"(/|%2F)color", r"\1bovw", color_res.url), 32 | ) 33 | bovw_res = Ascii2DResponse(resp_text, resp_url) 34 | # 去除 bovw_res 中已经存在于 color_res 的部分 35 | color_res_origin_list = [str(i.origin) for i in color_res.raw] 36 | duplicated_raw = [ 37 | i for i in bovw_res.raw if (str(i.origin) in color_res_origin_list and any(i.title or i.url_list)) 38 | ] 39 | duplicated_count = len(duplicated_raw) 40 | bovw_res.raw = [i for i in bovw_res.raw if i not in duplicated_raw] 41 | 42 | res = await asyncio.gather( 43 | get_final_res(color_res), 44 | get_final_res(bovw_res, 
bovw=True, duplicated_count=duplicated_count), 45 | ) 46 | return flatten(res) 47 | 48 | 49 | async def get_final_res( 50 | res: Ascii2DResponse, 51 | bovw: bool = False, 52 | duplicated_count: int = 0, 53 | ) -> list[UniMessage]: 54 | final_res_list: list[UniMessage] = [] 55 | for r in res.raw: 56 | if not (r.title or r.url_list): 57 | continue 58 | 59 | msg = UniMessage() 60 | if config.hide_img: 61 | msg += f"预览图链接:{r.thumbnail}\n" 62 | else: 63 | thumbnail = None 64 | with logged_suppress("Failed to get image", HTTPStatusError): 65 | thumbnail = await get_image_bytes_by_url(r.thumbnail) 66 | if not thumbnail: 67 | continue 68 | msg += UniMessage.image(raw=thumbnail) 69 | msg += "\n" 70 | 71 | title = r.title 72 | if r.url_list and title == r.url_list[0].text: 73 | title = "" 74 | 75 | source = r.url or (cast("str", r.url_list[0].href) if r.url_list else "") 76 | source = await shorten_url(source) if source else "" 77 | 78 | author = r.author 79 | if author and r.author_url: 80 | author_url = await shorten_url(r.author_url) 81 | author = f"[{author}]({author_url})" 82 | 83 | res_list = [ 84 | r.detail, 85 | title, 86 | f"作者:{author}" if author else "", 87 | f"来源:{source}" if source else "", 88 | ] 89 | msg += combine_message(res_list) 90 | 91 | final_res_list.append(msg) 92 | if len(final_res_list) == 3: 93 | break 94 | 95 | res_url = ( 96 | unquote(res.url).replace(config.ascii2d_base_url, "https://ascii2d.net") 97 | if config.hide_ascii2d_base_url 98 | else res.url 99 | ) 100 | 101 | return [ 102 | UniMessage.text( 103 | f"Ascii2D {'特徴' if bovw else '色合'}検索結果" 104 | + (f" (已去除与特徴検索結果重复的 {duplicated_count} 个结果)" if duplicated_count else "") 105 | + f"\n搜索页面:{await shorten_url(res_url)}", 106 | ), 107 | *final_res_list, 108 | ] 109 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/baidu.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | from nonebot_plugin_alconna.uniseg import UniMessage 4 | from PicImageSearch import BaiDu 5 | 6 | from ..registry import SearchFunctionReturnType, search_function 7 | from ..utils import async_lock, combine_message, handle_img, shorten_url 8 | 9 | if TYPE_CHECKING: 10 | from httpx import AsyncClient 11 | 12 | 13 | @search_function("baidu") 14 | @async_lock() 15 | async def baidu_search( 16 | file: bytes, 17 | client: "AsyncClient", 18 | _: str, 19 | ) -> SearchFunctionReturnType: 20 | baidu = BaiDu(client=client) 21 | res = await baidu.search(file=file) 22 | _url = await shorten_url(res.url) 23 | if not res.raw: 24 | return [UniMessage.text(f"Baidu 搜索结果为空\n搜索页面:{_url}")] 25 | thumbnail = await handle_img(res.raw[0].thumbnail) 26 | res_list = [ 27 | "Baidu 搜索结果", 28 | thumbnail, 29 | res.raw[0].url, 30 | f"搜索页面:{_url}", 31 | ] 32 | return [combine_message(res_list)] 33 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/ehentai.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import re 3 | from collections import defaultdict 4 | from typing import Any, cast 5 | 6 | import arrow 7 | from httpx import AsyncClient 8 | from nonebot_plugin_alconna.uniseg import UniMessage 9 | from PicImageSearch import EHentai 10 | from PicImageSearch.model import EHentaiResponse 11 | from pyquery import PyQuery 12 | 13 | from ..config import config 14 | from ..registry import SearchFunctionReturnType, 
search_function 15 | from ..utils import ( 16 | DEFAULT_HEADERS, 17 | async_lock, 18 | combine_message, 19 | filter_results_with_ratio, 20 | handle_img, 21 | parse_cookies, 22 | preprocess_search_query, 23 | shorten_url, 24 | ) 25 | from .ascii2d import ascii2d_search 26 | 27 | 28 | @search_function("ex") 29 | @async_lock(freq=8) 30 | async def ehentai_search( 31 | file: bytes, 32 | client: AsyncClient, 33 | mode: str, 34 | ) -> SearchFunctionReturnType: 35 | ex = bool(config.exhentai_cookies) 36 | ehentai = EHentai(client=client) 37 | 38 | if res := await ehentai.search(file=file, ex=ex): 39 | if "Please wait a bit longer between each file search" in res.origin: 40 | return await ehentai_search(file, client, mode) 41 | else: 42 | final_res = await search_result_filter(res) 43 | if not res.raw and config.auto_use_ascii2d: 44 | final_res.append(UniMessage.text("自动使用 Ascii2D 进行搜索")) 45 | return final_res, ascii2d_search 46 | return final_res 47 | 48 | return [UniMessage.text("EHentai 暂时无法使用")] 49 | 50 | 51 | async def ehentai_title_search(title: str) -> list[UniMessage]: 52 | query = preprocess_search_query(title) 53 | url = "https://exhentai.org" if config.exhentai_cookies else "https://e-hentai.org" 54 | params: dict[str, Any] = {"f_search": query} 55 | 56 | async with AsyncClient( 57 | headers=DEFAULT_HEADERS, 58 | cookies=parse_cookies(config.exhentai_cookies), 59 | proxy=config.proxy, 60 | ) as session: 61 | resp = await session.get(url, params=params) 62 | if res := EHentaiResponse(resp.text, str(resp.url)): 63 | if not res.raw: 64 | # 如果第一次没找到,使搜索结果包含被删除的部分,并重新搜索 65 | params["advsearch"] = 1 66 | params["f_sname"] = "on" 67 | params["f_sh"] = "on" 68 | resp = await session.get(url, params=params) 69 | res = EHentaiResponse(resp.text, str(resp.url)) 70 | 71 | # 只保留标题和搜索关键词相关度较高的结果,并排序,以此来提高准确度 72 | if res.raw: 73 | res.raw = filter_results_with_ratio(res, title) 74 | return await search_result_filter(res) 75 | 76 | return [UniMessage.text("EHentai 暂时无法使用")] 77 | 78 | 79 | async def search_result_filter( 80 | res: EHentaiResponse, 81 | ) -> list[UniMessage]: 82 | url = await shorten_url(res.url) 83 | if not res.raw: 84 | return [UniMessage.text(f"EHentai 搜索结果为空\n搜索页面:{url}")] 85 | 86 | # 尝试过滤已删除的 87 | if not_expunged_res := [i for i in res.raw if not PyQuery(i.origin)("[id^='posted'] s")]: 88 | res.raw = not_expunged_res 89 | 90 | # 尝试过滤无主题的杂图图集 91 | if not_themeless_res := [i for i in res.raw if "themeless" not in " ".join(i.tags)]: 92 | res.raw = not_themeless_res 93 | 94 | # 尝试过滤评分低于 3 星的 95 | if above_3_star_res := [ 96 | i for i in res.raw if get_star_rating(cast("str", PyQuery(i.origin)("div.ir").attr("style"))) >= 3 97 | ]: 98 | res.raw = above_3_star_res 99 | 100 | # 尽可能过滤掉非预期结果(大概 101 | priority = defaultdict(lambda: 0) 102 | priority["Image Set"] = 1 103 | priority["Non-H"] = 2 104 | priority["Western"] = 3 105 | priority["Misc"] = 4 106 | priority["Cosplay"] = 5 107 | priority["Asian Porn"] = 6 108 | res.raw.sort(key=lambda x: priority[x.type], reverse=True) 109 | for key, group in itertools.groupby(res.raw, key=lambda x: x.type): 110 | if priority[key] > 0: 111 | group_list = list(group) 112 | if len(res.raw) != len(group_list): 113 | res.raw = [i for i in res.raw if i not in group_list] 114 | 115 | # 优先找汉化版;没找到就优先找原版 116 | if chinese_res := [i for i in res.raw if "translated" in " ".join(i.tags) and "chinese" in " ".join(i.tags)]: 117 | selected_res = chinese_res[0] 118 | elif not_translated_res := [i for i in res.raw if "translated" not in " ".join(i.tags)]: 119 |
selected_res = not_translated_res[0] 120 | else: 121 | selected_res = res.raw[0] 122 | 123 | thumbnail = await handle_img( 124 | selected_res.thumbnail, 125 | cookies=config.exhentai_cookies, 126 | ) 127 | date = arrow.get(selected_res.date).to("Asia/Shanghai").format("YYYY-MM-DD HH:mm") 128 | favorited = bool(selected_res.origin.find("[id^='posted']").eq(0).attr("style")) 129 | res_list = [ 130 | "EHentai 搜索结果", 131 | thumbnail, 132 | selected_res.title, 133 | ("❤️ 已收藏" if favorited else ""), 134 | f"类型:{selected_res.type}", 135 | f"日期:{date}", 136 | f"来源:{selected_res.url}", 137 | f"搜索页面:{url}", 138 | ] 139 | return [combine_message(res_list)] 140 | 141 | 142 | def get_star_rating(css_style: str) -> float: 143 | if match := re.search(r"(-?\d+)px (-\d+)px", css_style): 144 | x, y = match.groups() 145 | star_rating = 5 - int(x.rstrip("px")) / -16 146 | if y == "-21px": 147 | star_rating -= 0.5 148 | return star_rating 149 | return 0 150 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/google.py: -------------------------------------------------------------------------------- 1 | import base64 2 | from typing import TYPE_CHECKING 3 | 4 | from nonebot_plugin_alconna.uniseg import UniMessage 5 | from PicImageSearch import Google 6 | 7 | from ..registry import SearchFunctionReturnType, search_function 8 | from ..utils import async_lock, combine_message, shorten_url 9 | 10 | if TYPE_CHECKING: 11 | from httpx import AsyncClient 12 | from PicImageSearch.model import GoogleResponse 13 | 14 | 15 | @search_function("google") 16 | @async_lock() 17 | async def google_search( 18 | file: bytes, 19 | client: "AsyncClient", 20 | _: str, 21 | ) -> SearchFunctionReturnType: 22 | google = Google(client=client) 23 | if res := await google.search(file=file): 24 | return await search_result_filter(res) 25 | return [UniMessage.text("Google 暂时无法使用")] 26 | 27 | 28 | async def search_result_filter(res: "GoogleResponse") -> list[UniMessage]: 29 | url = await shorten_url(res.url) 30 | if not res.raw: 31 | return [UniMessage.text(f"Google 搜索结果为空\n搜索页面:{url}")] 32 | 33 | selected_res = next((i for i in res.raw if i.thumbnail), res.raw[0]) 34 | if not selected_res.thumbnail: 35 | return [UniMessage.text(f"Google 搜索结果为空\n搜索页面:{url}")] 36 | 37 | thumbnail = UniMessage.image( 38 | raw=base64.b64decode(selected_res.thumbnail.split(",", 1)[1]), 39 | ) 40 | res_list = [ 41 | "Google 搜索结果", 42 | thumbnail, 43 | selected_res.title, 44 | f"来源:{selected_res.url}", 45 | f"搜索页面:{url}", 46 | ] 47 | return [combine_message(res_list)] 48 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/iqdb.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | from nonebot_plugin_alconna.uniseg import UniMessage 4 | from PicImageSearch import Iqdb 5 | 6 | from ..config import config 7 | from ..registry import SearchFunctionReturnType, search_function 8 | from ..utils import ( 9 | async_lock, 10 | combine_message, 11 | get_source, 12 | get_valid_url, 13 | handle_img, 14 | shorten_url, 15 | ) 16 | from .ascii2d import ascii2d_search 17 | 18 | if TYPE_CHECKING: 19 | from httpx import AsyncClient 20 | 21 | 22 | @search_function("iqdb") 23 | @async_lock() 24 | async def iqdb_search( 25 | file: bytes, 26 | client: "AsyncClient", 27 | _: str, 28 | ) -> SearchFunctionReturnType: 29 | iqdb = Iqdb(client=client) 30 | res = await iqdb.search(file=file) 31 
| if not res.raw: 32 | return ( 33 | [UniMessage.text("Iqdb 暂时无法使用,自动使用 Ascii2D 进行搜索")], 34 | ascii2d_search, 35 | ) 36 | 37 | final_res: list[UniMessage] = [] 38 | # 如果遇到搜索结果相似度低的情况,去除第一个只有提示信息的空结果 39 | low_acc = False 40 | if res.raw[0].content == "No relevant matches": 41 | low_acc = True 42 | res.raw.pop(0) 43 | selected_res = res.raw[0] 44 | hide_img = config.hide_img or (low_acc and config.hide_img_when_low_acc) 45 | 46 | # 优先取 danbooru 或 yande.re 47 | danbooru_res_list = [i for i in res.raw if i.source == "Danbooru"] 48 | yandere_res_list = [i for i in res.raw if i.source == "yande.re"] 49 | if danbooru_res_list: 50 | selected_res = danbooru_res_list[0] 51 | elif yandere_res_list: 52 | selected_res = yandere_res_list[0] 53 | 54 | thumbnail = await handle_img(selected_res.thumbnail, hide_img) 55 | source = await get_source(selected_res.url) 56 | if source: 57 | if get_valid_url(source): 58 | source = await shorten_url(source) 59 | source = f"来源:{source}" 60 | res_list = [ 61 | f"Iqdb ({selected_res.similarity}%)", 62 | thumbnail, 63 | await shorten_url(selected_res.url), 64 | source, 65 | f"搜索页面:{res.url}", 66 | ] 67 | final_res.append(combine_message(res_list)) 68 | 69 | if low_acc and config.auto_use_ascii2d: 70 | final_res.append( 71 | UniMessage.text( 72 | f"相似度 {selected_res.similarity}% 过低,自动使用 Ascii2D 进行搜索", 73 | ), 74 | ) 75 | return final_res, ascii2d_search 76 | 77 | return final_res 78 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/nhentai.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | 3 | import arrow 4 | from httpx import AsyncClient 5 | from lxml.html import HTMLParser, fromstring 6 | from nonebot_plugin_alconna.uniseg import UniMessage 7 | from pyquery import PyQuery 8 | 9 | from ..config import config 10 | from ..nhentai_model import NHentaiItem, NHentaiResponse 11 | from ..utils import ( 12 | combine_message, 13 | filter_results_with_ratio, 14 | handle_img, 15 | parse_cookies, 16 | preprocess_search_query, 17 | shorten_url, 18 | ) 19 | 20 | NHENTAI_HEADERS = ( 21 | {"User-Agent": config.nhentai_useragent} if config.nhentai_cookies and config.nhentai_useragent else None 22 | ) 23 | NHENTAI_COOKIES = parse_cookies(config.nhentai_cookies) 24 | 25 | 26 | def get_nh_display_base(): 27 | return config.nhentai_base_url if config.hide_nhentai_base_url else "https://nhentai.net" 28 | 29 | 30 | async def update_nhentai_info(item: NHentaiItem) -> None: 31 | async with AsyncClient( 32 | headers=NHENTAI_HEADERS, 33 | cookies=NHENTAI_COOKIES, 34 | proxy=config.proxy, 35 | ) as session: 36 | resp = await session.get(item.with_base_url(config.nhentai_base_url)) 37 | uft8_parser = HTMLParser(encoding="utf-8") 38 | data = PyQuery(fromstring(resp.text, parser=uft8_parser)) 39 | item.origin = data 40 | item.title = cast( 41 | "str", 42 | (data.find("h2.title").text() if data.find("h2.title") else data.find("h1.title").text()), 43 | ) 44 | item.type = cast("str", data.find('#tags a[href^="/category/"] .name').text()) 45 | item.date = cast("str", data.find("#tags time").attr("datetime")) 46 | item.tags = [cast("str", i.text()) for i in data.find('#tags a:not([href*="/search/?q=pages"]) .name').items()] 47 | 48 | 49 | async def nhentai_title_search(title: str) -> list[UniMessage]: 50 | query = preprocess_search_query(title) 51 | async with AsyncClient( 52 | headers=NHENTAI_HEADERS, 53 | cookies=NHENTAI_COOKIES, 54 | proxy=config.proxy, 55 | ) as 
session: 56 | resp = await session.get( 57 | f"{config.nhentai_base_url}/search/", 58 | params={"q": query}, 59 | ) 60 | if res := NHentaiResponse( 61 | resp.text, 62 | str(resp.url).replace(f"{config.nhentai_base_url}/", "", 1), 63 | ): 64 | # 只保留标题和搜索关键词相关度较高的结果,并排序,以此来提高准确度 65 | if res.raw: 66 | res.raw = filter_results_with_ratio(res, title) 67 | return await search_result_filter(res) 68 | 69 | return [UniMessage.text("NHentai 暂时无法使用")] 70 | 71 | 72 | async def search_result_filter(res: NHentaiResponse) -> list[UniMessage]: 73 | display_base = get_nh_display_base() 74 | url = await shorten_url( 75 | res.with_base_url(display_base), 76 | force_shorten=True, 77 | ) 78 | if not res.raw: 79 | return [UniMessage.text(f"NHentai 搜索结果为空\n搜索页面:{url}")] 80 | 81 | for i in res.raw: 82 | await update_nhentai_info(i) 83 | 84 | # 优先找汉化版;没找到就优先找原版 85 | if chinese_res := [i for i in res.raw if "translated" in i.tags and "chinese" in i.tags]: 86 | selected_res = chinese_res[0] 87 | elif not_translated_res := [i for i in res.raw if "translated" not in i.tags]: 88 | selected_res = not_translated_res[0] 89 | else: 90 | selected_res = res.raw[0] 91 | 92 | thumbnail = await handle_img(selected_res.thumbnail) 93 | date = arrow.get(selected_res.date).to("Asia/Shanghai").format("YYYY-MM-DD HH:mm") 94 | res_list = [ 95 | "NHentai 搜索结果", 96 | thumbnail, 97 | selected_res.title, 98 | f"类型:{selected_res.type}", 99 | f"日期:{date}", 100 | f"来源:{selected_res.with_base_url(display_base)}", 101 | f"搜索页面:{url}", 102 | ] 103 | return [combine_message(res_list)] 104 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/saucenao.py: -------------------------------------------------------------------------------- 1 | import re 2 | from typing import TYPE_CHECKING 3 | 4 | from nonebot_plugin_alconna.uniseg import UniMessage 5 | from PicImageSearch import SauceNAO 6 | 7 | from ..config import config 8 | from ..registry import ( 9 | SearchFunctionReturnTuple, 10 | SearchFunctionReturnType, 11 | search_function, 12 | ) 13 | from ..utils import ( 14 | async_lock, 15 | combine_message, 16 | get_source, 17 | get_valid_url, 18 | handle_img, 19 | shorten_url, 20 | ) 21 | from .ascii2d import ascii2d_search 22 | from .ehentai import ehentai_title_search 23 | from .nhentai import nhentai_title_search 24 | from .whatanime import whatanime_search 25 | 26 | if TYPE_CHECKING: 27 | from httpx import AsyncClient 28 | from PicImageSearch.model import SauceNAOItem, SauceNAOResponse 29 | 30 | SAUCENAO_DB = { 31 | "all": 999, 32 | "pixiv": 5, 33 | "danbooru": 9, 34 | "anime": [21, 22], 35 | "doujin": [18, 38], 36 | "fakku": 16, 37 | } 38 | 39 | 40 | @search_function(*SAUCENAO_DB.keys()) 41 | @async_lock(freq=8) 42 | async def saucenao_search( 43 | file: bytes, 44 | client: "AsyncClient", 45 | mode: str, 46 | ) -> SearchFunctionReturnType: 47 | db = SAUCENAO_DB[mode] 48 | if isinstance(db, list): 49 | saucenao = SauceNAO( 50 | client=client, 51 | api_key=config.saucenao_api_key, 52 | hide=config.saucenao_nsfw_hide_level, 53 | dbs=db, 54 | ) 55 | else: 56 | saucenao = SauceNAO( 57 | client=client, 58 | api_key=config.saucenao_api_key, 59 | hide=config.saucenao_nsfw_hide_level, 60 | db=db, 61 | ) 62 | res = await saucenao.search(file=file) 63 | 64 | if res and res.status == 429 and "4 searches every 30 seconds" in res.origin["header"]["message"]: 65 | return await saucenao_search(file, client, mode) 66 | 67 | if not res or not res.raw: 68 | final_res = [ 69 | UniMessage.text("SauceNAO 
暂时无法使用,自动使用 Ascii2D 进行搜索"), 70 | ] 71 | return final_res, ascii2d_search 72 | 73 | selected_res = get_best_result(res, res.raw[0]) 74 | return await get_final_res(mode, res, selected_res) 75 | 76 | 77 | def get_best_pixiv_result( 78 | res: "SauceNAOResponse", 79 | selected_res: "SauceNAOItem", 80 | ) -> "SauceNAOItem": 81 | pixiv_res_list = list( 82 | filter( 83 | lambda x: x.index_id == SAUCENAO_DB["pixiv"] and x.url and abs(x.similarity - selected_res.similarity) < 5, 84 | res.raw, 85 | ), 86 | ) 87 | 88 | if len(pixiv_res_list) <= 1: 89 | return selected_res 90 | 91 | pixiv_id_results = [ 92 | (int(match.group()), result) for result in pixiv_res_list if (match := re.search(r"\d+", result.url)) 93 | ] 94 | return min(pixiv_id_results)[1] if pixiv_id_results else selected_res 95 | 96 | 97 | def get_best_result(res: "SauceNAOResponse", selected_res: "SauceNAOItem") -> "SauceNAOItem": 98 | # 如果结果为 pixiv ,尝试找到原始投稿,避免返回盗图者的投稿 99 | if selected_res.index_id == SAUCENAO_DB["pixiv"]: 100 | selected_res = get_best_pixiv_result(res, selected_res) 101 | # 如果地址有多个,优先取 danbooru 102 | elif len(selected_res.ext_urls) > 1: 103 | for i in selected_res.ext_urls: 104 | if "danbooru" in i: 105 | selected_res.url = i 106 | return selected_res 107 | 108 | 109 | async def get_final_res( 110 | mode: str, 111 | res: "SauceNAOResponse", 112 | selected_res: "SauceNAOItem", 113 | ) -> SearchFunctionReturnType: 114 | low_acc = selected_res.similarity < config.saucenao_low_acc 115 | hide_img = bool(config.hide_img or selected_res.hidden or (low_acc and config.hide_img_when_low_acc)) 116 | 117 | thumbnail = await handle_img(selected_res.thumbnail, hide_img) 118 | 119 | url = await shorten_url(selected_res.url) 120 | source = selected_res.source if selected_res.source != selected_res.title else "" 121 | if not source and selected_res.url: 122 | source = await get_source(selected_res.url) 123 | if source and get_valid_url(source): 124 | source = await shorten_url(source) 125 | 126 | author_link = ( 127 | f"[{selected_res.author}]({await shorten_url(selected_res.author_url)})" 128 | if selected_res.author and selected_res.author_url 129 | else "" 130 | ) 131 | 132 | res_list = [ 133 | f"SauceNAO ({selected_res.similarity}%)", 134 | thumbnail, 135 | selected_res.title, 136 | f"作者:{author_link}" if author_link else "", 137 | url if url != source else "", 138 | f"来源:{source}" if source else "", 139 | f"搜索页面:{res.url}", 140 | ] 141 | 142 | final_res: list[UniMessage] = [] 143 | 144 | if res.long_remaining and res.long_remaining < 10: 145 | final_res.append( 146 | UniMessage.text(f"SauceNAO 24h 内仅剩 {res.long_remaining} 次使用次数"), 147 | ) 148 | 149 | final_res.append(combine_message(res_list)) 150 | 151 | if low_acc: 152 | extra_res, extra_handle = await handle_saucenao_low_acc(mode, selected_res) 153 | final_res.extend(extra_res) 154 | return final_res, extra_handle 155 | if selected_res.index_id in SAUCENAO_DB["doujin"]: 156 | title = selected_res.title.replace("-", "") 157 | final_res.extend(await search_on_ehentai_and_nhentai(title)) 158 | # 如果搜索结果为 fakku ,额外返回 ehentai 的搜索结果 159 | elif selected_res.index_id == SAUCENAO_DB["fakku"]: 160 | title = f"{selected_res.author} {selected_res.title}" 161 | final_res.extend(await search_on_ehentai_and_nhentai(title)) 162 | elif selected_res.index_id in SAUCENAO_DB["anime"]: 163 | return final_res, whatanime_search 164 | 165 | return final_res, None 166 | 167 | 168 | async def search_on_ehentai_and_nhentai(title: str) -> list[UniMessage]: 169 | title_search_result = await 
ehentai_title_search(title) 170 | 171 | if ( 172 | title_search_result[0].startswith("EHentai 搜索结果为空") 173 | and config.nhentai_useragent 174 | and config.nhentai_cookies 175 | ): 176 | nhentai_title_search_result = await nhentai_title_search(title) 177 | if not nhentai_title_search_result[0].startswith("NHentai 搜索结果为空"): 178 | title_search_result = nhentai_title_search_result 179 | 180 | return title_search_result 181 | 182 | 183 | async def handle_saucenao_low_acc( 184 | mode: str, 185 | selected_res: "SauceNAOItem", 186 | ) -> SearchFunctionReturnTuple: 187 | final_res: list[UniMessage] = [] 188 | # 因为 saucenao 的动画搜索数据库更新不够快,所以当搜索模式为动画时额外增加 whatanime 的搜索结果 189 | if mode == "anime": 190 | return final_res, whatanime_search 191 | if config.auto_use_ascii2d: 192 | final_res.append( 193 | UniMessage.text( 194 | f"相似度 {selected_res.similarity}% 过低,自动使用 Ascii2D 进行搜索", 195 | ), 196 | ) 197 | return final_res, ascii2d_search 198 | 199 | return final_res, None 200 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/whatanime.py: -------------------------------------------------------------------------------- 1 | import math 2 | from typing import TYPE_CHECKING, Any 3 | 4 | from nonebot_plugin_alconna.uniseg import UniMessage 5 | from PicImageSearch import TraceMoe 6 | 7 | from ..config import config 8 | from ..utils import async_lock, combine_message, handle_img 9 | 10 | if TYPE_CHECKING: 11 | from httpx import AsyncClient 12 | 13 | from ..registry import SearchFunctionReturnType 14 | 15 | 16 | @async_lock() 17 | async def whatanime_search( 18 | file: bytes, 19 | client: "AsyncClient", 20 | _: str, 21 | ) -> "SearchFunctionReturnType": 22 | whatanime = TraceMoe(client=client) 23 | res = await whatanime.search(file=file) 24 | if res and res.raw: 25 | time = res.raw[0].From 26 | minutes = math.floor(time / 60) 27 | seconds = math.floor(time % 60) 28 | time_str = f"{minutes:02d}:{seconds:02d}" 29 | 30 | if res.raw[0].isAdult: 31 | thumbnail = await handle_img( 32 | res.raw[0].cover_image, 33 | config.hide_img or config.hide_img_when_whatanime_r18, 34 | ) 35 | else: 36 | thumbnail = await handle_img( 37 | res.raw[0].cover_image, 38 | ) 39 | 40 | chinese_title = res.raw[0].title_chinese 41 | native_title = res.raw[0].title_native 42 | 43 | start_date = date_to_str(res.raw[0].start_date) 44 | end_date = "" 45 | if (end_date_year := res.raw[0].end_date["year"]) and end_date_year > 0: 46 | end_date = date_to_str(res.raw[0].end_date) 47 | episode = res.raw[0].episode or 1 48 | res_list = [ 49 | f"WhatAnime ({res.raw[0].similarity}%)", 50 | f"该截图出自第 {episode} 集的 {time_str}", 51 | thumbnail, 52 | native_title, 53 | chinese_title if chinese_title != native_title else "", 54 | f"类型:{res.raw[0].type}-{res.raw[0].format}", 55 | f"开播:{start_date}", 56 | f"完结:{end_date}" if end_date else "", 57 | ] 58 | return [combine_message(res_list)] 59 | 60 | return [UniMessage.text("WhatAnime 暂时无法使用")] 61 | 62 | 63 | def date_to_str(date: dict[str, Any]) -> str: 64 | return f"{date['year']}-{date['month']}-{date['day']}" 65 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/data_source/yandex.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | from nonebot_plugin_alconna.uniseg import UniMessage 4 | from PicImageSearch import Yandex 5 | 6 | from ..registry import SearchFunctionReturnType, search_function 7 | from ..utils import 
async_lock, combine_message, handle_img, shorten_url 8 | 9 | if TYPE_CHECKING: 10 | from httpx import AsyncClient 11 | from PicImageSearch.model import YandexResponse 12 | 13 | 14 | @search_function("yandex") 15 | @async_lock() 16 | async def yandex_search( 17 | file: bytes, 18 | client: "AsyncClient", 19 | _: str, 20 | ) -> SearchFunctionReturnType: 21 | yandex = Yandex(client=client) 22 | if res := await yandex.search(file=file): 23 | return await search_result_filter(res) 24 | return [UniMessage.text("Yandex 暂时无法使用")] 25 | 26 | 27 | async def search_result_filter(res: "YandexResponse") -> list[UniMessage]: 28 | url = await shorten_url(res.url) 29 | if not res.raw: 30 | return [UniMessage.text(f"Yandex 搜索结果为空\n搜索页面:{url}")] 31 | 32 | thumbnail = await handle_img(res.raw[0].thumbnail) 33 | res_list = [ 34 | "Yandex 搜索结果", 35 | thumbnail, 36 | res.raw[0].size, 37 | res.raw[0].title, 38 | res.raw[0].source, 39 | res.raw[0].content, 40 | f"来源:{res.raw[0].url}", 41 | f"搜索页面:{url}", 42 | ] 43 | return [combine_message(res_list)] 44 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/nhentai_model.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | 3 | from lxml.html import HTMLParser, fromstring 4 | from pyquery import PyQuery 5 | 6 | 7 | class NHentaiItem: 8 | def __init__(self, data: PyQuery): 9 | self.origin: PyQuery = data # 原始数据 10 | self.title: str = cast("str", data.find(".caption").text()) 11 | cover = data.find(".cover") 12 | self.href: str = cast("str", cover.attr("href")) 13 | self.thumbnail: str = cast("str", cover.find("img").attr("data-src")) 14 | self.type: str = "" 15 | self.date: str = "" 16 | self.tags: list[str] = [] 17 | 18 | def with_base_url(self, base: str) -> str: 19 | return f"{base}/{self.href}" 20 | 21 | 22 | class NHentaiResponse: 23 | def __init__(self, resp_text: str, resp_href: str): 24 | self.origin: str = resp_text # 原始数据 25 | uft8_parser = HTMLParser(encoding="utf-8") 26 | data = PyQuery(fromstring(self.origin, parser=uft8_parser)) 27 | self.raw: list[NHentaiItem] = [NHentaiItem(i) for i in data.find(".gallery").items()] 28 | self.href: str = resp_href 29 | 30 | def with_base_url(self, base: str) -> str: 31 | return f"{base}/{self.href}" 32 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/registry.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Awaitable 2 | from dataclasses import dataclass 3 | from typing import Callable, Optional, TypeVar, Union 4 | from typing_extensions import TypeAlias 5 | 6 | from httpx import AsyncClient 7 | from nonebot_plugin_alconna.uniseg import UniMessage 8 | 9 | SearchFunctionReturnTuple: TypeAlias = tuple[ 10 | list[UniMessage], 11 | Optional["SearchFunctionType"], 12 | ] 13 | SearchFunctionReturnType: TypeAlias = Union[ 14 | list[UniMessage], 15 | SearchFunctionReturnTuple, 16 | ] 17 | SearchFunctionType: TypeAlias = Callable[ 18 | [bytes, AsyncClient, str], 19 | Awaitable[SearchFunctionReturnType], 20 | ] 21 | 22 | TSF = TypeVar("TSF", bound=SearchFunctionType) 23 | 24 | 25 | @dataclass 26 | class SearchFunctionInfo: 27 | func: SearchFunctionType 28 | 29 | 30 | registered_search_func: dict[str, SearchFunctionInfo] = {} 31 | 32 | 33 | def search_function(*modes: str) -> Callable[[TSF], TSF]: 34 | def deco(func: TSF) -> TSF: 35 | info = SearchFunctionInfo(func) 36 | for mode in modes: 37 | 
registered_search_func[mode] = info 38 | return func 39 | 40 | return deco 41 | -------------------------------------------------------------------------------- /YetAnotherPicSearch/res/usage.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lgc-NB2Dev/YetAnotherPicSearch/5d92e31122f1758c09c9be9d8db14cc6c9ca5064/YetAnotherPicSearch/res/usage.jpg -------------------------------------------------------------------------------- /YetAnotherPicSearch/utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import operator 3 | import re 4 | from collections.abc import Awaitable, Iterable 5 | from contextlib import suppress 6 | from difflib import SequenceMatcher 7 | from functools import wraps 8 | from io import BytesIO 9 | from pathlib import Path 10 | from typing import ( 11 | Callable, 12 | Optional, 13 | TypeVar, 14 | Union, 15 | ) 16 | from typing_extensions import ParamSpec 17 | 18 | import arrow 19 | from cookit.loguru import logged_suppress 20 | from httpx import URL, AsyncClient, HTTPStatusError, InvalidURL 21 | from nonebot.matcher import current_bot, current_event, current_matcher 22 | from nonebot_plugin_alconna.uniseg import Image as ImageSeg, UniMessage, image_fetch 23 | from PicImageSearch.model.ehentai import EHentaiItem, EHentaiResponse 24 | from PIL import Image 25 | from pyquery import PyQuery 26 | from tenacity import TryAgain, retry, stop_after_attempt, stop_after_delay 27 | 28 | from .config import config 29 | from .nhentai_model import NHentaiItem, NHentaiResponse 30 | 31 | T = TypeVar("T") 32 | P = ParamSpec("P") 33 | 34 | 35 | DEFAULT_HEADERS = { 36 | "User-Agent": ( 37 | "Mozilla/5.0 (Windows NT 10.0; Win64; x64) " 38 | "AppleWebKit/537.36 (KHTML, like Gecko) " 39 | "Chrome/99.0.4844.82 Safari/537.36" 40 | ), 41 | } 42 | 43 | 44 | def post_image_process(file: bytes) -> bytes: 45 | im = Image.open(BytesIO(file)) 46 | if (im.format == "WEBP") or getattr(im, "is_animated", False): 47 | with BytesIO() as output: 48 | im.save(output, "PNG") 49 | return output.getvalue() 50 | return file 51 | 52 | 53 | @retry(stop=(stop_after_attempt(3) | stop_after_delay(30)), reraise=True) 54 | async def get_image_bytes_by_url(url: str, cookies: Optional[str] = None) -> bytes: 55 | _url = URL(url) 56 | referer = f"{_url.scheme}://{_url.host}/" 57 | headers = DEFAULT_HEADERS if _url.host.endswith("qpic.cn") else {"Referer": referer, **DEFAULT_HEADERS} 58 | async with AsyncClient( 59 | headers=headers, 60 | cookies=parse_cookies(cookies), 61 | proxy=config.proxy, 62 | follow_redirects=True, 63 | ) as session: 64 | resp = await session.get(url) 65 | if resp.status_code == 404: 66 | resp.raise_for_status() 67 | raise Exception # NoReturn for sure, just make linter know 68 | if resp.status_code >= 400 or len(resp.content) == 0: 69 | raise TryAgain 70 | return post_image_process(resp.content) 71 | 72 | 73 | async def get_image_from_seg(seg: ImageSeg) -> bytes: 74 | with suppress(ValueError): 75 | return seg.raw_bytes 76 | if seg.path: 77 | return Path(seg.path).read_bytes() 78 | if file := await image_fetch( 79 | current_event.get(), 80 | current_bot.get(), 81 | current_matcher.get().state, 82 | seg, 83 | ): 84 | return file 85 | raise ValueError("Cannot get image") 86 | 87 | 88 | async def handle_img( 89 | url: str, 90 | hide_img: bool = config.hide_img, 91 | cookies: Optional[str] = None, 92 | ) -> UniMessage: 93 | if not hide_img: 94 | with logged_suppress("Failed to 
get image", HTTPStatusError): 95 | return UniMessage.image(raw=await get_image_bytes_by_url(url, cookies)) 96 | return UniMessage.text(f"预览图链接:{url}") 97 | 98 | 99 | def handle_source(source: str) -> str: 100 | return ( 101 | source.replace("www.pixiv.net/en/artworks", "www.pixiv.net/artworks") 102 | .replace( 103 | "www.pixiv.net/member_illust.php?mode=medium&illust_id=", 104 | "www.pixiv.net/artworks/", 105 | ) 106 | .replace("http://", "https://") 107 | ) 108 | 109 | 110 | def parse_source(resp_text: str, host: str) -> Optional[str]: 111 | if host in {"danbooru.donmai.us", "gelbooru.com"}: 112 | source = PyQuery(resp_text)(".image-container").attr("data-normalized-source") 113 | return str(source) if source else None 114 | 115 | if host in {"yande.re", "konachan.com"}: 116 | source = PyQuery(resp_text)("#post_source").attr("value") 117 | pool_text = PyQuery(resp_text)('a[href^="/pool/show/"]').text() or "" 118 | return str(source) if source else pool_text 119 | 120 | return "" 121 | 122 | 123 | async def get_source(url: str) -> str: 124 | if not url: 125 | return "" 126 | 127 | _url = get_valid_url(url) 128 | if not _url: 129 | return "" 130 | 131 | host = _url.host 132 | headers = None if host == "danbooru.donmai.us" else DEFAULT_HEADERS 133 | async with AsyncClient( 134 | headers=headers, 135 | proxy=config.proxy, 136 | follow_redirects=True, 137 | ) as session: 138 | resp = await session.get(url) 139 | if resp.status_code >= 400: 140 | return "" 141 | 142 | source = parse_source(resp.text, host) 143 | if source and get_valid_url(source): 144 | return handle_source(source) 145 | 146 | return source or "" 147 | 148 | 149 | def confuse_url(url: str) -> str: 150 | return next( 151 | ( 152 | url.replace("//", "// ").replace(host, host.replace(".", ". ")) 153 | for host in config.to_confuse_urls 154 | if host in url 155 | ), 156 | url, 157 | ) 158 | 159 | 160 | async def shorten_url(url: str, force_shorten: bool = False) -> str: 161 | pid_search = re.compile( 162 | r"(?:pixiv.+(?:illust_id=|artworks/)|/img-original/img/(?:\d+/){6})(\d+)", 163 | ) 164 | if pid_match := pid_search.search(url): 165 | return confuse_url(f"https://pixiv.net/i/{pid_match[1]}") 166 | 167 | uid_search = re.compile(r"pixiv.+(?:member\.php\?id=|users/)(\d+)") 168 | if uid_match := uid_search.search(url): 169 | return confuse_url(f"https://pixiv.net/u/{uid_match[1]}") 170 | 171 | host = URL(url).host 172 | if host == "danbooru.donmai.us": 173 | return confuse_url(url.replace("/post/show/", "/posts/")) 174 | 175 | if force_shorten or host in { 176 | "e-hentai.org", 177 | "exhentai.org", 178 | "graph.baidu.com", 179 | "nhentai.net", 180 | "www.google.com", 181 | "yandex.com", 182 | }: 183 | flag = len(url) > 1024 184 | async with AsyncClient(headers=DEFAULT_HEADERS) as session: 185 | if not flag: 186 | resp = await session.post("https://yww.uy/shorten", json={"url": url}) 187 | if resp.status_code < 400: 188 | return resp.json()["url"] 189 | flag = True 190 | if flag: 191 | resp = await session.post( 192 | "https://www.shorturl.at/shortener.php", 193 | data={"u": url}, 194 | ) 195 | if resp.status_code < 400: 196 | final_url = PyQuery(resp.text)("#shortenurl").attr("value") 197 | return f"https://{final_url}" 198 | 199 | return confuse_url(url) 200 | 201 | 202 | def parse_cookies(cookies_str: Optional[str] = None) -> dict[str, str]: 203 | cookies_dict: dict[str, str] = {} 204 | if cookies_str: 205 | for line in cookies_str.split(";"): 206 | key, value = line.strip().split("=", 1) 207 | cookies_dict[key] = value 208 | return
cookies_dict 209 | 210 | 211 | def async_lock( 212 | freq: float = 1, 213 | ) -> Callable[[Callable[P, Awaitable[T]]], Callable[P, Awaitable[T]]]: 214 | def decorator(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]: 215 | lock = asyncio.Lock() 216 | last_call_time: Optional[arrow.Arrow] = None 217 | 218 | @wraps(func) 219 | async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: 220 | nonlocal last_call_time 221 | async with lock: 222 | elapsed_time = arrow.now() - (last_call_time or arrow.now().shift(seconds=-freq)) 223 | if elapsed_time.total_seconds() < freq: 224 | await asyncio.sleep(freq - elapsed_time.total_seconds()) 225 | result = await func(*args, **kwargs) 226 | last_call_time = arrow.now() 227 | return result 228 | 229 | return wrapper 230 | 231 | return decorator 232 | 233 | 234 | def preprocess_search_query(query: str) -> str: 235 | query = re.sub(r"●|・|~|~|〜|、|×|:::|\s+-\s+|\[中国翻訳]", " ", query) 236 | # 去除独立的英文、日文、中文字符,但不去除带连字符的 237 | for i in [ 238 | r"\b[A-Za-z]\b", 239 | r"\b[\u4e00-\u9fff]\b", 240 | r"\b[\u3040-\u309f\u30a0-\u30ff]\b", 241 | ]: 242 | query = re.sub(rf"(? list[T_Item]: 255 | raw_with_ratio = [(i, SequenceMatcher(lambda x: x == " ", title, i.title).ratio()) for i in res.raw] 256 | raw_with_ratio.sort(key=operator.itemgetter(1), reverse=True) 257 | 258 | filtered = [i[0] for i in raw_with_ratio if i[1] > 0.65] 259 | return filtered or [i[0] for i in raw_with_ratio] # type: ignore 260 | 261 | 262 | def get_valid_url(url: str) -> Optional[URL]: 263 | with suppress(InvalidURL): 264 | url_obj = URL(url) 265 | if url_obj.host: 266 | return url_obj 267 | return None 268 | 269 | 270 | def combine_message( 271 | msg_list: Iterable[Union[UniMessage, str, None]], 272 | join: Optional[str] = "\n", 273 | ) -> UniMessage: 274 | msg = UniMessage() 275 | for i, it in enumerate(msg_list): 276 | if not it: 277 | continue 278 | if join and i != 0: 279 | msg += join 280 | msg += it 281 | return msg 282 | -------------------------------------------------------------------------------- /docs/images/image01.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lgc-NB2Dev/YetAnotherPicSearch/5d92e31122f1758c09c9be9d8db14cc6c9ca5064/docs/images/image01.jpg -------------------------------------------------------------------------------- /docs/images/image02.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lgc-NB2Dev/YetAnotherPicSearch/5d92e31122f1758c09c9be9d8db14cc6c9ca5064/docs/images/image02.jpg -------------------------------------------------------------------------------- /docs/images/image03.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lgc-NB2Dev/YetAnotherPicSearch/5d92e31122f1758c09c9be9d8db14cc6c9ca5064/docs/images/image03.jpg -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | # 使用教程 2 | 3 | ## 日常使用 4 | 5 | - 私聊: 6 | - 发送指令及参数进入搜图模式,详见下方的 [搜图模式](#搜图模式) 7 | - 发送指令及参数时附带或回复图片 8 | - 直接发送图片 (如果禁用了 `SEARCH_IMMEDIATELY` 则无效) 9 | - 群聊: 10 | - 发送指令及参数进入搜图模式,详见下方的 [搜图模式](#搜图模式) 11 | - 发送指令及参数时附带或回复图片 12 | - `@机器人` 并发送或回复图片(如果禁用 `SEARCH_IN_GROUP_ONLY_KEYWORD` 则无效) 13 | - 可以在同一条消息中包含多张图片,会自动批量搜索 14 | - 搜索图片时可以在消息内包含以下参数: 15 | - `--purge` - 无视缓存进行搜图,并更新缓存 16 | - 指定搜索范围(以下参数仅可选一个): 17 | - `--all` - 全库搜索 (默认) 18 | - `--pixiv` - 从 
Pixiv 中搜索 19 | - `--danbooru` - 从 Danbooru 中搜索 20 | - `--doujin` - 搜索本子 21 | - `--anime` - 搜索番剧 22 | - `--a2d` - 使用 Ascii2D 进行搜索 (优势搜索局部图能力较强) 23 | - `--baidu` - 使用 Baidu 进行搜索 24 | - `--ex` - 使用 ExHentai (E-Hentai) 进行搜索 25 | - `--google` - 使用 Google 进行搜索 26 | - `--iqdb` - 使用 Iqdb 进行搜索 27 | - `--yandex` - 使用 Yandex 进行搜索 28 | - 对于 SauceNAO: 29 | - 如果得到的结果相似度低于 60% (可配置),会自动使用 Ascii2D 进行搜索 (可配置) 30 | - 如果额度耗尽,会自动使用 Ascii2D 进行搜索 31 | - 如果搜索到本子,会自动在 ExHentai (E-Hentai) 中搜索并返回链接 (如果有汉化本会优先返回汉化本链接) 32 | - 如果搜到番剧,会自动使用 WhatAnime 搜索番剧详细信息: 33 | - AnimeDB 与 WhatAnime 的结果可能会不一致,是正常现象,毕竟这是两个不同的搜索引擎 34 | - 同时展示这两个搜索的目的是为了尽力得到你可能想要的识别结果 35 | - 对于 ExHentai: 36 | - 如果没有配置 `EXHENTAI_COOKIES` ,会自动回退到 `E-Hentai` 搜索 37 | - 不支持单色图片的搜索,例如黑白漫画,只推荐用于搜索 CG 、画集、图集、彩色漫画、彩色封面等 38 | - 如果没有配置 `SUPERUSERS` ,不会显示搜索结果的收藏状态 39 | - 关于消息发送失败的情况: 40 | 在某些国内平台如 QQ 上,这可能是因为消息中包含的链接被列入黑名单,成了所谓的 `红链`。 41 | 需确定哪个网站的域名被封禁了,然后配置 `TO_CONFUSE_URLS` 配置项来规避。 42 | 43 | ## 搜图模式 44 | 45 | 搜图模式存在的意义是方便用户在转发图片等不方便在消息中夹带 @ 或搜图参数的情况下指定搜索范围或者使用某项功能: 46 | 47 | - 发送指令并附上搜索范围或者功能参数,如果没有指定,会使用默认设置 (即 `--all`) 48 | - 此时你发出来的下一条消息中的图 (也就是一次性的) 会使用指定搜索范围或者使用某项功能 49 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "YetAnotherPicSearch" 3 | dynamic = ["version"] 4 | description = "Yet Another Picture Search Nonebot Plugin" 5 | authors = [ 6 | { name = "NekoAria" }, 7 | { name = "LgCookie", email = "lgc2333@126.com" }, 8 | ] 9 | requires-python = ">=3.9,<4" 10 | dependencies = [ 11 | "nonebot2>=2.4.1", 12 | "nonebot-plugin-alconna>=0.54.2", 13 | "nonebot-plugin-waiter>=0.8.1", 14 | "cookit[loguru,nonebot-alconna,pydantic]>=0.9.3", 15 | "arrow>=1.3.0", 16 | "httpx>=0.27.2", 17 | "lxml>=5.3.0", 18 | "PicImageSearch>=3.11.4", 19 | "pyquery>=2.0.1", 20 | "tenacity>=9.0.0", 21 | "msgpack>=1.1.0", 22 | "Pillow>=11.1.0", 23 | ] 24 | keywords = [ 25 | "nonebot", 26 | "ascii2d", 27 | "baidu", 28 | "e-hentai", 29 | "google", 30 | "iqdb", 31 | "saucenao", 32 | "tracemoe", 33 | "yandex", 34 | "anime", 35 | "danbooru", 36 | "doujin", 37 | "pixiv", 38 | ] 39 | license = { text = "GPL-3.0-only" } 40 | readme = "README.md" 41 | 42 | [project.urls] 43 | homepage = "https://github.com/lgc-NB2Dev/YetAnotherPicSearch" 44 | repository = "https://github.com/lgc-NB2Dev/YetAnotherPicSearch" 45 | 46 | [project.optional-dependencies] 47 | socks = ["httpx[socks]>=0.27.2"] 48 | 49 | [dependency-groups] 50 | dev = ["basedpyright>=1.26.0", "pre-commit>=4.1.0", "ruff>=0.9.4"] 51 | 52 | [build-system] 53 | requires = ["pdm-backend"] 54 | build-backend = "pdm.backend" 55 | 56 | [tool.pdm.version] 57 | source = "file" 58 | path = "YetAnotherPicSearch/__init__.py" 59 | 60 | [tool.pdm.build] 61 | includes = ["YetAnotherPicSearch"] 62 | 63 | [tool.basedpyright] 64 | pythonVersion = "3.9" 65 | typeCheckingMode = "standard" 66 | reportShadowedImports = false 67 | 68 | [tool.ruff] 69 | target-version = "py39" 70 | line-length = 120 71 | 72 | [tool.ruff.format] 73 | docstring-code-format = true 74 | line-ending = "lf" 75 | 76 | [tool.ruff.lint] 77 | select = [ 78 | "A", 79 | "ANN001", 80 | "ARG", 81 | "ASYNC", 82 | "B", 83 | "C4", 84 | "COM", 85 | "DTZ", 86 | "E", 87 | "F", 88 | "FAST", 89 | "FBT", 90 | "FLY", 91 | "FURB", 92 | "I", 93 | "INP", 94 | "ISC", 95 | "N", 96 | "NPY", 97 | "PD", 98 | "PERF", 99 | "PGH", 100 | "PIE", 101 | "PL", 102 | "PT", 103 | "PTH", 104 | "PYI", 105 | "Q", 106 | "RET", 107 | "RSE", 108 | "RUF", 109 | "S", 110 | 
"SIM", 111 | "SLF", 112 | "SLOT", 113 | "TC", 114 | "TRY", 115 | "UP", 116 | "W", 117 | "YTT", 118 | ] 119 | ignore = [ 120 | "B008", 121 | "B905", 122 | # "COM812", 123 | "E501", 124 | "F821", # conflict with pyright 125 | "FBT001", 126 | "FBT002", 127 | "ISC001", 128 | "PERF203", 129 | "PGH003", 130 | "PLC04", 131 | "PLC2701", 132 | "PLR09", 133 | "PLR1702", 134 | "PLR2004", 135 | "PLR6301", 136 | "PLW0603", 137 | "PLW1641", 138 | "PLW2901", 139 | "RUF001", 140 | "RUF002", 141 | "RUF003", 142 | "RUF006", 143 | "RUF029", 144 | "RUF100", 145 | "S101", 146 | "S311", 147 | "S404", 148 | "SIM117", 149 | "TC001", 150 | "TRY002", 151 | "TRY003", 152 | "W505", 153 | ] 154 | 155 | [tool.ruff.lint.isort] 156 | combine-as-imports = true 157 | detect-same-package = true 158 | extra-standard-library = ["typing_extensions"] 159 | split-on-trailing-comma = true 160 | 161 | [tool.ruff.lint.flake8-type-checking] 162 | exempt-modules = [ 163 | "types", 164 | "typing", 165 | "typing_extensions", 166 | "collections", 167 | "pathlib", 168 | "nonebot", 169 | "nonebot_plugin_alconna", 170 | ] 171 | quote-annotations = true 172 | 173 | [tool.ruff.lint.pydocstyle] 174 | convention = "google" 175 | -------------------------------------------------------------------------------- /scripts/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm-project.org/#use-with-ide 110 | .pdm.toml 111 | .pdm-python 112 | .pdm-build/ 113 | 114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 162 | #.idea/ 163 | -------------------------------------------------------------------------------- /scripts/pdm.lock: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # It is not intended for manual editing. 3 | 4 | [metadata] 5 | groups = ["default"] 6 | strategy = ["inherit_metadata"] 7 | lock_version = "4.5.0" 8 | content_hash = "sha256:9d0c900e2049bc1a8f775db40c72652c628dc3ef5ce863a7ff0d81f0f3d3d066" 9 | 10 | [[metadata.targets]] 11 | requires_python = ">=3.9" 12 | 13 | [[package]] 14 | name = "aiofiles" 15 | version = "24.1.0" 16 | requires_python = ">=3.8" 17 | summary = "File support for asyncio." 
18 | groups = ["default"] 19 | files = [ 20 | {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, 21 | {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, 22 | ] 23 | 24 | [[package]] 25 | name = "annotated-types" 26 | version = "0.7.0" 27 | requires_python = ">=3.8" 28 | summary = "Reusable constraint types to use with typing.Annotated" 29 | groups = ["default"] 30 | dependencies = [ 31 | "typing-extensions>=4.0.0; python_version < \"3.9\"", 32 | ] 33 | files = [ 34 | {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, 35 | {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, 36 | ] 37 | 38 | [[package]] 39 | name = "colorama" 40 | version = "0.4.6" 41 | requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 42 | summary = "Cross-platform colored terminal text." 43 | groups = ["default"] 44 | marker = "sys_platform == \"win32\"" 45 | files = [ 46 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 47 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 48 | ] 49 | 50 | [[package]] 51 | name = "greenlet" 52 | version = "3.0.3" 53 | requires_python = ">=3.7" 54 | summary = "Lightweight in-process concurrent programming" 55 | groups = ["default"] 56 | files = [ 57 | {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, 58 | {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, 59 | {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, 60 | {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, 61 | {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, 62 | {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, 63 | {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, 64 | {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, 65 | {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, 66 | {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, 67 | {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, 68 | {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, 69 | {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, 70 | {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, 71 | {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, 72 | {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, 73 | {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, 74 | {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, 75 | {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, 76 | {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, 77 | {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, 78 | {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, 79 | {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, 80 | {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, 81 | {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, 82 | {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, 83 | {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, 84 | {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, 85 | {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, 86 | {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, 87 | {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, 88 | {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, 89 | {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, 90 | {file = 
"greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, 91 | {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, 92 | {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, 93 | {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, 94 | {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, 95 | ] 96 | 97 | [[package]] 98 | name = "idna" 99 | version = "3.7" 100 | requires_python = ">=3.5" 101 | summary = "Internationalized Domain Names in Applications (IDNA)" 102 | groups = ["default"] 103 | files = [ 104 | {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, 105 | {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, 106 | ] 107 | 108 | [[package]] 109 | name = "importlib-metadata" 110 | version = "8.2.0" 111 | requires_python = ">=3.8" 112 | summary = "Read metadata from Python packages" 113 | groups = ["default"] 114 | marker = "python_version < \"3.10\"" 115 | dependencies = [ 116 | "typing-extensions>=3.6.4; python_version < \"3.8\"", 117 | "zipp>=0.5", 118 | ] 119 | files = [ 120 | {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, 121 | {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, 122 | ] 123 | 124 | [[package]] 125 | name = "jinja2" 126 | version = "3.1.4" 127 | requires_python = ">=3.7" 128 | summary = "A very fast and expressive template engine." 129 | groups = ["default"] 130 | dependencies = [ 131 | "MarkupSafe>=2.0", 132 | ] 133 | files = [ 134 | {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, 135 | {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, 136 | ] 137 | 138 | [[package]] 139 | name = "loguru" 140 | version = "0.7.2" 141 | requires_python = ">=3.5" 142 | summary = "Python logging made (stupidly) simple" 143 | groups = ["default"] 144 | dependencies = [ 145 | "aiocontextvars>=0.2.0; python_version < \"3.7\"", 146 | "colorama>=0.3.4; sys_platform == \"win32\"", 147 | "win32-setctime>=1.0.0; sys_platform == \"win32\"", 148 | ] 149 | files = [ 150 | {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, 151 | {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, 152 | ] 153 | 154 | [[package]] 155 | name = "markdown" 156 | version = "3.6" 157 | requires_python = ">=3.8" 158 | summary = "Python implementation of John Gruber's Markdown." 
159 | groups = ["default"] 160 | dependencies = [ 161 | "importlib-metadata>=4.4; python_version < \"3.10\"", 162 | ] 163 | files = [ 164 | {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, 165 | {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, 166 | ] 167 | 168 | [[package]] 169 | name = "markupsafe" 170 | version = "2.1.5" 171 | requires_python = ">=3.7" 172 | summary = "Safely add untrusted strings to HTML/XML markup." 173 | groups = ["default"] 174 | files = [ 175 | {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, 176 | {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, 177 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, 178 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, 179 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, 180 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, 181 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, 182 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, 183 | {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, 184 | {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, 185 | {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, 186 | {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, 187 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, 188 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, 189 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, 190 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, 191 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, 192 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, 193 
| {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, 194 | {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, 195 | {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, 196 | {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, 197 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, 198 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, 199 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, 200 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, 201 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, 202 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, 203 | {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, 204 | {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, 205 | {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, 206 | {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, 207 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, 208 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, 209 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, 210 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, 211 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, 212 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, 213 | {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, 214 | {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, 215 | {file = "MarkupSafe-2.1.5.tar.gz", hash = 
"sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, 216 | ] 217 | 218 | [[package]] 219 | name = "multidict" 220 | version = "6.0.5" 221 | requires_python = ">=3.7" 222 | summary = "multidict implementation" 223 | groups = ["default"] 224 | files = [ 225 | {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, 226 | {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, 227 | {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, 228 | {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, 229 | {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, 230 | {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, 231 | {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, 232 | {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, 233 | {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, 234 | {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, 235 | {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, 236 | {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, 237 | {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, 238 | {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, 239 | {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, 240 | {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, 241 | {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, 242 | {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, 243 | {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, 244 | {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, 245 | {file = 
"multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, 246 | {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, 247 | {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, 248 | {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, 249 | {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, 250 | {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, 251 | {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, 252 | {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, 253 | {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, 254 | {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, 255 | {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, 256 | {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, 257 | {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, 258 | {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, 259 | {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, 260 | {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, 261 | {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, 262 | {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, 263 | {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, 264 | {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, 265 | {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, 266 | {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, 267 | {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, 268 | {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, 269 | {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, 270 | {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, 271 | {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, 272 | {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, 273 | {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, 274 | {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, 275 | {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, 276 | {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, 277 | {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, 278 | {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, 279 | {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, 280 | {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, 281 | {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, 282 | {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, 283 | {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, 284 | {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, 285 | {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, 286 | {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, 287 | ] 288 | 289 | [[package]] 290 | name = "nonebot-plugin-htmlrender" 291 | version = "0.3.3" 292 | requires_python = "<4.0,>=3.9" 293 | summary = "通过浏览器渲染图片" 294 | groups = ["default"] 295 | dependencies = [ 296 | "Pygments>=2.10.0", 297 | "aiofiles>=0.8.0", 298 | "jinja2>=3.0.3", 299 | "markdown>=3.3.6", 300 | "nonebot2>=2.2.0", 301 | "playwright>=1.17.2", 302 | "pymdown-extensions>=9.1", 303 | 
"python-markdown-math>=0.8", 304 | ] 305 | files = [ 306 | {file = "nonebot_plugin_htmlrender-0.3.3-py3-none-any.whl", hash = "sha256:2ac871d345c94103aa630153e007caa6319b5f5468491347513d746ba98b70d7"}, 307 | {file = "nonebot_plugin_htmlrender-0.3.3.tar.gz", hash = "sha256:ab46ecc6dbd102628af8f88437fdc24da11839487950d07d0c5fd8db0db98ae8"}, 308 | ] 309 | 310 | [[package]] 311 | name = "nonebot2" 312 | version = "2.3.2" 313 | requires_python = "<4.0,>=3.9" 314 | summary = "An asynchronous python bot framework." 315 | groups = ["default"] 316 | dependencies = [ 317 | "loguru<1.0.0,>=0.6.0", 318 | "pydantic!=2.5.0,!=2.5.1,<3.0.0,>=1.10.0", 319 | "pygtrie<3.0.0,>=2.4.1", 320 | "python-dotenv<2.0.0,>=0.21.0", 321 | "tomli<3.0.0,>=2.0.1; python_version < \"3.11\"", 322 | "typing-extensions<5.0.0,>=4.4.0", 323 | "yarl<2.0.0,>=1.7.2", 324 | ] 325 | files = [ 326 | {file = "nonebot2-2.3.2-py3-none-any.whl", hash = "sha256:c51aa3c1f23d8062ce6d13c8423dcb9a8bf0c44f21687916095f825da79a9a55"}, 327 | {file = "nonebot2-2.3.2.tar.gz", hash = "sha256:af52e27e03e7fe147f2b642151eec81f264d058efe53b974eb08b5d90177cd14"}, 328 | ] 329 | 330 | [[package]] 331 | name = "playwright" 332 | version = "1.45.1" 333 | requires_python = ">=3.8" 334 | summary = "A high-level API to automate web browsers" 335 | groups = ["default"] 336 | dependencies = [ 337 | "greenlet==3.0.3", 338 | "pyee==11.1.0", 339 | ] 340 | files = [ 341 | {file = "playwright-1.45.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:360607e37c00cdf97c74317f010e106ac4671aeaec6a192431dd71a30941da9d"}, 342 | {file = "playwright-1.45.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:20adc2abf164c5e8969f9066011b152e12c210549edec78cd05bd0e9cf4135b7"}, 343 | {file = "playwright-1.45.1-py3-none-macosx_11_0_universal2.whl", hash = "sha256:5f047cdc6accf4c7084dfc7587a2a5ef790cddc44cbb111e471293c5a91119db"}, 344 | {file = "playwright-1.45.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:f06f6659abe0abf263e5f6661d379fbf85c112745dd31d82332ceae914f58df7"}, 345 | {file = "playwright-1.45.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87dc3b3d17e12c68830c29b7fdf5e93315221bbb4c6090e83e967e154e2c1828"}, 346 | {file = "playwright-1.45.1-py3-none-win32.whl", hash = "sha256:2b8f517886ef1e2151982f6e7be84be3ef7d8135bdcf8ee705b4e4e99566e866"}, 347 | {file = "playwright-1.45.1-py3-none-win_amd64.whl", hash = "sha256:0d236cf427784e77de352ba1b7d700693c5fe455b8e5f627f6d84ad5b84b5bf5"}, 348 | ] 349 | 350 | [[package]] 351 | name = "pydantic" 352 | version = "2.8.2" 353 | requires_python = ">=3.8" 354 | summary = "Data validation using Python type hints" 355 | groups = ["default"] 356 | dependencies = [ 357 | "annotated-types>=0.4.0", 358 | "pydantic-core==2.20.1", 359 | "typing-extensions>=4.12.2; python_version >= \"3.13\"", 360 | "typing-extensions>=4.6.1; python_version < \"3.13\"", 361 | ] 362 | files = [ 363 | {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, 364 | {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, 365 | ] 366 | 367 | [[package]] 368 | name = "pydantic-core" 369 | version = "2.20.1" 370 | requires_python = ">=3.8" 371 | summary = "Core functionality for Pydantic validation and serialization" 372 | groups = ["default"] 373 | dependencies = [ 374 | "typing-extensions!=4.7.0,>=4.6.0", 375 | ] 376 | files = [ 377 | {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, 378 | {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, 379 | {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, 380 | {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, 381 | {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, 382 | {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, 383 | {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, 384 | {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, 385 | {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, 386 | {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, 387 | {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, 388 | {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, 389 | {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, 390 | {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, 391 | {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, 392 | {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, 393 | {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, 394 | {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, 395 | {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, 396 | {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, 397 | {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, 398 | {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, 399 | {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, 400 | {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, 401 | {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, 402 | {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, 403 | {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, 404 | {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, 405 | {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, 406 | {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, 407 | {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, 408 | {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, 409 | {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, 410 | {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, 411 | {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, 412 | {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, 413 | {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, 414 | {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, 415 | {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, 416 | {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, 417 | {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, 418 | {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, 419 | {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, 420 | {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, 421 | {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, 422 | {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, 423 | {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, 424 | {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, 425 | {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, 426 | {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, 427 | {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, 428 | {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, 429 | {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, 430 | {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, 431 | {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, 432 | {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, 433 | {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, 434 | {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, 435 | {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, 436 | {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, 437 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, 438 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, 439 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, 440 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, 441 | {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, 442 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, 443 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, 444 | {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, 445 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, 446 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, 447 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, 448 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, 449 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, 450 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, 451 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, 452 | {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, 453 | {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, 454 | ] 455 | 456 | [[package]] 457 | name = "pyee" 458 | version = "11.1.0" 459 | requires_python = ">=3.8" 460 | summary = "A rough port of Node.js's EventEmitter to Python with a few tricks of its own" 461 | groups = ["default"] 462 | dependencies = [ 463 | "typing-extensions", 464 | ] 465 | files = [ 466 | {file = "pyee-11.1.0-py3-none-any.whl", hash = "sha256:5d346a7d0f861a4b2e6c47960295bd895f816725b27d656181947346be98d7c1"}, 467 | {file = "pyee-11.1.0.tar.gz", hash = "sha256:b53af98f6990c810edd9b56b87791021a8f54fd13db4edd1142438d44ba2263f"}, 468 | ] 469 | 470 | [[package]] 471 | name = "pygments" 472 | version = "2.18.0" 473 | requires_python = ">=3.8" 474 | summary = "Pygments is a syntax highlighting package written in Python." 475 | groups = ["default"] 476 | files = [ 477 | {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, 478 | {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, 479 | ] 480 | 481 | [[package]] 482 | name = "pygtrie" 483 | version = "2.5.0" 484 | summary = "A pure Python trie data structure implementation." 
485 | groups = ["default"] 486 | files = [ 487 | {file = "pygtrie-2.5.0-py3-none-any.whl", hash = "sha256:8795cda8105493d5ae159a5bef313ff13156c5d4d72feddefacaad59f8c8ce16"}, 488 | {file = "pygtrie-2.5.0.tar.gz", hash = "sha256:203514ad826eb403dab1d2e2ddd034e0d1534bbe4dbe0213bb0593f66beba4e2"}, 489 | ] 490 | 491 | [[package]] 492 | name = "pymdown-extensions" 493 | version = "10.9" 494 | requires_python = ">=3.8" 495 | summary = "Extension pack for Python Markdown." 496 | groups = ["default"] 497 | dependencies = [ 498 | "markdown>=3.6", 499 | "pyyaml", 500 | ] 501 | files = [ 502 | {file = "pymdown_extensions-10.9-py3-none-any.whl", hash = "sha256:d323f7e90d83c86113ee78f3fe62fc9dee5f56b54d912660703ea1816fed5626"}, 503 | {file = "pymdown_extensions-10.9.tar.gz", hash = "sha256:6ff740bcd99ec4172a938970d42b96128bdc9d4b9bcad72494f29921dc69b753"}, 504 | ] 505 | 506 | [[package]] 507 | name = "python-dotenv" 508 | version = "1.0.1" 509 | requires_python = ">=3.8" 510 | summary = "Read key-value pairs from a .env file and set them as environment variables" 511 | groups = ["default"] 512 | files = [ 513 | {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, 514 | {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, 515 | ] 516 | 517 | [[package]] 518 | name = "python-markdown-math" 519 | version = "0.8" 520 | requires_python = ">=3.6" 521 | summary = "Math extension for Python-Markdown" 522 | groups = ["default"] 523 | dependencies = [ 524 | "Markdown>=3.0", 525 | ] 526 | files = [ 527 | {file = "python-markdown-math-0.8.tar.gz", hash = "sha256:8564212af679fc18d53f38681f16080fcd3d186073f23825c7ce86fadd3e3635"}, 528 | {file = "python_markdown_math-0.8-py3-none-any.whl", hash = "sha256:c685249d84b5b697e9114d7beb352bd8ca2e07fd268fd4057ffca888c14641e5"}, 529 | ] 530 | 531 | [[package]] 532 | name = "pyyaml" 533 | version = "6.0.1" 534 | requires_python = ">=3.6" 535 | summary = "YAML parser and emitter for Python" 536 | groups = ["default"] 537 | files = [ 538 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, 539 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, 540 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, 541 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, 542 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, 543 | {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, 544 | {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, 545 | {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, 546 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, 547 | {file = 
"PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, 548 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, 549 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, 550 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, 551 | {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, 552 | {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, 553 | {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, 554 | {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, 555 | {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, 556 | {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, 557 | {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, 558 | {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, 559 | {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, 560 | {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, 561 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, 562 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, 563 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, 564 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, 565 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, 566 | {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, 567 | {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, 568 | {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, 569 | {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, 570 | ] 571 | 572 | [[package]] 573 | name = "tomli" 574 | version = "2.0.1" 
575 | requires_python = ">=3.7" 576 | summary = "A lil' TOML parser" 577 | groups = ["default"] 578 | marker = "python_version < \"3.11\"" 579 | files = [ 580 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 581 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 582 | ] 583 | 584 | [[package]] 585 | name = "typing-extensions" 586 | version = "4.12.2" 587 | requires_python = ">=3.8" 588 | summary = "Backported and Experimental Type Hints for Python 3.8+" 589 | groups = ["default"] 590 | files = [ 591 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, 592 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, 593 | ] 594 | 595 | [[package]] 596 | name = "win32-setctime" 597 | version = "1.1.0" 598 | requires_python = ">=3.5" 599 | summary = "A small Python utility to set file creation time on Windows" 600 | groups = ["default"] 601 | marker = "sys_platform == \"win32\"" 602 | files = [ 603 | {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, 604 | {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, 605 | ] 606 | 607 | [[package]] 608 | name = "yarl" 609 | version = "1.9.4" 610 | requires_python = ">=3.7" 611 | summary = "Yet another URL library" 612 | groups = ["default"] 613 | dependencies = [ 614 | "idna>=2.0", 615 | "multidict>=4.0", 616 | "typing-extensions>=3.7.4; python_version < \"3.8\"", 617 | ] 618 | files = [ 619 | {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, 620 | {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, 621 | {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, 622 | {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, 623 | {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, 624 | {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, 625 | {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, 626 | {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, 627 | {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, 628 | {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, 629 | {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, 630 | {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, 631 | {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, 632 | {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, 633 | {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, 634 | {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, 635 | {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, 636 | {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, 637 | {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, 638 | {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, 639 | {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, 640 | {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, 641 | {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, 642 | {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, 643 | {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, 644 | {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, 645 | {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, 646 | {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, 647 | {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, 648 | {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, 649 | {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, 650 | {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, 651 | {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, 652 | {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, 653 | {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, 654 | {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, 655 | {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, 656 | {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, 657 | {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, 658 | {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, 659 | {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, 660 | {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, 661 | {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, 662 | {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, 663 | {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, 664 | {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, 665 | {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, 666 | {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, 667 | {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, 668 | {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, 669 | {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, 670 | {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, 671 | {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, 672 | {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, 673 | {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, 674 | {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, 675 | {file 
= "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, 676 | {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, 677 | {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, 678 | {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, 679 | {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, 680 | {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, 681 | ] 682 | 683 | [[package]] 684 | name = "zipp" 685 | version = "3.19.2" 686 | requires_python = ">=3.8" 687 | summary = "Backport of pathlib-compatible object wrapper for zip files" 688 | groups = ["default"] 689 | marker = "python_version < \"3.10\"" 690 | files = [ 691 | {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, 692 | {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, 693 | ] 694 | -------------------------------------------------------------------------------- /scripts/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "scripts" 3 | version = "0.1.0" 4 | description = "" 5 | authors = [{ name = "LgCookie", email = "lgc2333@126.com" }] 6 | dependencies = ["nonebot2>=2.3.2", "nonebot-plugin-htmlrender>=0.3.3"] 7 | requires-python = ">=3.9" 8 | license = { text = "GPL-3.0-only" } 9 | 10 | [tool.pdm] 11 | distribution = false 12 | 13 | [tool.pdm.scripts] 14 | update-help-image = "python -m update_help_image" 15 | -------------------------------------------------------------------------------- /scripts/update_help_image/__init__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: E402 2 | 3 | import nonebot 4 | 5 | nonebot.init(driver="~none") 6 | 7 | from nonebot.plugin import require 8 | 9 | require("nonebot_plugin_htmlrender") 10 | -------------------------------------------------------------------------------- /scripts/update_help_image/__main__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pathlib import Path 3 | from typing import TYPE_CHECKING, Literal, Union, cast 4 | 5 | import nonebot 6 | from nonebot_plugin_htmlrender.data_source import TEMPLATES_PATH, env, get_new_page, markdown, read_tpl 7 | 8 | if TYPE_CHECKING: 9 | from nonebot.drivers.none import Driver as NoneDriver 10 | 11 | 12 | async def md_to_pic( 13 | md: str = "", 14 | extra_css: str = "", 15 | type: Literal["jpeg", "png"] = "png", # noqa: A002 16 | quality: Union[int, None] = None, 17 | device_scale_factor: float = 2, 18 | ) -> bytes: 19 | template = env.get_template("markdown.html") 20 | md = markdown.markdown( 21 | md, 22 | extensions=[ 23 | "pymdownx.tasklist", 24 | "tables", 25 | "fenced_code", 26 | "codehilite", 27 | "mdx_math", 28 | "pymdownx.tilde", 29 | ], 30 | extension_configs={"mdx_math": {"enable_dollar_delimiter": True}}, 31 | tab_length=2, 32 | ) 33 | 34 | extra = "" 35 | if "math/tex" in md: 36 | katex_css, katex_js, mathtex_js = await asyncio.gather( 37 | 
read_tpl("katex/katex.min.b64_fonts.css"), 38 | read_tpl("katex/katex.min.js"), 39 | read_tpl("katex/mathtex-script-type.min.js"), 40 | ) 41 | extra = ( 42 | f'' 43 | f"" 44 | f"" 45 | ) 46 | 47 | github_md_css, pygments_css = await asyncio.gather( 48 | read_tpl("github-markdown-light.css"), 49 | read_tpl("pygments-default.css"), 50 | ) 51 | css = f"{github_md_css}\n{pygments_css}\n{extra_css}" 52 | 53 | html = await template.render_async(css=css, md=md, extra=extra) 54 | 55 | async with get_new_page(device_scale_factor) as page: 56 | await page.goto(f"file://{TEMPLATES_PATH}") 57 | await page.set_content(html, wait_until="networkidle") 58 | await page.wait_for_load_state("load") 59 | elem = await page.query_selector("article.markdown-body") 60 | assert elem 61 | return await elem.screenshot(type=type, quality=quality) 62 | 63 | 64 | driver = nonebot.get_driver() 65 | 66 | 67 | @driver.on_startup 68 | async def _(): 69 | project_root = Path(__file__).parent.parent.parent 70 | help_md_path = project_root / "docs" / "usage.md" 71 | help_img_path = project_root / "YetAnotherPicSearch" / "res" / "usage.jpg" 72 | if not (p := help_img_path.parent).exists(): 73 | p.mkdir(parents=True) 74 | 75 | help_md = help_md_path.read_text("u8") 76 | help_img = await md_to_pic(help_md, type="jpeg") 77 | help_img_path.write_bytes(help_img) 78 | 79 | cast("NoneDriver", driver).exit() 80 | 81 | 82 | nonebot.run() 83 | --------------------------------------------------------------------------------