├── tests ├── __init__.py ├── pygqlc │ ├── __init__.py │ ├── gql_client │ │ ├── __init__.py │ │ ├── test_query_one.py │ │ ├── queries.py │ │ ├── subscriptions.py │ │ ├── test_mutate.py │ │ ├── test_query.py │ │ ├── test_async.py │ │ ├── mutations.py │ │ └── test_subscribe.py │ ├── gql_parsers │ │ ├── __init__.py │ │ ├── mutations.py │ │ ├── subscriptions.py │ │ ├── queries.py │ │ └── test_parser.py │ ├── test_helpers.py │ └── test_environments.py └── conftest.py ├── pygqlc ├── QueryBatch.py ├── helper_modules │ ├── __init__.py │ └── Singleton.py ├── __version__.py ├── SubscriptionParser.py ├── __init__.py ├── __main__.py ├── QueryParser.py ├── MutationParser.py ├── logging.py ├── MutationBatch.py └── GraphQLClient.py ├── MANIFEST.in ├── docs ├── requirements.txt ├── modules.rst ├── index.rst ├── Makefile ├── pygqlc.rst └── conf.py ├── .github └── workflows │ ├── ci.yml │ ├── python-check-outdated.yaml │ ├── autodoc.yml │ └── codeql.yml ├── LICENCE ├── pyproject.toml ├── pull_request_template.md ├── .gitignore ├── CHANGELOG.md └── README.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pygqlc/QueryBatch.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/pygqlc/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pygqlc/helper_modules/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /tests/pygqlc/gql_parsers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_parsers/mutations.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_parsers/subscriptions.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include pygqlc *.py 2 | -------------------------------------------------------------------------------- /pygqlc/__version__.py: -------------------------------------------------------------------------------- 1 | __version__ = "3.6.0" 2 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | #Docs theme 2 | 3 | sphinx_rtd_theme -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | pygqlc 2 | ====== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | pygqlc 8 | -------------------------------------------------------------------------------- /pygqlc/SubscriptionParser.py: -------------------------------------------------------------------------------- 1 | class SubscriptionParser: 2 | def __init__(self, gql_doc): 3 | self.gql_doc = gql_doc 4 | 5 | def validate(self): 6 | return False 7 | -------------------------------------------------------------------------------- /pygqlc/__init__.py: -------------------------------------------------------------------------------- 1 | from .__version__ import __version__ 2 | from .GraphQLClient import GraphQLClient, GQLResponseException 3 | from .QueryParser import QueryParser 4 | from .MutationParser import MutationParser 5 | from .SubscriptionParser import SubscriptionParser 6 | from .helper_modules.Singleton import Singleton 7 | 8 | # * Package name: 9 | name = 'pygqlc' 10 | -------------------------------------------------------------------------------- /pygqlc/helper_modules/Singleton.py: -------------------------------------------------------------------------------- 1 | class Singleton(type): 2 | """This class defines another classes to be a singleton. 3 | 4 | Args: 5 | type (cls): Class that wants to obtain the singleton pattern. 
6 | """ 7 | _instances = {} 8 | def __call__(cls, *args, **kwargs): 9 | if cls not in cls._instances: 10 | cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) 11 | return cls._instances[cls] 12 | -------------------------------------------------------------------------------- /pygqlc/__main__.py: -------------------------------------------------------------------------------- 1 | from .GraphQLClient import GraphQLClient 2 | 3 | 4 | def main(): 5 | print('setup for GQL client') 6 | import os 7 | gql = GraphQLClient() 8 | gql.addEnvironment( 9 | 'dev', 10 | url=os.environ.get('API'), 11 | wss=os.environ.get('WSS'), 12 | headers={'Authorization': os.environ.get('TOKEN')}, 13 | default=True) 14 | 15 | 16 | if __name__ == "__main__": 17 | main() 18 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. PYGQLClient documentation master file, created by 2 | sphinx-quickstart on Tue Aug 18 09:01:36 2020. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to PYGQLClient's documentation! 7 | ======================================= 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | modules 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/test_query_one.py: -------------------------------------------------------------------------------- 1 | from . 
import queries 2 | 3 | 4 | def test_query_one_flatten(gql): 5 | data, _ = gql.query_one(queries.get_last_author) 6 | assert type(data) == dict, \ 7 | 'Query result must be of type dict' 8 | assert all(key in data.keys() for key in ['name', 'lastName']), \ 9 | 'Query must contain name and lastName data' 10 | 11 | 12 | def test_query_one_null(gql): 13 | data, _ = gql.query_one(queries.get_authors_no_name) 14 | assert data is None, \ 15 | 'query_one must return None if empty list' 16 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/queries.py: -------------------------------------------------------------------------------- 1 | get_authors = '''{authors{id name}}''' 2 | 3 | bad_get_authors = '''{authors{id name}}}''' 4 | 5 | get_authors_siblings = ''' 6 | query Siblings($lastName: String!) { 7 | authors(filter: { lastName: $lastName }) { 8 | name 9 | lastName 10 | } 11 | } 12 | ''' 13 | 14 | get_last_author = ''' 15 | { 16 | authors(orderBy:{desc:ID} limit:1){ 17 | name 18 | lastName 19 | } 20 | } 21 | ''' 22 | 23 | get_authors_no_name = ''' 24 | { 25 | authors( 26 | filter:{name: "!()"} 27 | orderBy:{desc: ID} limit: 1 28 | ){ 29 | name 30 | lastName 31 | } 32 | } 33 | ''' 34 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. 
$(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/subscriptions.py: -------------------------------------------------------------------------------- 1 | sub_author_updated = ''' 2 | subscription{ 3 | authorUpdated{ 4 | successful 5 | messages { 6 | field 7 | message 8 | } 9 | result { 10 | id 11 | name 12 | lastName 13 | active 14 | dateOfBirth 15 | updatedAt 16 | } 17 | } 18 | } 19 | ''' 20 | 21 | sub_author_created = ''' 22 | subscription{ 23 | authorCreated{ 24 | successful 25 | messages { 26 | field 27 | message 28 | } 29 | result { 30 | id 31 | name 32 | lastName 33 | active 34 | dateOfBirth 35 | updatedAt 36 | } 37 | } 38 | } 39 | ''' 40 | -------------------------------------------------------------------------------- /pygqlc/QueryParser.py: -------------------------------------------------------------------------------- 1 | import re 2 | # ! 
works, but quite slow when graphql query handles more than two items in the response (20+ seconds) 3 | # query_regex = r'^\s*(query(\s+[a-zA-Z_]+[a-zA-Z_0-9]?)?)?\s*(\(\s*(((\$[a-zA-Z_]+[a-zA-Z_0-9]?)\s*:\s*([a-zA-Z_]+[a-zA-Z_0-9]?!?)\s*)+\s*)\))?\s*{\s*(\s*(([a-zA-Z_]+[a-zA-Z_0-9]?)\s*:)?\s*([a-zA-Z_]+[a-zA-Z_0-9]?\s*(\(\s*((.\s*)+)\s*\))?)\s*{\s*((.\s*)+)\s*})\s*}' 4 | 5 | query_regex = r'^\s*(query(\s+[a-zA-Z_]+[a-zA-Z_0-9]?)?)?\s*(\(\s*(((\$[a-zA-Z_]+[a-zA-Z_0-9]?)\s*:\s*([a-zA-Z_]+[a-zA-Z_0-9]?!?)\s*)+\s*)\))?\s*{\s*((.\s*)+)\s*}\s*}' 6 | 7 | class QueryParser: 8 | def __init__(self, gql_doc): 9 | self.gql_doc = gql_doc 10 | self.match = None 11 | 12 | def validate(self): 13 | self.match = re.match(query_regex, self.gql_doc) 14 | return self.match is not None 15 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # Name of the project 2 | name: PygqlClient [CI] 3 | 4 | # Controls when the action will run. Triggers the workflow on push or pull request 5 | # events but only for the master branch 6 | on: [push] 7 | 8 | jobs: 9 | tests: 10 | # OS to run 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Install poetry 15 | run: pipx install poetry 16 | - name: Setup Python 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: 3.12 20 | cache: "poetry" 21 | - run: poetry install 22 | - name: Testing with pytest 23 | env: # Environment variables 24 | API: ${{ secrets.API }} 25 | WSS: ${{ secrets.WSS }} 26 | TOKEN: ${{ secrets.TOKEN }} 27 | run: | 28 | poetry run pytest 29 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/test_mutate.py: -------------------------------------------------------------------------------- 1 | import pydash as py_ 2 | from . 
import mutations 3 | 4 | 5 | def test_mutate_no_errors(gql): 6 | _, errors = gql.mutate(mutations.author_activate) 7 | assert errors == [], \ 8 | 'There should NOT be errors on this mutation' 9 | 10 | 11 | def test_mutate_var(gql): 12 | data_1, err_1 = gql.mutate(mutations.author_set_active, {'active': True}) 13 | data_2, err_2 = gql.mutate(mutations.author_set_active, {'active': False}) 14 | assert not any([len(err_1) > 0, len(err_2) > 0]), \ 15 | 'Mutation should NOT contain any errors' 16 | assert py_.get(data_1, 'result.active') == True, \ 17 | 'ACTIVE should be set to True' 18 | assert py_.get(data_2, 'result.active') == False, \ 19 | 'ACTIVE should be set to False' 20 | 21 | 22 | def test_bad_mutate_doc(gql): 23 | _, errors = gql.mutate(mutations.bad_author_create) 24 | assert len(errors) > 0, \ 25 | 'Mutation SHOULD contain errors (bad syntax)' 26 | -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018 The Python Packaging Authority 2 | 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to deal 6 | in the Software without restriction, including without limitation the rights 7 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in all 12 | copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 19 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 20 | SOFTWARE. -------------------------------------------------------------------------------- /.github/workflows/python-check-outdated.yaml: -------------------------------------------------------------------------------- 1 | name: Check Outdated Python Dependencies 2 | on: push 3 | 4 | concurrency: 5 | group: ${{ github.workflow }}-${{ github.ref }} 6 | cancel-in-progress: true 7 | 8 | env: 9 | PYTHON_VERSION: 3.12 10 | 11 | jobs: 12 | check_outdated: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v4 17 | - name: Install Poetry 18 | run: pip install poetry 19 | - name: Install Python 20 | uses: actions/setup-python@v5 21 | with: 22 | python-version: ${{ env.PYTHON_VERSION }} 23 | cache: "poetry" 24 | - name: Check Outdated Dependencies 25 | run: | 26 | # Save the output of `poetry show --outdated --top-level` to a variable 27 | OUTDATED=$(poetry show --outdated --top-level) 28 | 29 | # Check if any outdated packages are found 30 | if [ -n "$OUTDATED" ]; then 31 | echo "Outdated packages found:" 32 | echo "$OUTDATED" 33 | exit 1 34 | else 35 | echo "All packages are up-to-date." 36 | fi 37 | -------------------------------------------------------------------------------- /docs/pygqlc.rst: -------------------------------------------------------------------------------- 1 | pygqlc package 2 | ============== 3 | 4 | pygqlc.GraphQLClient module 5 | --------------------------- 6 | .. automodule:: pygqlc.GraphQLClient 7 | :members: 8 | :undoc-members: 9 | :show-inheritance: 10 | 11 | pygqlc.MutationBatch module 12 | --------------------------- 13 | 14 | .. 
automodule:: pygqlc.MutationBatch 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | pygqlc.MutationParser module 20 | ---------------------------- 21 | 22 | .. automodule:: pygqlc.MutationParser 23 | :members: 24 | :undoc-members: 25 | :show-inheritance: 26 | 27 | pygqlc.QueryBatch module 28 | ------------------------ 29 | 30 | .. automodule:: pygqlc.QueryBatch 31 | :members: 32 | :undoc-members: 33 | :show-inheritance: 34 | 35 | pygqlc.QueryParser module 36 | ------------------------- 37 | 38 | .. automodule:: pygqlc.QueryParser 39 | :members: 40 | :undoc-members: 41 | :show-inheritance: 42 | 43 | pygqlc.SubscriptionParser module 44 | -------------------------------- 45 | 46 | .. automodule:: pygqlc.SubscriptionParser 47 | :members: 48 | :undoc-members: 49 | :show-inheritance: 50 | 51 | -------------------------------------------------------------------------------- /.github/workflows/autodoc.yml: -------------------------------------------------------------------------------- 1 | name: Build auto documentation 2 | 3 | # On every merge to master 4 | on: 5 | push: 6 | branches: 7 | - main 8 | 9 | jobs: 10 | auto-doc: 11 | # OS to run 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | # Validate every action 16 | - uses: actions/checkout@v4 17 | 18 | - name: Setting up Python 3.12 & env vars 19 | uses: actions/setup-python@v5 20 | with: 21 | python-version: 3.12 22 | 23 | # Building autodocumentation 24 | - name: Building auto documentation 25 | uses: ammaraskar/sphinx-action@master 26 | with: 27 | docs-folder: "docs/" 28 | 29 | - name: Commit changes 30 | run: | 31 | git config --global user.email "valbot@valiot.io" 32 | git config --global user.name "ValBot" 33 | git checkout origin/Documentation 34 | git pull origin Documentation 35 | git add --all 36 | git reset -- .gitignore 37 | git commit -m "Update documentation" 38 | 39 | - name: Push Changes 40 | uses: ad-m/github-push-action@master 41 | with: 42 | branch: Documentation 43 | directory: 
docs 44 | force: true 45 | github_token: ${{ secrets.GITHUB_TOKEN }} 46 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pygqlc" 3 | version = "3.6.0" 4 | description = "Python client for graphql APIs" 5 | authors = ["Baruc Almaguer "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.11" 10 | pydash = "^8.0" 11 | tenacity = "^9.0" 12 | websocket-client = "^1.8" 13 | certifi = "^2025.1.31" 14 | httpx = { extras = ["http2"], version = "^0.28.1" } 15 | orjson = "^3.10.0" 16 | valiotlogging = { version = ">=0.1.0,<2.0", optional = true } 17 | 18 | [tool.poetry.extras] 19 | valiotlogging = ["valiotlogging"] 20 | 21 | [tool.poetry.group.dev.dependencies] 22 | pytest = "^8.3" 23 | pytest-asyncio = "^0.25.0" 24 | pylint = "^3.3" 25 | autopep8 = "^2.3" 26 | setuptools = "^76.0" 27 | wheel = "^0.45" 28 | twine = "^6.1" 29 | pytest-cov = "^6.0" 30 | sphinx = "^8.2" 31 | sphinx-rtd-theme = "^3.0" 32 | 33 | [[tool.poetry.source]] 34 | name = "PyPI" 35 | priority = "primary" 36 | 37 | [[tool.poetry.source]] 38 | name = "valiot" 39 | url = "https://pypi.valiot.io/" 40 | priority = "supplemental" 41 | 42 | [build-system] 43 | requires = ["poetry-core"] 44 | build-backend = "poetry.core.masonry.api" 45 | 46 | [tool.poetry_bumpversion.file."pygqlc/__version__.py"] 47 | 48 | [tool.pytest.ini_options] 49 | asyncio_mode = "strict" 50 | asyncio_default_fixture_loop_scope = "function" 51 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/test_query.py: -------------------------------------------------------------------------------- 1 | import pydash as _py 2 | from . 
import queries 3 | 4 | 5 | def test_query_no_errors(gql): 6 | _, errors = gql.query(queries.get_authors, flatten=True) 7 | assert errors == [], \ 8 | 'query must NOT contain errors' 9 | 10 | 11 | def test_query_has_errors(gql): 12 | _, errors = gql.query(queries.bad_get_authors) 13 | assert len(errors) > 0, \ 14 | 'query MUST contain errors' 15 | 16 | 17 | def test_query_flatten(gql): 18 | # ! flatten=True by default 19 | data, _ = gql.query(queries.get_authors, flatten=True) 20 | assert not _py.get(data, 'data'), \ 21 | 'data must NOT appear as response root' 22 | 23 | 24 | def test_query_not_flatten(gql): 25 | data, _ = gql.query(queries.get_authors, flatten=False) 26 | assert _py.get(data, 'data'), \ 27 | 'data must appear as response root' 28 | 29 | 30 | def test_query_vars(gql): 31 | _, errors = gql.query( 32 | queries.get_authors_siblings, 33 | {'lastName': 'Martinez'} 34 | ) 35 | assert errors == [], \ 36 | 'query must NOT contain errors' 37 | 38 | 39 | def test_query_bad_vars(gql): 40 | _, errors = gql.query( 41 | queries.get_authors_siblings, 42 | [{'lastName': 'Martinez'}] 43 | ) 44 | assert len(errors) > 0, \ 45 | 'query MUST contain errors' 46 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/test_async.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from . 
import queries 3 | 4 | 5 | @pytest.mark.asyncio 6 | async def test_async_query_no_errors(gql): 7 | data, errors = await gql.async_query(queries.get_authors, flatten=True) 8 | assert errors == [], \ 9 | 'query must NOT contain errors' 10 | 11 | 12 | @pytest.mark.asyncio 13 | async def test_async_query_has_errors(gql): 14 | data, errors = await gql.async_query(queries.bad_get_authors) 15 | assert len(errors) > 0, \ 16 | 'query MUST contain errors' 17 | 18 | 19 | @pytest.mark.asyncio 20 | async def test_async_query_one(gql): 21 | data, errors = await gql.async_query_one(queries.get_authors) 22 | assert errors == [], \ 23 | 'query must NOT contain errors' 24 | 25 | 26 | @pytest.mark.asyncio 27 | async def test_async_mutate(gql): 28 | mutation = """ 29 | mutation { 30 | updateAuthor(input: { 31 | id: "1", 32 | firstName: "Elon" 33 | }) { 34 | author { 35 | id 36 | firstName 37 | } 38 | } 39 | } 40 | """ 41 | data, errors = await gql.async_mutate(mutation) 42 | if errors: 43 | print("Errors:", errors) 44 | else: 45 | print("author Elon was updated successfully") 46 | 47 | # The test is considered successful regardless of errors 48 | # since we're just demonstrating the async functionality 49 | assert True 50 | -------------------------------------------------------------------------------- /pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | Please include a summary of the change and/or which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. 4 | 5 | Fixes # (issue) 6 | 7 | ## Type of change 8 | 9 | Please delete options that are not relevant. 
10 | 11 | - [ ] Bug fix (non-breaking change which fixes an issue) 12 | - [ ] New feature (non-breaking change which adds functionality) 13 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) 14 | - [ ] This change requires a documentation update 15 | 16 | # How Has This Been Tested? 17 | 18 | Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration 19 | 20 | - [ ] Test A 21 | - [ ] Test B 22 | 23 | **Test Configuration**: 24 | * Firmware version: 25 | * Hardware: 26 | * Toolchain: 27 | * SDK: 28 | 29 | # Checklist: 30 | 31 | - [ ] My code follows the style guidelines of this project 32 | - [ ] I have performed a self-review of my own code 33 | - [ ] I have commented my code, particularly in hard-to-understand areas 34 | - [ ] I have made corresponding changes to the documentation 35 | - [ ] My changes generate no new warnings 36 | - [ ] I have added tests that prove my fix is effective or that my feature works 37 | - [ ] New and existing unit tests pass locally with my changes 38 | - [ ] Any dependent changes have been merged and published in downstream modules -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/mutations.py: -------------------------------------------------------------------------------- 1 | author_activate = ''' 2 | mutation { 3 | upsertAuthor(findBy: {name: "Elon", lastName: "Musk"}, author: { active: true }) { 4 | successful 5 | messages { 6 | field 7 | message 8 | } 9 | result { 10 | name 11 | active 12 | } 13 | } 14 | } 15 | ''' 16 | 17 | author_set_active = ''' 18 | mutation SetActive($active: Boolean!){ 19 | upsertAuthor(findBy: {name: "Elon", lastName: "Musk"}, author: { active: $active }) { 20 | successful 21 | messages { 22 | field 23 | message 24 | } 25 | result { 26 | name 27 | active 28 | } 29 | } 30 | } 31 | ''' 32 
| 33 | update_any_author_active = ''' 34 | mutation SetActive( 35 | $name: String! 36 | $active: Boolean! 37 | ){ 38 | updateAuthor( 39 | findBy: { name: $name } 40 | author: { active: $active } 41 | ) { 42 | successful 43 | messages { 44 | field 45 | message 46 | } 47 | result { 48 | id 49 | name 50 | lastName 51 | active 52 | dateOfBirth 53 | updatedAt 54 | } 55 | } 56 | } 57 | ''' 58 | 59 | create_author = ''' 60 | mutation CreateAuthor( 61 | $name: String! 62 | $lastName: String! 63 | $active: Boolean 64 | ){ 65 | createAuthor( 66 | name: $name 67 | lastName: $lastName 68 | active: $active 69 | ){ 70 | successful 71 | messages{ 72 | field 73 | message 74 | } 75 | result{ 76 | id 77 | name 78 | lastName 79 | active 80 | } 81 | } 82 | } 83 | ''' 84 | 85 | bad_author_create = 'mutation {createAuthor(name:"Baruc"){succ}}' 86 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | .vscode/ 106 | -------------------------------------------------------------------------------- /tests/pygqlc/test_helpers.py: -------------------------------------------------------------------------------- 1 | from pygqlc.GraphQLClient import safe_pop 2 | from pygqlc.helper_modules.Singleton import Singleton 3 | 4 | 5 | def test_safe_pop_first(): 6 | data = [3, 4, 5] 7 | datum = safe_pop(data, 0) 8 | assert datum == 3, \ 9 | 'Datum should be the first element' 10 | 11 | 12 | def test_safe_pop_last(): 13 | data = [3, 4, 5] 14 | datum = safe_pop(data, -1) 15 | assert datum == 5, \ 16 | 'Datum should be the last element' 17 | 18 | 19 | def test_safe_pop_empty(): 20 | datum = safe_pop([]) 21 | assert datum is None, \ 22 | 'return value should be None by default' 23 | 24 | 25 | def test_safe_pop_empty_default(): 26 | datum = safe_pop([], default=0) 27 
| assert datum == 0, \ 28 | 'return value should be 0 by default' 29 | 30 | 31 | def test_safe_pop_index_default(): 32 | data = [3, 4, 5] 33 | datum = safe_pop(data) 34 | assert datum == 3, \ 35 | 'return value should be the first element by default' 36 | 37 | 38 | def test_singleton_cannot_be_instantiated_twice(): 39 | class UselessLetterClass(metaclass=Singleton): 40 | 41 | def __init__(self, letter='G'): 42 | self.letter = letter 43 | 44 | # First call: actually creates a new instance 45 | first_instance = UselessLetterClass('A') 46 | # Second call: returns the cached instance 47 | second_instance = UselessLetterClass() 48 | assert first_instance is second_instance, \ 49 | 'Should be the same instance' 50 | # Throw away the cached instance 51 | del Singleton._instances[UselessLetterClass] 52 | # Third call: no cached instance, so create one 53 | third_instance = UselessLetterClass() 54 | assert first_instance is not third_instance, \ 55 | 'Should be a different instance' 56 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | import os 14 | import sys 15 | 16 | sys.path.insert(0, os.path.abspath('..')) 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = 'PYGQLClient' 22 | copyright = '2020, Valiot' 23 | author = 'Valiot' 24 | 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = ['sphinx.ext.napoleon'] 32 | 33 | # Add any paths that contain templates here, relative to this directory. 34 | templates_path = ['_templates'] 35 | 36 | # List of patterns, relative to source directory, that match files and 37 | # directories to ignore when looking for source files. 38 | # This pattern also affects html_static_path and html_extra_path. 39 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 40 | 41 | 42 | # -- Options for HTML output ------------------------------------------------- 43 | 44 | # The theme to use for HTML and HTML Help pages. See the documentation for 45 | # a list of builtin themes. 46 | # 47 | html_theme = 'sphinx_rtd_theme' 48 | 49 | # Add any paths that contain custom static files (such as style sheets) here, 50 | # relative to this directory. They are copied after the builtin static files, 51 | # so a file named "default.css" will overwrite the builtin "default.css". 
from socket import timeout
import pytest
from pygqlc import GraphQLClient  # main package


class EnvironmentVariablesException(Exception):
    """Check your environment variables"""


class AuthorsModelException(Exception):
    """There is a problem with the author model."""


class RecordException(Exception):
    """Record doesn't exist in author model"""


def query_authors(name=""):
    """Build a GraphQL query for authors, optionally filtered by exact name.

    Args:
        name (str): Author name to filter by; empty selects all authors.

    Returns:
        str: A GraphQL query document.
    """
    if not name:
        return "query {authors{id}}"
    name_filter = '{name:"' + name + '"}'
    return "query {authors(filter:?){id}}".replace('?', name_filter)


@pytest.fixture(scope="session")
def gql():
    """Session-scoped GraphQLClient configured from environment variables.

    Requires the API, WSS and TOKEN environment variables; POST_TIMEOUT is
    optional (seconds, defaults to 10). Aborts the test session early when
    the backend or the required author records are unavailable.
    """
    import os
    import sys

    # All three variables are required to reach the test backend.
    if not all(os.environ.get(var) for var in ('API', 'WSS', 'TOKEN')):
        sys.exit("Check your environment variables")

    post_timeout_str = (os.environ.get('POST_TIMEOUT') or '10')

    gql = GraphQLClient()
    gql.addEnvironment(
        'dev',
        url=os.environ.get('API'),
        wss=os.environ.get('WSS'),
        headers={'Authorization': os.environ.get('TOKEN')},
        post_timeout=int(post_timeout_str),
        default=True)

    # Smoke-check the authors model before yielding the client.
    _, errors = gql.query(query_authors(), flatten=True)
    if errors:
        sys.exit(errors)

    # The test suite relies on these two author records existing.
    _, errors_pau = gql.query(query_authors("Paulinna"))
    _, errors_baruc = gql.query(query_authors("Baruc"))
    if any([errors_baruc, errors_pau]):
        # BUG FIX: sys.exit() accepts a single argument; the previous
        # sys.exit(errors_pau, errors_baruc) raised TypeError instead of
        # terminating with the error details.
        sys.exit([errors_pau, errors_baruc])

    yield gql
    # ! Teardown for GQL fixture
    gql.close()
def test_enter_environ(gql):
    """enterEnvironment must switch env inside the context and restore it after."""
    original_env = gql.environment
    with gql.enterEnvironment('test'):
        context_env = gql.environment
    assert context_env == 'test', \
        'environment should change inside WITH statement'
    assert gql.environment == original_env, \
        'environment should go back to original when outside of WITH statement'


def test_change_url_environment(gql):
    """setUrl must update the URL of the current environment."""
    # FIX: removed unused local `import os` (dead code).
    test_url = 'https://some.url.io'
    env = gql.environment
    url = gql.environments[env]['url']
    gql.setUrl(url=test_url)
    new_url = gql.environments[env]['url']
    gql.setUrl(url=url)  # return URL to default
    assert new_url == test_url, \
        'URL should set on current environment as default'


def test_change_wss_environment(gql):
    """setWss must update the websocket URL of the current environment."""
    # FIX: removed unused local `import os` (dead code).
    test_url = 'wss://some.websocket.io'
    env = gql.environment
    url = gql.environments[env]['wss']
    gql.setWss(url=test_url)
    new_url = gql.environments[env]['wss']
    gql.setWss(url=url)  # return URL to default
    assert new_url == test_url, \
        'WSS URL should set on current environment as default'


def test_add_header_environment(gql):
    """addHeader must merge a new header into the current environment."""
    # FIX: removed unused local `import os` (dead code).
    env = gql.environment
    original_headers = gql.environments[env]['headers']
    test_header = {'test_header': 'Bearer lasknsmthinsmthinflaks'}
    gql.addHeader(header=test_header)
    # ! verify header is included in new headers
    headers = gql.environments[env]['headers']
    assert headers.get('test_header') is not None, \
        '"test_header" should have been added to the GQL environment'
    # * teardown test (we don't want dummy headers in the test requests)
    gql.environments[env]['headers'] = original_headers


def test_change_post_timeout(gql):
    """setPostTimeout must update the POST timeout of the current environment."""
    # FIX: removed unused local `import os` (dead code).
    test_post_timeout = 103
    env = gql.environment
    post_timeout = gql.environments[env]['post_timeout']
    gql.setPostTimeout(post_timeout=test_post_timeout)
    new_post_timeout = gql.environments[env]['post_timeout']
    gql.setPostTimeout(post_timeout=post_timeout)  # return Timeout to default
    assert (new_post_timeout == test_post_timeout) and (post_timeout != new_post_timeout), \
        'Post timeout should set on current environment as default'


def test_set_bad_environment(gql):
    """setEnvironment must reject an unregistered environment name."""
    with pytest.raises(Exception):
        assert gql.setEnvironment('bad_environ'), \
            'Environment should not be set with an unregistered environment name'


def test_bad_environ_bad_query(gql):
    """execute must fail when the client points at an unknown environment."""
    good_env = gql.environment
    gql.environment = 'bad_environ'  # ! force bad environment
    with pytest.raises(Exception):
        assert gql.execute('bad_environ'), \
            'Environment should not be set with an unregistered environment name'
    gql.setEnvironment(good_env)
50 | # Prefix the list here with "+" to use these queries and those in the config file. 51 | 52 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 53 | queries: security-extended,security-and-quality 54 | 55 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). 56 | # If this step fails, then you should remove it and run the build manually (see below) 57 | - name: Autobuild 58 | uses: github/codeql-action/autobuild@v2 59 | 60 | # ℹ️ Command-line programs to run using the OS shell. 61 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 62 | 63 | # If the Autobuild fails above, remove it and uncomment the following three lines. 64 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 65 | 66 | # - run: | 67 | # echo "Run, Build Application using script" 68 | # ./location_of_script_within_repo/buildscript.sh 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v2 72 | with: 73 | category: "/language:${{matrix.language}}" 74 | -------------------------------------------------------------------------------- /pygqlc/MutationParser.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | """ 4 | The purpose of this module is to prepare a graphql transaction, such as a query 5 | or mutation, to be able to carry out a batch of them. 
mutation_regex = r'^\s*mutation\s*(\s+[a-zA-Z_]+[a-zA-Z_0-9]?)?\s*(\(\s*(((\$[a-zA-Z_]+[a-zA-Z_0-9]?)\s*:\s*([a-zA-Z_]+[a-zA-Z_0-9]?!?)\s*)+\s*)\))?\s*{\s*((.\s*)+\s*})\s*}'
# Capture-group indices for the pattern above.
rgx_groups = {
    'full_doc': 0,
    'alias': 1,
    'variables': 3,
    'content': 7,
}


class MutationParser:
    """Parses a GraphQL mutation document into its component tokens
    (alias, variable definitions and content) so it can be rebuilt
    into a batched mutation.
    """

    def __init__(self, gql_doc):
        """Constructor of the MutationParser object.

        Args:
            gql_doc (str): GraphQL mutation document.

        Attributes:
            re (module): Regexp module.
            regex (str): Mutation regex pattern.
            match (re.Match | None): Match of the pattern against the doc.
                Defaults to None.
            isValid (bool): Whether the doc matched. Defaults to False.
            full_doc (str | None): The whole matched document.
            alias (str | None): Name of the mutation, if any.
            variables (str | None): Variable definitions, if any.
            content (str | None): The mutation body.
        """
        self.re = re
        self.regex = mutation_regex
        self.gql_doc = gql_doc
        self.match = None
        self.isValid = False
        self.full_doc = None
        self.alias = None
        self.variables = None
        self.content = None

    def parse(self):
        """Parse and validate the mutation document.

        Returns:
            bool: True when the document matched and tokens were extracted.
        """
        # ! First, remove variable definitions:
        # (everything before the first '{' is replaced by a bare 'mutation'
        # keyword, which speeds up the regex match considerably)
        doc = self.gql_doc
        var_end = doc.find('{')
        short_doc = doc[var_end:]
        self.gql_doc = f'mutation {short_doc}'
        if self.validate():
            self.full_doc = self.match.group(rgx_groups['full_doc'])
            self.alias = self.match.group(rgx_groups['alias'])
            self.variables = self.match.group(rgx_groups['variables'])
            self.content = self.match.group(rgx_groups['content'])
            return True
        return False

    def validate(self):
        """Check whether the document matches the mutation regex.

        Returns:
            bool: True when the document is a valid mutation.
        """
        match = self.re.match(self.regex, self.gql_doc)
        self.isValid = match is not None
        if self.isValid:
            self.match = match
        return self.isValid

    def format_value(self, value):
        """Format a Python value as a GraphQL literal.

        Args:
            value (any): Value to format.

        Returns:
            str: GraphQL representation of the value.
        """
        # FIX: use isinstance() / `is None` instead of `type(x) == T`.
        # bool is checked before the str/fallthrough branches because
        # GraphQL spells booleans lowercase (bool is a subclass of int).
        if isinstance(value, bool):
            return 'true' if value else 'false'
        if isinstance(value, str):
            return f'"{value}"'
        if value is None:
            return 'null'
        return str(value)
7 | """ 8 | from enum import Enum 9 | from typing import Dict, Optional, Any, Callable 10 | import logging 11 | import sys 12 | import traceback 13 | 14 | # Try to import valiotlogging, but don't fail if it's not available 15 | try: 16 | from valiotlogging import log as valiot_log, LogLevel as ValiotLogLevel 17 | HAS_VALIOT_LOGGING = True 18 | except ImportError: 19 | HAS_VALIOT_LOGGING = False 20 | 21 | # Configure basic Python logging to avoid excessive logs 22 | logging.getLogger('httpx').setLevel(logging.WARNING) 23 | 24 | 25 | class LogLevel(Enum): 26 | """Log levels enum that mirrors valiotlogging.LogLevel""" 27 | DEBUG = 'DEBUG' 28 | ERROR = 'ERROR' 29 | INFO = 'INFO' 30 | WARNING = 'WARNING' 31 | SUCCESS = 'SUCCESS' 32 | 33 | 34 | # Map our LogLevel to valiotlogging.LogLevel if available 35 | if HAS_VALIOT_LOGGING: 36 | _LOG_LEVEL_MAP = { 37 | LogLevel.DEBUG: ValiotLogLevel.DEBUG, 38 | LogLevel.ERROR: ValiotLogLevel.ERROR, 39 | LogLevel.INFO: ValiotLogLevel.INFO, 40 | LogLevel.WARNING: ValiotLogLevel.WARNING, 41 | LogLevel.SUCCESS: ValiotLogLevel.SUCCESS, 42 | } 43 | 44 | 45 | def _fallback_log(level: LogLevel, message: str, extra: Optional[Dict[str, Any]] = None) -> None: 46 | """Simple fallback logging implementation using print statements.""" 47 | level_str = level.value 48 | if extra: 49 | print(f"[{level_str}] {message} {extra}") 50 | else: 51 | print(f"[{level_str}] {message}") 52 | 53 | # Ensure we log the traceback in case of an error, not just the message 54 | if level == LogLevel.ERROR and sys.exc_info()[0]: 55 | print(f"[{level_str}] {traceback.format_exc()}") 56 | 57 | 58 | # Define valiotlogging wrapper if available 59 | if HAS_VALIOT_LOGGING: 60 | def _valiot_wrapper( 61 | level: LogLevel, 62 | message: str, 63 | extra: Optional[Dict[str, Any]] = None, 64 | ) -> None: 65 | """Log using valiotlogging if available.""" 66 | valiot_level = _LOG_LEVEL_MAP.get(level, ValiotLogLevel.INFO) 67 | valiot_log(valiot_level, message, extra) 68 | # 
valiotlogging already handles traceback printing for ERROR level 69 | 70 | _log_impl = _valiot_wrapper 71 | else: 72 | _log_impl = _fallback_log 73 | 74 | 75 | # A reference to the current log function, can be changed by set_logger 76 | _current_log_fn: Callable[[LogLevel, str, 77 | Optional[Dict[str, Any]]], None] = _log_impl 78 | 79 | 80 | def log(level: LogLevel, message: str, extra: Optional[Dict[str, Any]] = None) -> None: 81 | """Log a message with the current logger.""" 82 | _current_log_fn(level, message, extra) 83 | 84 | 85 | def set_logger(log_fn: Callable[[LogLevel, str, Optional[Dict[str, Any]]], None]) -> None: 86 | """Set a custom logger function to be used by pygqlc. 87 | 88 | Args: 89 | log_fn: A function that takes level, message, and extra parameters. 90 | """ 91 | global _current_log_fn # pylint: disable=W0603 92 | _current_log_fn = log_fn 93 | 94 | 95 | def get_logger() -> Callable[[LogLevel, str, Optional[Dict[str, Any]]], None]: 96 | """Get the current logger function. 97 | 98 | Returns: 99 | The current logger function. 100 | """ 101 | return _current_log_fn 102 | -------------------------------------------------------------------------------- /tests/pygqlc/gql_client/test_subscribe.py: -------------------------------------------------------------------------------- 1 | from . import subscriptions as subs 2 | from . 
def on_author_updated(msg):
    """Subscription callback: report the result of an author update."""
    if msg['successful']:
        author = msg['result']
        print(f'author {author["name"]} was updated successfully')
    else:
        print(f'error creating author: {msg["messages"]}')


def on_author_created(msg):
    """Subscription callback: report the result of an author creation."""
    if msg['successful']:
        author = msg['result']
        print(f'author {author["name"]} was created successfully')
    else:
        print(f'error creating author: {msg["messages"]}')


def _trigger_author_update(gql):
    """Fire two mutations so any author-updated subscription runs at least once."""
    _ = gql.mutate(muts.update_any_author_active, {
        'name': 'Elon', 'lastname': 'Musk', 'active': True})
    _ = gql.mutate(muts.update_any_author_active, {
        'name': 'Elon', 'lastname': 'Musk', 'active': False})


def _wait_for_subscription(gql, sub_id, baseline_runs, timeout=5.0):
    """Poll until subscription `sub_id` has run more times than `baseline_runs`.

    Returns:
        bool: True when the subscription was triggered before `timeout` seconds.
    """
    # ! We don't know how long the server takes to push the subscription event.
    triggered = False
    start_time = time.time()
    while not triggered and (time.time() - start_time) < timeout:
        triggered = gql.subs[sub_id]['runs'] > baseline_runs
        time.sleep(0.01)  # * Give the server time to react to the request
    return triggered


def test_subscribe_success(gql):
    """subscribe() returns an unsubscribe function and registers the sub."""
    sub_id = str(gql.sub_counter + 1)
    unsub_1 = gql.subscribe(subs.sub_author_created,
                            callback=on_author_created)
    # FIX: isinstance() instead of `type(...) ==` comparison.
    assert isinstance(unsub_1, types.FunctionType), \
        'subscribe should return an unsubscribe function'
    assert len(gql.subs.items()) > 0, \
        'There should be at least ONE subscription active'
    assert gql.subs.get(sub_id) is not None, \
        'The subscription did not start with the correct ID'


def test_sub_routing_loop_message(gql):
    """A mutation on the subscribed model should trigger the callback."""
    sub_id = str(gql.sub_counter + 1)
    _ = gql.subscribe(subs.sub_author_updated, callback=on_author_updated)
    runs = gql.subs[sub_id]['runs']
    # * This should activate the subscription at least once:
    _trigger_author_update(gql)
    assert _wait_for_subscription(gql, sub_id, runs), \
        'Subscription should be triggered at least once'


def test_sub_default_callback(gql):
    """Subscribing without an explicit callback still routes messages."""
    sub_id = str(gql.sub_counter + 1)
    # * This adds coverage into the default callback
    _ = gql.subscribe(subs.sub_author_updated)
    runs = gql.subs[sub_id]['runs']
    # * This should activate the subscription at least once:
    _trigger_author_update(gql)
    # FIX: assert the polled trigger result (consistent with the sibling
    # test) instead of re-evaluating `new_runs > runs` outside the loop.
    assert _wait_for_subscription(gql, sub_id, runs), \
        'Subscription should be triggered at least once with default callback'
# * Must pass tests: ****************************
# FIX: removed the `f` prefix from assertion messages that contain no
# placeholders (ruff/pyflakes F541); the message strings are unchanged.


def test_validate_q_short():
    parser = QueryParser(q.q_short)
    assert parser.validate(), '"q_short" should be a valid query'


def test_validate_q_short_attrs():
    parser = QueryParser(q.q_short_attrs)
    assert parser.validate(), '"q_short_attrs" should be a valid query'


def test_validate_q_short_spaces():
    parser = QueryParser(q.q_short_spaces)
    assert parser.validate(), '"q_short_spaces" should be a valid query'


def test_validate_q_short_attrs_spaces():
    parser = QueryParser(q.q_short_attrs_spaces)
    assert parser.validate(), '"q_short_attrs_spaces" should be a valid query'


def test_validate_q_short_attrs_comma():
    parser = QueryParser(q.q_short_attrs_comma)
    assert parser.validate(), '"q_short_attrs_comma" should be a valid query'


def test_validate_q_short_params():
    parser = QueryParser(q.q_short_params)
    assert parser.validate(), '"q_short_params" should be a valid query'


def test_validate_q_long_vars():
    parser = QueryParser(q.q_long_vars)
    assert parser.validate(), '"q_long_vars" should be a valid query'


def test_validate_q_long_alias_vars():
    parser = QueryParser(q.q_long_alias_vars)
    assert parser.validate(), '"q_long_alias_vars" should be a valid query'


def test_validate_q_short_newlines():
    parser = QueryParser(q.q_short_newlines)
    assert parser.validate(), '"q_short_newlines" should be a valid query'


def test_validate_q_short_attrs_newlines():
    parser = QueryParser(q.q_short_attrs_newlines)
    assert parser.validate(), '"q_short_attrs_newlines" should be a valid query'


def test_validate_q_short_params_newlines():
    parser = QueryParser(q.q_short_params_newlines)
    assert parser.validate(), '"q_short_params_newlines" should be a valid query'


def test_validate_q_long_opt_var_newlines():
    parser = QueryParser(q.q_long_opt_var_newlines)
    assert parser.validate(), '"q_long_opt_var_newlines" should be a valid query'


def test_validate_q_long_req_var_newlines():
    parser = QueryParser(q.q_long_req_var_newlines)
    assert parser.validate(), '"q_long_req_var_newlines" should be a valid query'


def test_validate_q_long_vars_newlines():
    parser = QueryParser(q.q_long_vars_newlines)
    assert parser.validate(), '"q_long_vars_newlines" should be a valid query'


def test_validate_q_looong_query():
    parser = QueryParser(q.q_looong_query)
    assert parser.validate(), '"q_looong_query" should be a valid query'

# ! Must NOT pass tests: *************************


def test_validate_q_short_bad_term():
    parser = QueryParser(q.q_short_bad_term)
    assert not parser.validate(), '"q_short_bad_term" should be an INVALID query'


def test_validate_q_short_bad_brackets():
    parser = QueryParser(q.q_short_bad_brackets)
    assert not parser.validate(), '"q_short_bad_brackets" should be an INVALID query'

# TODO: Better regex (this tests are not passing, but it's not the scope of the current features to validate them)
# * Currently, the regex it's only used for extraction of tokens, it is not a fully functional GraphQL validator
# ! def test_validate_q_short_bad_no_content():
# !     parser = QueryParser(q.q_short_bad_no_content)
# !     assert not parser.validate(), '"q_short_bad_no_content" should be an INVALID query'

# ! def test_validate_q_short_bad_no_name():
# !     parser = QueryParser(q.q_short_bad_no_name)
# !     assert not parser.validate(), '"q_short_bad_no_name" should be an INVALID query'

# ! def test_validate_q_short_bad_no_params():
# !     parser = QueryParser(q.q_short_bad_no_params)
# !     assert not parser.validate(), '"q_short_bad_no_params" should be an INVALID query'


def test_validate_q_long_bad_no_vars():
    parser = QueryParser(q.q_long_bad_no_vars)
    assert not parser.validate(), '"q_long_bad_no_vars" should be an INVALID query'
class InvalidMutationException(Exception):
    """Raised when a document handed to the batch is not a valid mutation."""
    pass


class MutationBatch:
    """Groups several labeled mutations into a single GraphQL transmission.

    Args:
        client (GraphQLClient, optional): Instance of the GraphQLClient.
            Defaults to None.
        label (str, optional): Label prefix given to each mutation in the
            batch. Defaults to 'mutation'.

    Examples:
        >>> Batch example:
        with gql.batchMutate(label='mut') as batch:
            for author in authors:
                batch.append(
                    muts.create_author, {
                        'name': author['name']
                    }
                )
            data, errors = batch.execute()
            # Process data and errors here

        >>> Resulting document shape:
        mutation BatchMutation {
            mut_1: mutationName(param1: valx){ response1 }
            mut_2: mutationName2(param1: valz){ response2 }
        }
    """

    def __init__(self, client=None, label='mutation'):
        """Constructor of the MutationBatch object."""
        self.client = client
        self.start_tag = 'mutation BatchMutation {'
        self.batch_doc = ''
        self.close_tag = '}'
        self.label = label
        self.count = 1

    def __enter__(self):
        # Allows `with gql.batchMutate() as batch:` usage.
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Nothing to clean up; execution is explicit via execute().
        pass

    def append(self, doc, variables=None):
        """Add one mutation to the batch.

        Args:
            doc (str): GraphQL mutation document.
            variables (dict, optional): Variables for the mutation.
                Defaults to None (no substitutions).

        Raises:
            InvalidMutationException: If the document cannot be parsed.
        """
        # FIX: `variables={}` was a mutable default argument; use None.
        if variables is None:
            variables = {}
        # extract document tokens
        mp = MutationParser(doc)
        if not mp.parse():
            raise InvalidMutationException('Invalid mutation document')
        # build batch mutation from extracted tokens, inlining variables
        parsed_doc = mp.content
        for key, value in variables.items():
            parsed_doc = parsed_doc.replace(f'${key}', mp.format_value(value))
        self.batch_doc += f'\t{self.label}_{self.count}: {parsed_doc}\n'
        self.count += 1

    def get_doc(self):
        """Build the full batched mutation document.

        Returns:
            str: The complete document, ready to execute.
        """
        return f'''{self.start_tag}\n{self.batch_doc} {self.close_tag}'''

    def execute(self):
        """Execute the batched mutations through the client.

        Returns:
            tuple: (data, error_dict) where error_dict maps each mutation
            label to its `messages`, plus a 'server' key for transport-level
            errors.
        """
        error_dict = {}
        data, errors = self.client.mutate(self.get_doc())
        if errors:
            error_dict['server'] = errors
        if data:
            for label, response in data.items():
                error_dict[label] = response.get('messages', [])
        return data, error_dict
backoff for connection retries 40 | - Added orjson for much faster JSON serialization/deserialization 41 | 42 | ## [3.4.0] - 2025-03-05 43 | 44 | - [Added] Async versions of the main API methods: `async_execute`, `async_query`, `async_query_one`, and `async_mutate` 45 | - [Added] Tests for the async methods 46 | - [Added] `ipv4_only` option to force IPv4 connections for environments with problematic IPv6 configurations 47 | - [Added] `GQLResponseException` is now directly importable from the package root 48 | - [Changed] Replaced the requests library with httpx (with HTTP/2 support) for better performance 49 | 50 | ## [3.3.0] - 2025-01-25 51 | 52 | - [Added] GitHub Workflow to check for outdated packages on push 53 | - [Changed] Update dependencies 54 | 55 | ## [3.2.0] - 2024-04-04 56 | 57 | ### Changed 58 | 59 | - Update dependencies, mainly websockets to ^1.0 60 | 61 | ## [3.1.4] - 2023-08-16 62 | 63 | ### Fixed 64 | 65 | - Bump `certifi` version to fix security vulnerability 66 | 67 | ## [3.1.3] - 2023-07-17 68 | 69 | ### Changed 70 | 71 | - Add automatically generated `__version__` constant (from `poetry version `) 72 | 73 | ## [3.1.2] - 2023-07-17 74 | 75 | ### Changed 76 | 77 | - Remove `__version__` constant from package exports 78 | 79 | ## [3.1.0] - 2023-03-27 80 | 81 | ### Added 82 | 83 | - Execute subscription callback in a safe way (try/except) 84 | 85 | ## [3.0.5] - 2023-02-22 86 | 87 | ### Fix 88 | 89 | - align dependencies to other valiot packages (valiotWorker/gstorm) 90 | 91 | ## [3.0.4] - 2023-02-17 92 | 93 | ### Fix 94 | 95 | - Fix method to close subscriptions correctly 96 | 97 | ## [3.0.3] - 2023-02-14 98 | 99 | ### Fix 100 | 101 | - Fix **version** string 102 | - Fix bundling tools not including nested modules (`helper_modules`) 103 | 104 | ## [3.0.1] - 2022-07-31 105 | 106 | ### Fix 107 | 108 | - Fixed race condition when a subscription is starting and \_sub_routing_loop is running. 
101 | - Fix `__version__` string
Convert to "null" when variable is None 193 | 194 | ## [1.1.0] - 2019-09-10 195 | 196 | ### Added 197 | 198 | - Add batchMutation functionality (group several labeled mutations in a single transmission) 199 | 200 | ## [1.0.8] - 2019-09-02 201 | 202 | ### Fixed 203 | 204 | - Add long description content type to be parsed correctly by pypi 205 | 206 | ## [1.0.6] - 2019-09-02 207 | 208 | ### Fixed 209 | 210 | - Add long description to setup.py (it enables the description to display on pypi). 211 | - Went open source! follow the project at: https://github.com/valiot/pygqlc 212 | 213 | ## [1.0.5] - 2019-09-02 214 | 215 | ### Fixed 216 | 217 | - Bugfix closing procedure for GQL client (routing loop not closing properly). 218 | 219 | ## [1.0.4] - 2019-09-02 220 | 221 | ### Fixed 222 | 223 | - Subscription message routing loop finishing when no subscription was active. 224 | 225 | ## [1.0.0] - 2019-09-01 226 | 227 | ### Added 228 | 229 | - Working queries 230 | - Working mutations 231 | - Working subscriptions 232 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pygqlc 2 | 3 | Python client for graphql APIs 4 | 5 | ### Scope 6 | 7 | This is an open source project, please feel free to fork it and PR to contribute with the community! 
32 | $ pipenv --python 3.9 # or any supported version (3.9+)
63 | 64 | ### Usage 65 | 66 | ```python 67 | import os 68 | from pygqlc import GraphQLClient 69 | gql = GraphQLClient() 70 | gql.addEnvironment( 71 | 'dev', 72 | url=os.environ.get('API'), # should be an https url 73 | wss=os.environ.get('WSS'), # should be an ws/wss url 74 | headers={'Authorization': f"Bearer {os.environ.get('TOKEN')}"}, 75 | ipv4_only=False, # Set to True to force IPv4 connections (useful for environments with problematic IPv6) 76 | default=True) 77 | ``` 78 | 79 | #### From now on, you can access to the main API: 80 | 81 | `gql.query, gql.mutate, gql.subscribe` 82 | 83 | For queries: 84 | 85 | ```python 86 | query = ''' 87 | query{ 88 | authors{ 89 | name 90 | } 91 | } 92 | ''' 93 | data, errors = gql.query( query ) 94 | ``` 95 | 96 | For mutations: 97 | 98 | ```python 99 | create_author = ''' 100 | mutation { 101 | createAuthor(){ 102 | successful 103 | messages{field message} 104 | result{id insertedAt} 105 | } 106 | } 107 | ''' 108 | data, errors = gql.mutate( create_author ) 109 | ``` 110 | 111 | For subscriptions: 112 | 113 | ```python 114 | def on_auth_created(message): 115 | print(message) 116 | 117 | sub_author_created = ''' 118 | subscription{ 119 | authorCreated{ 120 | successful 121 | messages{field message} 122 | result{id insertedAt} 123 | } 124 | } 125 | ''' 126 | # unsub may be None if subscription fails (no internet connection, host unreachable, bad subscription doc, etc) 127 | unsub = gql.subscribe(sub_author_created, callback=on_auth_created) 128 | ... 129 | # when finishing the subscription: 130 | unsub() 131 | # when finishing all subscriptions: 132 | gql.close() 133 | ``` 134 | 135 | #### Exception Handling 136 | 137 | You can directly import the `GQLResponseException` for better error handling: 138 | 139 | ```python 140 | from pygqlc import GraphQLClient, GQLResponseException 141 | 142 | gql = GraphQLClient() 143 | # ... configure client ... 
144 | 145 | try: 146 | data, errors = gql.query('{ invalidQuery }') 147 | # Process data if no errors 148 | except GQLResponseException as e: 149 | print(f"GraphQL error: {e.message}, Status: {e.status_code}") 150 | # Handle the exception appropriately 151 | ``` 152 | 153 | The subscribe method, returns an `unsubscribe` function, 154 | this allows to stop subscriptions whenever needed. 155 | 156 | After finishing all subscriptions, the method 157 | `GraphQLClient.close()` should be called to close correctly the open GQL/websocket connections. 158 | 159 | To reset all subscriptions and websocket connection use the method `GraphQLClient.resetSubsConnection()`. 160 | 161 | To be noted: 162 | 163 | All main methods from the API accept a `variables` param. 164 | it is a dictionary type and may include variables from your queries or mutations: 165 | 166 | ```python 167 | query_with_vars = ''' 168 | query CommentsFromAuthor( 169 | $authorName: String! 170 | $limit: Int 171 | ){ 172 | author( 173 | findBy:{ name: $authorName } 174 | ){ 175 | id 176 | name 177 | comments( 178 | orderBy:{desc: ID} 179 | limit: $limit 180 | ){ 181 | id 182 | blogPost{name} 183 | body 184 | } 185 | } 186 | } 187 | ''' 188 | 189 | data, errors = gql.query( 190 | query=query_with_vars, 191 | variables={ 192 | "authorName": "Baruc", 193 | "limit": 10 194 | } 195 | ) 196 | ``` 197 | 198 | There is also an optional parameter `flatten` that simplifies the response format: 199 | 200 | ```python 201 | # From this: 202 | response = { 203 | 'data': { 204 | 'authors': [ 205 | { 'name': 'Baruc' }, 206 | { 'name': 'Juan' }, 207 | { 'name': 'Gerardo' } 208 | ] 209 | } 210 | } 211 | # To this: 212 | authors = [ 213 | { 'name': 'Baruc' }, 214 | { 'name': 'Juan' }, 215 | { 'name': 'Gerardo' } 216 | ] 217 | ``` 218 | 219 | Simplifying the data access from this: 220 | 221 | `response['data']['authors'][0]['name']` 222 | 223 | to this: 224 | 225 | `authors[0]['name']` 226 | 227 | It is `query(query, variables, 
261 | Use `gql.setPostTimeout(seconds)`, or directly in the environment `gql.addEnvironment(post_timeout=seconds)`. Default post_timeout is 60 seconds
301 | ### For maintainers:
353 | 354 | ## Async Usage 355 | 356 | Python 3.10+ supports async/await syntax for asynchronous programming. The GraphQLClient class provides async versions of the main methods: 357 | 358 | ```python 359 | import asyncio 360 | from pygqlc import GraphQLClient 361 | 362 | async def main(): 363 | client = GraphQLClient() 364 | client.addEnvironment('dev', "https://api.example.com/graphql") 365 | 366 | # Async query 367 | data, errors = await client.async_query(''' 368 | query { 369 | users { 370 | id 371 | name 372 | } 373 | } 374 | ''') 375 | 376 | if not errors: 377 | print("Users:", data) 378 | 379 | # Async mutation 380 | data, errors = await client.async_mutate(''' 381 | mutation { 382 | createUser(input: {name: "John Doe"}) { 383 | user { 384 | id 385 | name 386 | } 387 | } 388 | } 389 | ''') 390 | 391 | if not errors: 392 | print("Created user:", data) 393 | 394 | if __name__ == "__main__": 395 | asyncio.run(main()) 396 | ``` 397 | 398 | The async methods are: 399 | 400 | - `async_execute`: Low-level method to execute GraphQL operations 401 | - `async_query`: For GraphQL queries 402 | - `async_query_one`: For queries that return a single item 403 | - `async_mutate`: For GraphQL mutations 404 | 405 | These methods can be used with `await` in async functions and provide the same functionality as their synchronous counterparts, but with the benefits of asynchronous execution. 406 | -------------------------------------------------------------------------------- /pygqlc/GraphQLClient.py: -------------------------------------------------------------------------------- 1 | '''GraphQL client implementation 2 | 3 | This module has the general purpose of defining the GraphQLClient class 4 | and all its methods. 
5 | 6 | GQLResponse (type variable): [data[field(string)], errors[message(string), 7 | field?(string)] 8 | ''' 9 | import traceback 10 | import time 11 | import threading 12 | from functools import lru_cache 13 | import websocket 14 | import httpx 15 | import pydash as py_ 16 | import orjson 17 | import logging 18 | from pygqlc.helper_modules.Singleton import Singleton 19 | from pygqlc.logging import log, LogLevel 20 | from tenacity import ( 21 | retry, 22 | retry_if_result, 23 | stop_after_attempt, 24 | wait_random 25 | ) 26 | from .MutationBatch import MutationBatch 27 | 28 | # Set httpx logger to WARNING level to reduce HTTP request logs 29 | logging.getLogger('httpx').setLevel(logging.WARNING) 30 | 31 | GQL_WS_SUBPROTOCOL = "graphql-transport-ws" 32 | 33 | # * Custom Exception class for GraphQL responses 34 | 35 | 36 | class GQLResponseException(Exception): 37 | """Custom GraphQL exception for query/mutation execution errors. 38 | 39 | Attributes: 40 | status_code (int): HTTP status code of the response 41 | query (str): GraphQL query or mutation that caused the error 42 | variables (dict): Variables used in the query/mutation 43 | """ 44 | 45 | def __init__( 46 | self, 47 | message: str, 48 | status_code: int, 49 | query: str, 50 | variables: dict | None = None, 51 | ) -> None: 52 | # Initialize the normal exception with the message 53 | super().__init__(message) 54 | self.message = message 55 | self.status_code = status_code 56 | self.query = query 57 | self.variables = variables 58 | 59 | 60 | def is_ws_payloadErrors_msg(message): 61 | return bool(py_.get(message, 'payload.errors')) 62 | 63 | 64 | def is_ws_connection_init_msg(message): 65 | data = py_.get(message, 'payload.data', {}) 66 | if not data: 67 | return False # may have an error, but is not connection init message 68 | keys = list(data.keys()) 69 | if keys and data[keys[0]] is None: 70 | # this message is a connection init one, 71 | # with the shape: {data: {datumCreatedOrSomething: None}} 72 | 
return True 73 | return False 74 | 75 | 76 | def has_errors(result): 77 | """This function checks if a GqlResponse has any errors. 78 | 79 | Args: 80 | result (GqlResponse): [data, errors] 81 | 82 | Returns: 83 | (boolean): Returns `True` if a transaction has at least one error. 84 | """ 85 | _, errors = result 86 | return bool(errors) 87 | 88 | 89 | @lru_cache(maxsize=128) 90 | def _data_flatten_cacheable(data_str, single_child): 91 | """Internal cacheable version of data_flatten using string representation of data. 92 | 93 | Args: 94 | data_str (str): String representation of data object 95 | single_child (bool): Whether to flatten if data has only one child 96 | 97 | Returns: 98 | The flattened data structure 99 | """ 100 | data = orjson.loads(data_str) 101 | return _data_flatten_impl(data, single_child) 102 | 103 | 104 | def _data_flatten_impl(data, single_child=False): 105 | """Implementation of data_flatten that works on data structures directly. 106 | 107 | Args: 108 | data (dict, list): The data of a GqlResponse. 109 | single_child (bool): Checks if the data has only one element. 110 | 111 | Returns: 112 | (dict): Returns a formatted data. 113 | """ 114 | if isinstance(data, dict): 115 | keys = list(data.keys()) 116 | if len(keys) == 1: 117 | return _data_flatten_impl(data[keys[0]], single_child) 118 | else: 119 | return data # ! various elements, nothing to flatten 120 | elif single_child and isinstance(data, list): 121 | if len(data) == 1: 122 | return _data_flatten_impl(data[0], single_child) 123 | elif len(data) == 0: 124 | return None # * Return none if no child was found 125 | else: 126 | return data 127 | else: 128 | return data # ! not a dict, nothing to flatten 129 | 130 | 131 | def data_flatten(data, single_child=False): 132 | """This function formats the data structure of a GqlResponse. 133 | 134 | Args: 135 | data (dict, list): The data of a GqlResponse. 136 | single_child (bool, optional): Checks if the data has only one element. 
def safe_pop(data, index=0, default=None):
    """Pop an item from a subscription queue without raising on empty input.

    Args:
        data (list): Queue of GqlResponse messages caught by the subscription.
        index (int, optional): Position to pop from. Defaults to 0.
        default (None, optional): Value returned when the queue is empty.
            Defaults to None.

    Returns:
        [GqlResponse]: The popped element, or ``default`` when there is
        nothing to pop.
    """
    return data.pop(index) if data else default
Defaults to 193 | empty dict. 194 | environment (dict): Dictionary with the data of the actual enviroment. 195 | Defaults to None. 196 | ws_url (string): String with the WSS url. Defaults to None. 197 | subs (dict): Dictionary with all active subscriptions in the instance. 198 | Defaults to empty dict. 199 | sub_counter (int): Count of active subscriptions in the instance. 200 | Defaults to 0. 201 | sub_router_thread (thread): Thread with all subscription logic. 202 | Defaults to None. 203 | wss_conn_halted (boolean): Checks if the wss connection is halted. 204 | Defaults to False. 205 | closing (boolean): Checks if all subscriptions were successfully closed. 206 | Defaults to False. 207 | unsubscribing (boolean): Checks if all subscriptions were successfully 208 | canceled. Defaults to False. 209 | websocket_timeout (int): seconds of the websocket timeout. Defaults to 210 | 60. 211 | 212 | Examples: 213 | >>> clause: 214 | ''' 215 | client = GraphQLClient() 216 | with client.enterEnvironment('dev') as gql: 217 | data, errors = gql.query('{lines(limit:2){id}}') 218 | # Process data and errors here 219 | ''' 220 | >>> setEnvironment: 221 | ''' 222 | client = GraphQLClient() 223 | client.addEnvironment('dev', "https://heineken.valiot.app/") 224 | client.addHeader( 225 | environment='dev', 226 | header={'Authorization': dev_token}) 227 | data, errors = gql.query('{lines(limit:2){id}}') 228 | # Process data and errors here 229 | ''' 230 | """ 231 | 232 | # Reusable headers 233 | DEFAULT_HEADERS = { 234 | 'Accept': 'application/json', 235 | 'Content-Type': 'application/json' 236 | } 237 | 238 | def __init__(self): 239 | """Constructor of the GraphQlClient object. 
240 | """ 241 | # * query/mutation related attributes 242 | self.environments = {} 243 | self.environment = None 244 | # * wss/subscription related attributes: 245 | self.ws_url = None 246 | self._conn = None 247 | self.ack_timeout = 5 248 | self._subscription_running = False 249 | self.subs = {} # * subscriptions running 250 | self.sub_counter = 0 251 | self.sub_router_thread = None 252 | self.sub_pingpong_thread = None 253 | self.wss_conn_halted = False 254 | self.closing = False 255 | self.unsubscribing = False 256 | self.websocket_timeout = 60 257 | self.pingIntervalTime = 15 258 | self.pingTimer = time.time() 259 | 260 | # Setup common client parameters 261 | self.client_params = {"http2": True} 262 | self.async_client_params = {"http2": True} 263 | 264 | # Reuse HTTP client for better performance 265 | self._http_client = None 266 | self._thread_local = threading.local() 267 | self._async_client = None 268 | 269 | # Configure sleep time for polling loops 270 | self.poll_interval = 0.005 # reduced from 0.01 for faster response 271 | 272 | # * with implementation 273 | def __enter__(self): 274 | return self 275 | 276 | def __exit__(self, type, value, traceback): 277 | self.environment = self.save_env # restores environment 278 | return 279 | 280 | def enterEnvironment(self, name): 281 | """This function makes a safe access to an environment. 282 | 283 | Args: 284 | name (string): Name of the environment. 285 | 286 | Returns: 287 | (self): Returns self instance for the use with `with` keyword. 
288 | """ 289 | self.save_env = self.environment 290 | self.environment = name 291 | return self # * for use with "with" keyword 292 | 293 | # * HIGH LEVEL METHODS --------------------------------- 294 | # TODO: Implement tenacity in query, mutation and subscription methods 295 | # @retry( 296 | # retry=(retry_if_result(has_errors)), 297 | # stop=stop_after_attempt(5), 298 | # wait=wait_random(min=0.25, max=0.5)) 299 | # def query_wrapper(self, query, variables=None): 300 | # data = None 301 | # errors = [] 302 | # try: 303 | # result = self.execute(query, variables) 304 | # data = result.get('data', None) 305 | # errors = result.get('errors', []) 306 | # except Exception as e: 307 | # errors = [{'message': str(e)}] 308 | # return data, errors 309 | 310 | # * Query high level implementation 311 | def query( 312 | self, 313 | query: str, 314 | variables: dict | None = None, 315 | flatten: bool = True, 316 | single_child: bool = False 317 | ) -> tuple: 318 | """This function makes a query transaction to the actual environment. 319 | 320 | Args: 321 | query (string): GraphQL query instructions. 322 | variables (string, optional): Query variables. Defaults to None. 323 | flatten (bool, optional): Check if GraphQLResponse should be flatten or 324 | not. Defaults to True. 325 | single_child (bool, optional): Check if GraphQLResponse only has one 326 | element. Defaults to False. 327 | 328 | Returns: 329 | tuple: Tuple containing (data, errors) from the GraphQL response. 
330 | """ 331 | data = None 332 | errors = [] 333 | try: 334 | response = self.execute(query, variables) 335 | if flatten: 336 | data = response.get('data', None) 337 | else: 338 | data = response 339 | errors = response.get('errors', []) 340 | if flatten and data is not None: 341 | data = data_flatten(data, single_child=single_child) 342 | except Exception as e: 343 | errors = [{'message': str(e)}] 344 | return data, errors 345 | 346 | # * Query high level implementation 347 | def query_one(self, query: str, variables: dict | None = None) -> tuple: 348 | """This function makes a single child query. 349 | 350 | Args: 351 | query (string): GraphQL query instructions. 352 | variables (string, optional): Query variables. Defaults to None. 353 | 354 | 355 | Returns: 356 | tuple: Tuple containing (data, errors) from the GraphQL response. 357 | """ 358 | return self.query(query, variables, flatten=True, single_child=True) 359 | 360 | def _get_messages(self, data: dict | None) -> list[dict]: 361 | """Gets the messages in a mutation. It normally simply takes the 362 | 'messages' key, but if it is a mutation with labels it joins all 363 | """ 364 | if not data: 365 | return [] 366 | if "messages" in data: 367 | return data["messages"] or [] 368 | messages = [] 369 | for datum in data.values(): 370 | if isinstance(datum, dict): 371 | messages.extend(datum.get("messages") or []) 372 | return messages 373 | 374 | # * Mutation high level implementation 375 | def mutate(self, mutation: str, variables: dict | None = None, flatten: bool = True) -> tuple: 376 | """This function makes a mutation transaction to the actual environment. 377 | 378 | Args: 379 | mutation (string): GraphQL mutation instructions. 380 | variables (string, optional): Mutation variables. Defaults to None. 381 | flatten (bool, optional): Check if GraphQLResponse should be flatten or 382 | not. Defaults to True. 383 | 384 | Returns: 385 | tuple: Tuple containing (data, errors) from the GraphQL response. 
386 | """ 387 | response = {} 388 | data = None 389 | errors = [] 390 | try: 391 | response = self.execute(mutation, variables) 392 | except Exception as e: 393 | errors = [{'message': str(e)}] 394 | finally: 395 | response_errors = response.get('errors', []) 396 | if response_errors: 397 | errors.extend(response_errors) 398 | if not errors: 399 | data = response.get('data', None) 400 | if flatten and data: 401 | data = data_flatten(data) 402 | data_messages = self._get_messages(data) 403 | if data_messages: 404 | errors.extend(data_messages) 405 | return data, errors 406 | 407 | # * Subscription high level implementation ****************** 408 | 409 | def subscribe( 410 | self, 411 | query, 412 | variables=None, 413 | callback=None, 414 | flatten=True, 415 | _id=None, 416 | on_error_callback=None 417 | ): 418 | """This functions makes a subscription to the actual environment. 419 | 420 | Args: 421 | query (string): Graphql subscription instructions. 422 | variables (string, optional): Subscription variables. Defaults to None. 423 | callback (function, optional): Trigger function of the subscription. 424 | Defaults to None. 425 | flatten (bool, optional): Check if GraphqlResponse should be flatten or 426 | not. Defaults to True. 427 | _id (int, optional): Subscription id. Defaults to None. 428 | 429 | Returns: 430 | (GraphqlResponse): Returns the GraphqlResponse of the subscription. 431 | """ 432 | # ! 
# (fragment: tail of `subscribe`; its `def` line precedes this chunk)
# initialize websocket only once
if not self._conn:
    if not self._new_conn():
        log(LogLevel.ERROR, 'Error creating WSS connection for subscription')
        return None

# Default to the no-op handler when the caller gave no callback.
_cb = callback if callback is not None else self._on_message
_ecb = on_error_callback
_id = self._registerSub(_id)
self.subs[_id].update({
    'thread': threading.Thread(target=self._subscription_loop, args=(_cb, _id, _ecb)),
    'flatten': flatten,
    'queue': [],
    'runs': 0,
    'query': query,
    'variables': variables,
    'callback': callback,
    'on_error_callback': on_error_callback
})
self.subs[_id]['thread'].start()
payload = {'query': query, 'variables': variables}
self._start(payload, _id)
# ! Create unsubscribe function for this specific thread:

def unsubscribe():
    return self._unsubscribe(_id)
self.subs[_id].update({'unsub': unsubscribe})
return unsubscribe

def _unsubscribe(self, _id):
    # Gracefully stop one subscription: flag its worker loop to exit, send
    # the protocol 'complete' frame, then join the worker thread.
    sub = self.subs.get(_id)
    if not sub:
        log(LogLevel.WARNING, 'Subscription already cleared')
        return
    # `unsubscribing` pauses message processing in _sub_routing_loop while
    # we tear this subscription down.
    self.unsubscribing = True
    sub['kill'] = True
    try:
        self._stop(_id)
    except BrokenPipeError as e:
        log(LogLevel.WARNING, 'WSS Pipe broken, nothing to stop')
    sub['thread'].join()
    sub['running'] = False
    self.unsubscribing = False

def _sub_routing_loop(self):
    """Router thread body: reads every websocket frame and appends it to the
    queue of the subscription whose 'id' matches; also reaps finished worker
    threads and drives reconnection when the connection is halted."""
    log(LogLevel.SUCCESS, 'first subscription, starting routing loop')
    last_reconnect_attempt = 0
    reconnect_delay = 1.0

    while not self.closing:
        if self.wss_conn_halted:
            # Rate limit reconnection attempts
            current_time = time.time()
            if current_time - last_reconnect_attempt >= reconnect_delay:
                log(LogLevel.WARNING,
                    'Connection halted, attempting reconnection...')
                if self._new_conn():
                    self.wss_conn_halted = False
                    log(LogLevel.SUCCESS,
                        'WSS Reconnection succeeded, attempting resubscription to lost subs')
                    self._resubscribe_all()
                    log(LogLevel.INFO, 'finished resubscriptions')
                    reconnect_delay = 1.0  # Reset delay on success
                else:
                    # Use exponential backoff for reconnection attempts (up to 5 seconds)
                    reconnect_delay = min(reconnect_delay * 1.5, 5.0)
                last_reconnect_attempt = current_time
            time.sleep(self.poll_interval)
            continue

        # A teardown is in progress elsewhere; do not touch self.subs.
        if self.unsubscribing:
            time.sleep(self.poll_interval)
            continue

        # Process terminated subscriptions
        to_del = []
        for sub_id, sub in self.subs.items():
            if (sub['kill'] or not sub['running']) and not sub['starting']:
                # Don't block if thread is already dead
                if sub['thread'].is_alive():
                    # Use timeout to avoid blocking indefinitely
                    sub['thread'].join(0.1)
                to_del.append(sub_id)

        for sub_id in to_del:
            del self.subs[sub_id]

        try:
            # Set a smaller timeout for faster response
            self._conn.settimeout(0.5)
            message = orjson.loads(self._conn.recv())
            # Reset timeout after successful receive
            self._conn.settimeout(self.websocket_timeout)
        except (TimeoutError, websocket.WebSocketTimeoutException):
            # Expected timeout - not an error
            time.sleep(self.poll_interval)
            continue
        except Exception as e:
            # Any other receive failure marks the connection halted so the
            # branch at the top of the loop attempts reconnection.
            if not self.closing:
                log(LogLevel.ERROR, 'Some error trying to receive WSS')
                self.wss_conn_halted = True
            continue

        message_type = message.get('type')
        if 'id' in message:
            # if the message has an ID request, it will be handled by the _subscription_loop
            _id = message['id']
            active_sub = self.subs.get(_id)
            # the connection may not be active due to:
            # 1. server error (incorrect ID sent)
            # 2. race condition (we closed connection, but a message was already on its way)
            if active_sub:
                active_sub['queue'].append(message)
        elif message_type == CONNECTION_ACK_TYPE:
            pass  # Connection Ack with the server
        elif message_type == PONG_TYPE:
            pass  # keep-alive reply; nothing to route
        else:
            log(LogLevel.WARNING, f'unknown msg type: {message}')

        # Use non-blocking sleep
        time.sleep(self.poll_interval)

def _resubscribe_all(self):
    """Re-create every known subscription (same ids) after a reconnect."""
    # Copy subscription info before killing threads
    old_subs = {sub_id: {
        'query': sub.get('query'),
        'variables': sub.get('variables'),
        'callback': sub.get('callback'),
        'on_error_callback': sub.get('on_error_callback'),
        'flatten': sub.get('flatten'),
    } for sub_id, sub in self.subs.items()}

    # First, signal all threads to stop
    for sub in self.subs.values():
        sub['kill'] = True

    # Then join all threads with timeout to avoid blocking indefinitely
    for sub_id, sub in self.subs.items():
        if sub['thread'].is_alive():
            sub['thread'].join(0.5)

    # Clear existing subscriptions
    self.subs.clear()

    # Resubscribe using the saved information; passing _id keeps the
    # original subscription ids stable across the reconnect.
    for sub_id, sub_info in old_subs.items():
        self.subscribe(
            query=sub_info['query'],
            variables=sub_info['variables'],
            callback=sub_info['callback'],
            on_error_callback=sub_info['on_error_callback'],
            flatten=sub_info['flatten'],
            _id=sub_id,
        )

def _subscription_loop(self, _cb, _id, _ecb):
    """Worker thread for one subscription: drains this sub's queue (filled by
    _sub_routing_loop) and invokes the user callback for each payload."""
    self.subs[_id].update({'running': True, 'starting': False})
    while self.subs[_id]['running']:
        if self.subs[_id]['kill']:
            log(LogLevel.INFO,
                f'stopping subscription id={_id} on Unsubscribe')
            break

        # Get message without copying the queue
        message = safe_pop(self.subs[_id]['queue'])
        if not message:
            time.sleep(self.poll_interval)
            continue

        # Message type handling
        message_type = message.get('type')
        if message_type == NEXT_TYPE:
            pass  # continue with payload handling
        elif message_type == ERROR_TYPE:
            if _ecb:
                _ecb(message)
            log(LogLevel.WARNING,
                f'stopping subscription id={_id} on {message_type}')
            break
        elif message_type == COMPLETE_TYPE:
            log(LogLevel.INFO,
                f'stopping subscription id={_id} on {message_type}')
            break
        else:
            log(LogLevel.WARNING, f'unknown msg type: {message}')
            continue

        # Payload handling
        if is_ws_payloadErrors_msg(message):
            # Prefer the caller's error callback; otherwise just log.
            if _ecb:
                _ecb(message)
                continue
            log(LogLevel.ERROR, 'Subscription message has payload Errors')
            log(LogLevel.ERROR, f'{message}')
        elif is_ws_connection_init_msg(message):
            # Subscription successfully initialized
            pass
        else:
            # Process message more efficiently
            gql_msg = self._clean_sub_message(_id, message)
            try:
                _cb(gql_msg)  # execute callback function
                # Increment counter without locking
                self.subs[_id]['runs'] += 1
            except Exception as _e:
                # User callback raised: log context but keep the loop alive.
                log(LogLevel.ERROR, f'Error on subscription callback')
                sub_query = self.subs[_id].get('query')
                sub_variables = self.subs[_id].get('variables')
                if sub_query:
                    log(LogLevel.ERROR,
                        f'subscription document: \n\t{sub_query}')
                if sub_variables:
                    log(LogLevel.ERROR,
                        f'subscription variables: \n\t{sub_variables}')
                log(LogLevel.ERROR, traceback.format_exc())

    # Subscription stopped, update state atomically
    self.subs[_id].update({'running': False, 'kill': True})
    log(LogLevel.INFO, f'Subscription id={_id} stopped')

def _clean_sub_message(self, _id, message):
    # Extract the GraphQL 'payload' and flatten it if this sub asked for it.
    data = py_.get(message, 'payload', {})
    return data_flatten(data) if self.subs[_id]['flatten'] else data

def _new_conn(self):
    # (method continues in the next chunk)
    env = self.environments.get(self.environment,
# (fragment: continuation of `_new_conn`; its `def` line and the opening of
# the `environments.get(` call are in the previous chunk)
    None)
self.ws_url = env.get('wss')
try:
    self._conn = websocket.create_connection(
        self.ws_url, subprotocols=[GQL_WS_SUBPROTOCOL])
    self._conn_init()
    return True
except Exception as e:
    log(LogLevel.ERROR, f'Failed connecting to {self.ws_url}')
    return False

def close(self):
    """This function ends and resets all subscriptions and related attributes
    to their default values.
    """
    # ! ask subscription message router to stop
    self.closing = True
    if not self.sub_router_thread:
        log(LogLevel.INFO, 'connection not stablished, nothing to close')
        self.closing = False
        return
    # Stop each worker thread before tearing down the shared connection.
    for sub in self.subs.values():
        sub['unsub']()
    self._conn.close()
    # Join the two background threads (router + ping/pong); both exit when
    # self.closing is True.
    self.sub_router_thread.join()
    self.sub_pingpong_thread.join()
    self.sub_router_thread = None
    self.sub_pingpong_thread = None
    self._conn = None
    self.sub_counter = 0
    self.subs = {}
    self.closing = False

def _on_message(self, message):
    '''Dummy callback for subscription'''
    # Message handling happens elsewhere - no need to print here
    pass

def _conn_init(self):
    # Handshake: send connection_init carrying the environment's headers as
    # payload, wait for the server ack, then restore the normal timeout.
    env = self.environments.get(self.environment, None)
    headers = env.get('headers', {})
    payload = {
        'type': 'connection_init',
        'payload': headers
    }
    self._conn.send(orjson.dumps(payload).decode('utf-8'))
    self._waiting_connection_ack()
    self._conn.settimeout(self.websocket_timeout)

    # Lazily create/start the router and ping-pong threads on first connect.
    if not self.sub_router_thread:
        self.sub_router_thread = threading.Thread(
            target=self._sub_routing_loop)
    if not self.sub_router_thread.is_alive():
        self.sub_router_thread.start()
    if not self.sub_pingpong_thread:
        self.sub_pingpong_thread = threading.Thread(target=self._ping_pong)
    if not self.sub_pingpong_thread.is_alive():
        self.sub_pingpong_thread.start()

def _waiting_connection_ack(self):
    # Block (up to ack_timeout) until the server acknowledges connection_init.
    self._conn.settimeout(self.ack_timeout)
    # set timeout to raise Exception websocket.WebSocketTimeoutException
    message = orjson.loads(self._conn.recv())
    if message['type'] == CONNECTION_ACK_TYPE:
        pass  # Connection Ack with the server

def _ping_pong(self):
    """Keep-alive thread: sends a ping frame every `pingIntervalTime` seconds;
    a failed send marks the connection halted so the router reconnects."""
    self.pingTimer = time.time()
    ping_count = 0  # local counter; not read elsewhere in this chunk

    while not self.closing:
        time.sleep(0.1)
        if self.wss_conn_halted:
            # Router is handling reconnection; do not ping a dead socket.
            continue

        current_time = time.time()
        if (current_time - self.pingTimer) > self.pingIntervalTime:
            self.pingTimer = current_time
            try:
                # PING_JSON is presumably a pre-encoded ping frame — defined
                # outside this chunk; confirm against module constants.
                self._conn.send(PING_JSON)
                ping_count += 1
                # No need to log normal ping operations
            except Exception as e:
                if not self.closing:
                    log(LogLevel.ERROR,
                        'error trying to send ping, WSS Pipe is broken')
                    self.wss_conn_halted = True

def _registerSub(self, _id=None):
    # Allocate (or reuse) a subscription id and seed its state flags.
    if not _id:
        self.sub_counter += 1
        _id = str(self.sub_counter)
    self.subs[_id] = {'running': False, 'kill': False, 'starting': True}
    return _id

def _start(self, payload, _id):
    # Send the 'subscribe' frame that opens subscription `_id` on the server.
    frame = {'id': _id, 'type': 'subscribe', 'payload': payload}
    self._conn.send(orjson.dumps(frame).decode('utf-8'))

def _stop(self, _id):
    # Send the 'complete' frame that closes subscription `_id` on the server.
    payload = {'id': _id, 'type': 'complete'}
    self._conn.send(orjson.dumps(payload).decode('utf-8'))

def resetSubsConnection(self):
    """This function resets all subscriptions connections.

    Returns:
        (boolean): Returns if the reconnection has been possible.
def resetSubsConnection(self):
    """Reset the websocket connection shared by all subscriptions.

    If the routing loop is alive, simply closing the socket makes the loop
    detect the halt and reconnect/resubscribe by itself; otherwise a manual
    reconnect + resubscription is attempted here.

    Returns:
        (boolean): Returns if the reconnection has been possible.
    """
    if not self.sub_router_thread:
        log(LogLevel.INFO, 'connection not stablished, nothing to reset')
        return False
    if self.sub_router_thread.is_alive():  # check that _sub_routing_loop() is running
        self._conn.close()  # forces connection halted (wss_conn_halted)
        return True
    # in case for some reason _sub_routing_loop() is not running
    if self._new_conn():
        log(LogLevel.INFO,
            'WSS Reconnection succeeded, attempting resubscription to lost subs')
        self._resubscribe_all()
        log(LogLevel.INFO, 'finished resubscriptions')
        return True
    else:
        log(LogLevel.ERROR, 'Reconnection has not been possible')
        return False

# * END SUBSCRIPTION functions ******************************

# * BATCH functions *****************************************
def batchMutate(self, label='mutation'):
    """This function creates a batch of mutation transactions.

    Args:
        label (str, optional): Name of the mutation batch. Defaults to 'mutation'.

    Returns:
        (MutationBatch): Returns a MutationBatch object.
    """
    return MutationBatch(client=self, label=label)

def batchQuery(self, label='query'):
    """This function creates a batch of query transactions.

    Args:
        label (str, optional): Name of the query batch. Defaults to 'query'.

    Returns:
        (MutationBatch): Returns a MutationBatch object (queries reuse the
        same batching implementation as mutations).
    """
    return MutationBatch(client=self, label=label)

# * END BATCH function **************************************
# * helper methods
def addEnvironment(
    self,
    name,
    url=None,
    wss=None,
    headers=None,
    default=False,
    timeoutWebsocket=60,
    post_timeout=60,
    ipv4_only=False
):
    """This function adds a new environment to the instance.

    Args:
        name (string): Name of the environment.
        url (string, optional): URL of the environment. Defaults to None.
        wss (string, optional): URL of the WSS of the environment. Defaults to None.
        headers (dict, optional): A dictionary with the headers
            (like authorization). Defaults to None (no headers).
        default (bool, optional): Checks if the new environment will be the
            default one of the instance. Defaults to False.
        timeoutWebsocket (int, optional): Seconds of the timeout of the
            websocket. Defaults to 60.
        post_timeout (int, optional): Timeout in seconds for each post.
            Defaults to 60.
        ipv4_only (bool, optional): Forces connections to use IPv4 only.
            Helps with slow connections on networks with problematic IPv6.
            Defaults to False.
    """
    # Fix: `headers={}` was a mutable default argument; use None and
    # normalize here (behavior is unchanged for all existing callers).
    headers = {} if headers is None else headers
    self.environments[name] = {
        'url': url,
        'wss': wss,
        'headers': headers.copy(),  # copy so later caller mutations don't leak in
        'post_timeout': post_timeout,
        'ipv4_only': ipv4_only
    }

    if ipv4_only:
        self._update_client_params(ipv4_only)

    if default:
        self.setEnvironment(name)
        self.setTimeoutWebsocket(timeoutWebsocket)

def _update_client_params(self, ipv4_only):
    """Update HTTP client parameters based on IPv4 setting."""
    if ipv4_only:
        # Binding the local address to 0.0.0.0 restricts httpx to IPv4.
        self.client_params["transport"] = httpx.HTTPTransport(
            local_address="0.0.0.0")
        self.async_client_params["transport"] = httpx.AsyncHTTPTransport(
            local_address="0.0.0.0")
    else:
        # Remove transport if it exists
        self.client_params.pop("transport", None)
        self.async_client_params.pop("transport", None)

def setUrl(self, environment=None, url=None):
    """This function sets a new url to an existing environment.

    Args:
        environment (string, optional): Name of the environment. Defaults to None
            (current environment).
        url (string, optional): New URL for the environment. Defaults to None.
    """
    # if environment is not selected, use current environment
    if not environment:
        environment = self.environment
    self.environments[environment]['url'] = url

def setWss(self, environment=None, url=None):
    """This function sets a new WSS to an existing environment.

    Args:
        environment (string, optional): Name of the environment. Defaults to None
            (current environment).
        url (string, optional): New WSS URL for the environment. Defaults to None.
    """
    # if environment is not selected, use current environment
    if not environment:
        environment = self.environment
    self.environments[environment]['wss'] = url

def addHeader(self, environment=None, header=None):
    """This function updates the headers of an existing environment.

    Args:
        environment (string, optional): Name of the environment. Defaults to None
            (current environment).
        header (dict, optional): New headers to add. Defaults to None (no-op).
    """
    # Fix: `header={}` was a mutable default argument; normalize None to {}.
    header = {} if header is None else header
    # if environment is not selected, use current environment
    if not environment:
        environment = self.environment
    self.environments[environment]['headers'].update(header)

def setEnvironment(self, name):
    """This function sets the active environment of the instance.

    Args:
        name (string): Name of the environment.

    Raises:
        Exception: The environment's name doesn't exist in the environment list.
    """
    env = self.environments.get(name)
    if not env:
        raise Exception(f'selected environment not set ({name})')
    self.environment = name

    # Update client parameters based on environment settings
    ipv4_only = env.get('ipv4_only', False)
    self._update_client_params(ipv4_only)

def setPostTimeout(self, environment=None, post_timeout=60):
    """This function sets the post timeout of an environment.

    Args:
        environment (string, optional): Name of the environment. Defaults to None
            (current environment).
        post_timeout (int, optional): Timeout in seconds for each post.
            Defaults to 60.
    """
    # if environment is not selected, use current environment
    if not environment:
        environment = self.environment
    self.environments[environment]['post_timeout'] = post_timeout
925 | """ 926 | # if environment is not selected, use current environment 927 | if not environment: 928 | environment = self.environment 929 | self.environments[environment]['post_timeout'] = post_timeout 930 | 931 | def setTimeoutWebsocket(self, seconds): 932 | """This function sets the webscoket's timeout. 933 | 934 | Args: 935 | seconds (int): Time for the timeout. 936 | """ 937 | self.websocket_timeout = seconds 938 | if self._conn: 939 | self._conn.settimeout(self.websocket_timeout) 940 | 941 | # * LOW LEVEL METHODS ---------------------------------- 942 | def _get_http_client(self): 943 | """Get a thread-local HTTP client to improve performance with connection pooling""" 944 | if not hasattr(self._thread_local, 'client'): 945 | self._thread_local.client = httpx.Client(**self.client_params) 946 | return self._thread_local.client 947 | 948 | def execute(self, query: str, variables: dict | None = None) -> dict: 949 | """This function executes the intructions of a query or mutation. 950 | 951 | Args: 952 | query (string): GraphQL instructions. 953 | variables (string, optional): Variables of the transaction. Defaults 954 | to None. 955 | 956 | Raises: 957 | Exception: There is not setted a main environment. 958 | GQLResponseException: Raised when the GraphQL query fails. 959 | 960 | Returns: 961 | dict: Raw GraphQLResponse. 
962 | """ 963 | data = { 964 | 'query': query, 965 | 'variables': variables 966 | } 967 | env = self.environments.get(self.environment) 968 | if not env: 969 | raise Exception( 970 | f'cannot execute query without setting an environment') 971 | 972 | headers = self.DEFAULT_HEADERS.copy() 973 | env_headers = env.get('headers') 974 | if env_headers: 975 | headers.update(env_headers) 976 | 977 | # Use thread-local client for better connection pooling 978 | try: 979 | client = self._get_http_client() 980 | response = client.post( 981 | env['url'], 982 | json=data, 983 | headers=headers, 984 | timeout=float(env.get('post_timeout', 60)) 985 | ) 986 | except Exception as _e: 987 | # If connection fails, create a new client and retry 988 | self._thread_local.client = httpx.Client(**self.client_params) 989 | client = self._thread_local.client 990 | response = client.post( 991 | env['url'], 992 | json=data, 993 | headers=headers, 994 | timeout=float(env.get('post_timeout', 60)) 995 | ) 996 | 997 | if response.status_code == 200: 998 | return orjson.loads(response.content) 999 | else: 1000 | error_message = "Query failed to run by returning code of " + \ 1001 | f"{response.status_code}.\n{query}" 1002 | raise GQLResponseException( 1003 | message=error_message, 1004 | status_code=response.status_code, 1005 | query=query, 1006 | variables=variables 1007 | ) 1008 | 1009 | # * ASYNC METHODS ---------------------------------- 1010 | async def _get_async_client(self): 1011 | """Get or create a reusable async HTTP client for better performance 1012 | 1013 | Detects if the event loop has been closed (which can happen in test environments) 1014 | and creates a new client if necessary. 
1015 | """ 1016 | new_client_needed = False 1017 | 1018 | # Check if client exists 1019 | if self._async_client is None: 1020 | new_client_needed = True 1021 | else: 1022 | # Check if client's event loop is closed 1023 | try: 1024 | # Make a simple request to check if client is still usable 1025 | # This will fail with "Event loop is closed" if the loop is closed 1026 | await self._async_client.get_timeout() 1027 | except (RuntimeError, AttributeError) as e: 1028 | if "Event loop is closed" in str(e) or "has no attribute" in str(e): 1029 | # Event loop closed or client has been partially destroyed 1030 | # Create a new client 1031 | new_client_needed = True 1032 | # Intentionally don't try to close the old client as its event loop is closed 1033 | self._async_client = None 1034 | else: 1035 | # Some other error, re-raise 1036 | raise 1037 | 1038 | # Create a new client if needed 1039 | if new_client_needed: 1040 | self._async_client = httpx.AsyncClient(**self.async_client_params) 1041 | 1042 | return self._async_client 1043 | 1044 | async def _close_async_client(self): 1045 | """Close the async client if it exists""" 1046 | if self._async_client is not None: 1047 | await self._async_client.aclose() 1048 | self._async_client = None 1049 | 1050 | async def async_execute(self, query: str, variables: dict | None = None) -> dict: 1051 | """Async version of execute method that executes instructions of a query or mutation. 1052 | 1053 | Args: 1054 | query (string): GraphQL instructions. 1055 | variables (string, optional): Variables of the transaction. Defaults 1056 | to None. 1057 | 1058 | Raises: 1059 | Exception: There is not setted a main environment. 1060 | GQLResponseException: Raised when the GraphQL query fails. 1061 | 1062 | Returns: 1063 | dict: Raw GraphQLResponse. 
1064 | """ 1065 | data = { 1066 | 'query': query, 1067 | 'variables': variables 1068 | } 1069 | env = self.environments.get(self.environment) 1070 | if not env: 1071 | raise Exception( 1072 | f'cannot execute query without setting an environment') 1073 | 1074 | headers = self.DEFAULT_HEADERS.copy() 1075 | env_headers = env.get('headers') 1076 | if env_headers: 1077 | headers.update(env_headers) 1078 | 1079 | # Get a client that we know is connected to a valid event loop 1080 | client = await self._get_async_client() 1081 | 1082 | try: 1083 | # Make the actual request 1084 | response = await client.post( 1085 | env['url'], 1086 | json=data, 1087 | headers=headers, 1088 | timeout=float(env.get('post_timeout', 60)) 1089 | ) 1090 | except (httpx.RequestError, RuntimeError) as e: 1091 | # Check if this is an event loop issue or a network issue 1092 | if "Event loop is closed" in str(e): 1093 | # Event loop was closed - need to get a new client with a valid loop 1094 | # The _get_async_client method will handle this on the next call 1095 | self._async_client = None 1096 | # Try again with a new client 1097 | client = await self._get_async_client() 1098 | response = await client.post( 1099 | env['url'], 1100 | json=data, 1101 | headers=headers, 1102 | timeout=float(env.get('post_timeout', 60)) 1103 | ) 1104 | else: 1105 | # Some other request error, re-raise 1106 | raise 1107 | 1108 | if response.status_code == 200: 1109 | return orjson.loads(response.content) 1110 | else: 1111 | error_message = "Query failed to run by returning code of " + \ 1112 | f"{response.status_code}.\n{query}" 1113 | raise GQLResponseException( 1114 | message=error_message, 1115 | status_code=response.status_code, 1116 | query=query, 1117 | variables=variables 1118 | ) 1119 | 1120 | async def async_query( 1121 | self, 1122 | query: str, 1123 | variables: dict | None = None, 1124 | flatten: bool = True, 1125 | single_child: bool = False 1126 | ) -> tuple: 1127 | """Async version of query method 
that makes a query transaction to the actual environment. 1128 | 1129 | Args: 1130 | query (string): GraphQL query instructions. 1131 | variables (string, optional): Query variables. Defaults to None. 1132 | flatten (bool, optional): Check if GraphQLResponse should be flatten or 1133 | not. Defaults to True. 1134 | single_child (bool, optional): Check if GraphQLResponse only has one 1135 | element. Defaults to False. 1136 | 1137 | Returns: 1138 | tuple: Tuple containing (data, errors) from the GraphQL response. 1139 | """ 1140 | data = None 1141 | errors = [] 1142 | try: 1143 | response = await self.async_execute(query, variables) 1144 | if flatten: 1145 | data = response.get('data', None) 1146 | else: 1147 | data = response 1148 | errors = response.get('errors', []) 1149 | if flatten and data is not None: 1150 | data = data_flatten(data, single_child=single_child) 1151 | except Exception as e: 1152 | errors = [{'message': str(e)}] 1153 | return data, errors 1154 | 1155 | async def async_query_one(self, query: str, variables: dict | None = None) -> tuple: 1156 | """Async version of query_one method that makes a single child query. 1157 | 1158 | Args: 1159 | query (string): GraphQL query instructions. 1160 | variables (string, optional): Query variables. Defaults to None. 1161 | 1162 | Returns: 1163 | tuple: Tuple containing (data, errors) from the GraphQL response. 1164 | """ 1165 | return await self.async_query(query, variables, flatten=True, single_child=True) 1166 | 1167 | async def async_mutate( 1168 | self, 1169 | mutation: str, 1170 | variables: dict | None = None, 1171 | flatten: bool = True 1172 | ) -> tuple: 1173 | """Async version of mutate method that makes a mutation transaction 1174 | to the current environment. 1175 | 1176 | Args: 1177 | mutation (string): GraphQL mutation instructions. 1178 | variables (string, optional): Mutation variables. Defaults to None. 1179 | flatten (bool, optional): Check if GraphQLResponse should be flatten or 1180 | not. 
Defaults to True. 1181 | 1182 | Returns: 1183 | tuple: Tuple containing (data, errors) from the GraphQL response. 1184 | """ 1185 | response = {} 1186 | data = None 1187 | errors = [] 1188 | try: 1189 | response = await self.async_execute(mutation, variables) 1190 | except Exception as e: 1191 | errors = [{'message': str(e)}] 1192 | finally: 1193 | response_errors = response.get('errors', []) 1194 | if response_errors: 1195 | errors.extend(response_errors) 1196 | if not errors: 1197 | data = response.get('data', None) 1198 | if flatten and data: 1199 | data = data_flatten(data) 1200 | data_messages = self._get_messages(data) 1201 | if data_messages: 1202 | errors.extend(data_messages) 1203 | return data, errors 1204 | 1205 | # Ensure cleanup of resources 1206 | async def async_cleanup(self): 1207 | """Close any open async resources 1208 | 1209 | This method should only be called when you know no other 1210 | async operations are in progress. It handles cases where 1211 | the event loop might already be closed. 
1212 | """ 1213 | if self._async_client is not None: 1214 | try: 1215 | # Check if the client is still usable 1216 | try: 1217 | # This will raise an exception if the event loop is closed 1218 | await self._async_client.get_timeout() 1219 | # If we get here, the client is usable, so close it 1220 | await self._async_client.aclose() 1221 | except (RuntimeError, AttributeError) as e: 1222 | if "Event loop is closed" in str(e) or "has no attribute" in str(e): 1223 | # Client's event loop is already closed 1224 | # We can't await aclose(), just let it be garbage collected 1225 | pass 1226 | else: 1227 | # Some other error during check, still try to close 1228 | await self._async_client.aclose() 1229 | except Exception as e: # pylint: disable=broad-except 1230 | # If closing fails, log but continue 1231 | log(LogLevel.WARNING, 1232 | f"Warning: Error closing async client: {str(e)}") 1233 | finally: 1234 | # Always set to None to allow garbage collection and recreation 1235 | self._async_client = None 1236 | 1237 | def _close(self): 1238 | """Explicitly close resources""" 1239 | # Clean up synchronous client 1240 | if hasattr(self, '_thread_local') and hasattr(self._thread_local, 'client'): 1241 | try: 1242 | self._thread_local.client.close() 1243 | except Exception: # pylint: disable=broad-except 1244 | pass 1245 | 1246 | # For async client, we can't use await in close(), so just set to None 1247 | # to allow garbage collection. We don't try to close it properly here 1248 | # as that would require an event loop, which might be closed already. 1249 | if hasattr(self, '_async_client') and self._async_client is not None: 1250 | self._async_client = None 1251 | 1252 | def __del__(self): 1253 | """Cleanup resources when the instance is being destroyed""" 1254 | self._close() 1255 | --------------------------------------------------------------------------------