├── .coveragerc ├── .dockerignore ├── .env.example ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── code-quality.yml │ ├── pr-rating.yml │ ├── pytest.yml │ ├── test-build.yml │ └── test-docker.yml ├── .gitignore ├── AUTHORS.rst ├── CHANGELOG.rst ├── LICENSE.txt ├── README.md ├── docker ├── Dockerfile ├── with-ipfs.dockerfile └── with-ipfs.entrypoint.sh ├── docs ├── Makefile ├── _static │ ├── .gitignore │ └── logo.svg ├── authors.rst ├── changelog.rst ├── conf.py ├── content │ ├── account.rst │ ├── aggregates.rst │ ├── async_notes.rst │ ├── cli.rst │ ├── introduction.rst │ ├── posts.rst │ └── programs.rst ├── index.rst └── license.rst ├── pyproject.toml ├── scripts ├── build-and-shell.sh ├── build-and-test.sh ├── build-to-publish.sh └── gendoc.py ├── src └── aleph_client │ ├── __init__.py │ ├── __main__.py │ ├── account.py │ ├── commands │ ├── __init__.py │ ├── about.py │ ├── account.py │ ├── aggregate.py │ ├── domain.py │ ├── files.py │ ├── help_strings.py │ ├── instance │ │ ├── __init__.py │ │ ├── display.py │ │ └── network.py │ ├── message.py │ ├── node.py │ ├── pricing.py │ ├── program.py │ ├── program_utils │ │ ├── runtime_checker.squashfs │ │ └── runtime_checker │ │ │ └── main.py │ └── utils.py │ ├── models.py │ ├── utils.py │ └── voucher.py ├── test.py └── tests ├── __init__.py ├── test_post.json └── unit ├── __init__.py ├── conftest.py ├── mocks.py ├── test_aggregate.py ├── test_commands.py ├── test_init.py ├── test_instance.py ├── test_node.py ├── test_pricing.py ├── test_program.py ├── test_utils.py └── test_voucher.py /.coveragerc: -------------------------------------------------------------------------------- 1 | # .coveragerc to control coverage.py 2 | [run] 3 | branch = True 4 | source = aleph_client 5 | # omit = bad_file.py 6 | 7 | [paths] 8 | source = 9 | src/ 10 | */site-packages/ 11 | 12 | [report] 13 | # Regexes for lines to exclude from consideration 14 | exclude_lines = 15 | # Have to re-enable the standard pragma 
16 | pragma: no cover 17 | 18 | # Don't complain about missing debug-only code: 19 | def __repr__ 20 | if self\.debug 21 | 22 | # Don't complain if tests don't hit defensive assertion code: 23 | raise AssertionError 24 | raise NotImplementedError 25 | 26 | # Don't complain if non-runnable code isn't run: 27 | if 0: 28 | if __name__ == .__main__.: 29 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/*.pyc 2 | **/__pycache__ 3 | 4 | /.idea 5 | /.eggs 6 | **/.mypy_cache 7 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # To modify aleph.sdk.conf's settings, create a .env file and add: 2 | # ALEPH_= 3 | # To modify active & rpc fields in CHAINS, follow this example: 4 | # ALEPH_CHAINS_SEPOLIA_ACTIVE=True 5 | # ALEPH_CHAINS_SEPOLIA_RPC=https://... -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Explain what problem this PR is resolving 2 | 3 | Related ClickUp, GitHub or Jira tickets : ALEPH-XXX 4 | 5 | ## Self proofreading checklist 6 | 7 | - [ ] The new code is clear, easy to read and well commented. 8 | - [ ] New code does not duplicate the functions of builtin or popular libraries. 9 | - [ ] An LLM was used to review the new code and look for simplifications. 10 | - [ ] New classes and functions contain docstrings explaining what they provide. 11 | - [ ] All new code is covered by relevant tests. 12 | 13 | ## Documentation 14 | 15 | The documentation regarding the impacted features is available on: 16 | > URL 17 | 18 | The changes in the documentation are available here: 19 | > URL 20 | 21 | ## Changes 22 | 23 | Explain the changes that were made.
The idea is not to list exhaustively all the changes made (GitHub already provides a full diff), but to help the reviewers better understand: 24 | - which specific file changes go together, e.g: when creating a table in the front-end, there usually is a config file that goes with it 25 | - the reasoning behind some changes, e.g: deleted files because they are now redundant 26 | - the behaviour to expect, e.g: tooltip has purple background color because the client likes it so, changed a key in the API response to be consistent with other endpoints 27 | 28 | ## How to test 29 | 30 | Explain how to test your PR. 31 | If a specific config is required explain it here (account, data entry, ...) 32 | 33 | ## Print screen / video 34 | 35 | Upload here screenshots or videos showing the changes if relevant. 36 | 37 | ## Notes 38 | 39 | Things that the reviewers should know: known bugs that are out of the scope of the PR, other trade-offs that were made. 40 | If the PR depends on a PR in another repo, or merges into another PR (i.e.
main), it should also be mentioned here 41 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | 4 | 5 | updates: 6 | - package-ecosystem: "pip" 7 | directory: "/" 8 | schedule: 9 | interval: "weekly" 10 | -------------------------------------------------------------------------------- /.github/workflows/code-quality.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Test code quality 3 | 4 | 5 | on: 6 | push: 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | 12 | jobs: 13 | code-quality: 14 | runs-on: ubuntu-22.04 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Workaround github issue https://github.com/actions/runner-images/issues/7192 20 | run: sudo echo RESET grub-efi/install_devices | sudo debconf-communicate grub-pc 21 | 22 | - name: Install pip and hatch 23 | run: | 24 | sudo apt-get install -y python3-pip 25 | pip3 install hatch hatch-vcs 26 | 27 | - name: Cache dependencies 28 | uses: actions/cache@v4 29 | with: 30 | path: ~/.cache/pip 31 | key: ${{ runner.os }}-code-quality-${{ hashFiles('pyproject.toml') }} 32 | restore-keys: | 33 | ${{ runner.os }}-code-quality- 34 | 35 | - name: Install required system packages only for Ubuntu Linux 36 | run: sudo apt-get install -y libsecp256k1-dev 37 | 38 | - name: Run Hatch lint 39 | run: hatch run linting:all 40 | -------------------------------------------------------------------------------- /.github/workflows/pr-rating.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Test PR Difficulty Rating Action 3 | 4 | 5 | permissions: 6 | pull-requests: write 7 | 8 | 9 | on: 10 | pull_request: 11 | types: [opened, reopened, ready_for_review] 12 | 13 | 14 | jobs: 15 | difficulty-rating: 16 | runs-on: ubuntu-latest 17 | if: 
github.event.pull_request.draft == false 18 | steps: 19 | - name: PR Difficulty Rating 20 | uses: rate-my-pr/rate@v2 21 | with: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | LLAMA_URL: ${{ secrets.LLAMA_URL }} 24 | -------------------------------------------------------------------------------- /.github/workflows/pytest.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Pytest and code Coverage 3 | 4 | 5 | on: 6 | push: 7 | branches: 8 | - "*" 9 | pull_request: 10 | branches: 11 | - main 12 | schedule: 13 | # Run every night at 04:00 (GitHub Actions timezone) 14 | # in order to catch when unfrozen dependency updates 15 | # break the use of the library. 16 | - cron: "4 0 * * *" 17 | 18 | 19 | jobs: 20 | pytest: 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | os: [macos-13, macos-14, ubuntu-22.04, ubuntu-24.04] 25 | runs-on: ${{matrix.os}} 26 | 27 | steps: 28 | - uses: actions/checkout@v4 29 | 30 | - name: Install required system packages for Ubuntu 31 | run: sudo apt-get install -y python3-pip libsecp256k1-dev python3-coverage 32 | if: startsWith(matrix.os, 'ubuntu-') 33 | 34 | - name: Install required system packages for macOS 35 | if: startsWith(matrix.os, 'macos-') 36 | run: | 37 | brew update 38 | brew tap cuber/homebrew-libsecp256k1 39 | brew install libsecp256k1 40 | brew install automake 41 | 42 | - name: Set up Python for macOS 43 | if: startsWith(matrix.os, 'macos') 44 | uses: actions/setup-python@v5 45 | with: 46 | python-version: 3.11 47 | 48 | - run: python3 -m venv /tmp/venv 49 | - run: /tmp/venv/bin/python -m pip install --upgrade pip hatch coverage 50 | 51 | # Only run coverage on one OS 52 | - run: /tmp/venv/bin/hatch run testing:test 53 | if: matrix.os != 'ubuntu-24.04' 54 | 55 | - name: Test with coverage 56 | if: matrix.os == 'ubuntu-24.04' 57 | run: | 58 | /tmp/venv/bin/hatch run testing:cov 59 | 60 | - uses: codecov/codecov-action@v4.0.1 61 | if: matrix.os == 'ubuntu-24.04' 62 | 
with: 63 | token: ${{ secrets.CODECOV_TOKEN }} 64 | slug: aleph-im/aleph-client 65 | file: ./coverage.xml 66 | directory: /home/runner/work/aleph-client/aleph-client 67 | -------------------------------------------------------------------------------- /.github/workflows/test-build.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Build source and wheel packages 3 | 4 | 5 | on: 6 | push: 7 | branches: 8 | - master 9 | pull_request: 10 | branches: 11 | - master 12 | 13 | 14 | jobs: 15 | build-install-wheel: 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: [macos-13, macos-14, ubuntu-22.04, ubuntu-24.04] 20 | runs-on: ${{matrix.os}} 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | 25 | - name: Workaround github issue https://github.com/actions/runner-images/issues/7192 26 | if: startsWith(matrix.os, 'ubuntu-') 27 | run: sudo echo RESET grub-efi/install_devices | sudo debconf-communicate grub-pc 28 | 29 | - name: Install required system packages for macOS 30 | if: startsWith(matrix.os, 'macos-') 31 | run: | 32 | brew update 33 | brew tap cuber/homebrew-libsecp256k1 34 | brew install libsecp256k1 35 | 36 | - name: Set up Python for macOS 37 | if: startsWith(matrix.os, 'macos') 38 | uses: actions/setup-python@v5 39 | with: 40 | python-version: 3.11 41 | 42 | - name: Install required system packages only for Ubuntu Linux 43 | if: startsWith(matrix.os, 'ubuntu-') 44 | run: | 45 | sudo apt-get update 46 | sudo apt-get -y upgrade 47 | sudo apt-get install -y libsecp256k1-dev 48 | 49 | - name: Install required Python packages 50 | run: | 51 | python3 -m venv /tmp/venv 52 | /tmp/venv/bin/python3 -m pip install --upgrade hatch hatch-vcs 53 | 54 | - name: Build source and wheel packages 55 | run: | 56 | /tmp/venv/bin/hatch build 57 | 58 | - name: Install the Python wheel 59 | run: |- 60 | python3 -m venv /tmp/install-venv 61 | /tmp/install-venv/bin/python3 -m pip install dist/aleph_client-*.whl 62 | 
-------------------------------------------------------------------------------- /.github/workflows/test-docker.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Test using Pytest in Docker 3 | 4 | 5 | on: 6 | push: 7 | branches: 8 | - "*" 9 | pull_request: 10 | branches: 11 | - master 12 | 13 | 14 | jobs: 15 | test-docker: 16 | runs-on: ubuntu-22.04 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | 21 | # Use GitHub's Docker registry to cache intermediate layers 22 | - run: echo ${{ secrets.GITHUB_TOKEN }} | docker login docker.pkg.github.com 23 | -u $GITHUB_ACTOR --password-stdin 24 | - run: docker pull docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-client-build-cache 25 | || true 26 | 27 | - name: Build the Docker image 28 | run: | 29 | git fetch --prune --unshallow --tags 30 | docker build . -t aleph-client:${GITHUB_REF##*/} -f docker/Dockerfile --cache-from=docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-client-build-cache 31 | 32 | - name: Push the image on GitHub's repository 33 | run: docker tag aleph-client:${GITHUB_REF##*/} docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-client:${GITHUB_REF##*/} 34 | && docker push docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-client:${GITHUB_REF##*/} 35 | || true 36 | 37 | - name: Cache the image on GitHub's repository 38 | run: docker tag aleph-client:${GITHUB_REF##*/} docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-client-build-cache 39 | && docker push docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-client-build-cache 40 | || true 41 | 42 | - name: Pytest in the Docker image 43 | run: | 44 | docker run -w /opt/aleph-client --entrypoint /opt/venv/bin/hatch aleph-client:${GITHUB_REF##*/} run testing:test 45 | 46 | - name: MyPy in the Docker image 47 | run: |- 48 | docker run -w /opt/aleph-client --entrypoint /opt/venv/bin/hatch aleph-client:${GITHUB_REF##*/} run linting:all 49 | -------------------------------------------------------------------------------- 
/.gitignore: -------------------------------------------------------------------------------- 1 | # Temporary and binary files 2 | *~ 3 | *.py[cod] 4 | *.so 5 | *.cfg 6 | !.isort.cfg 7 | !setup.cfg 8 | *.orig 9 | *.log 10 | *.pot 11 | __pycache__/* 12 | .cache/* 13 | .*.swp 14 | */.ipynb_checkpoints/* 15 | 16 | # Project files 17 | .ropeproject 18 | .project 19 | .pydevproject 20 | .settings 21 | .idea 22 | tags 23 | 24 | # Package files 25 | *.egg 26 | *.eggs/ 27 | .installed.cfg 28 | *.egg-info 29 | 30 | # Unittest and coverage 31 | htmlcov/* 32 | .coverage 33 | .tox 34 | junit.xml 35 | coverage.xml 36 | .pytest_cache/ 37 | 38 | # Build and docs folder/files 39 | build/* 40 | dist/* 41 | sdist/* 42 | docs/api/* 43 | docs/_rst/* 44 | docs/_build/* 45 | cover/* 46 | MANIFEST 47 | 48 | # Per-project virtualenvs 49 | .venv*/ 50 | venv/* 51 | **/device.key 52 | 53 | # Environment variables 54 | .env 55 | .env.local 56 | 57 | .gitsigners 58 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributors 3 | ============ 4 | 5 | * Henry Taieb 6 | * Hugo Herter 7 | * Moshe Malawach 8 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | Version 0.1 6 | =========== 7 | 8 | - Converted from minialeph -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 Aleph.im project 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including 
without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aleph-client 2 | 3 | Python Client for the [aleph.im network](https://www.aleph.im), next generation network of 4 | decentralized big data applications. Development follows the [Aleph 5 | Whitepaper](https://github.com/aleph-im/aleph-whitepaper). 6 | 7 | ## Documentation 8 | 9 | Documentation can be found on https://docs.aleph.im/tools/aleph-client/ 10 | 11 | ## Requirements 12 | 13 | ### Linux 14 | 15 | Some cryptographic functionalities use curve secp256k1 and require 16 | installing [libsecp256k1](https://github.com/bitcoin-core/secp256k1). 17 | 18 | > apt-get install -y python3-pip libsecp256k1-dev squashfs-tools 19 | 20 | ### macOs 21 | 22 | > brew tap cuber/homebrew-libsecp256k1 23 | > brew install libsecp256k1 24 | 25 | ### Windows 26 | 27 | We recommend using [WSL](https://learn.microsoft.com/en-us/windows/wsl/install) (Windows Subsystem for Linux). 
28 | 29 | ## Installation 30 | 31 | ### From PyPI 32 | 33 | Using pip and [PyPI](https://pypi.org/project/aleph-client/): 34 | 35 | > pip install aleph-client 36 | 37 | ### Using a container 38 | 39 | Use the Aleph client and its CLI from within Docker or Podman with: 40 | 41 | > docker run --rm -ti -v $(pwd)/ ghcr.io/aleph-im/aleph-client/aleph-client:master --help 42 | 43 | Warning: This will use an ephemeral key pair that will be discarded when 44 | stopping the container 45 | 46 | ## Installation for development 47 | 48 | We recommend using [hatch](https://hatch.pypa.io/) for development. 49 | 50 | Hatch is a modern, extensible Python project manager. 51 | It creates a virtual environment for each project and manages dependencies. 52 | 53 | > pip install hatch 54 | 55 | ### Running tests 56 | 57 | > hatch test 58 | 59 | or 60 | 61 | > hatch run testing:cov 62 | 63 | ### Formatting code 64 | 65 | > hatch run linting:fmt 66 | 67 | ### Checking types 68 | 69 | > hatch run linting:typing 70 | 71 | ## Publish to PyPI 72 | 73 | > hatch build 74 | > hatch upload 75 | 76 | If you want NULS2 support you will need to install nuls2-python 77 | (currently only available on github): 78 | 79 | > pip install aleph-sdk-python[nuls2] 80 | 81 | To install from source and still be able to modify the source code: 82 | 83 | > pip install -e . 84 | 85 | ## Updating the User Documentation 86 | 87 | The user documentation for Aleph is maintained in the [aleph-docs](https://github.com/aleph-im/aleph-docs) repository. The CLI page is generated using the `typer` command. When releasing a new version, it's important to update the documentation as part of the release process.
88 | 89 | If you have the `aleph-docs` repository cloned as a sibling folder to your current directory, you can use the following 90 | command to generate updated documentation: 91 | 92 | ```shell 93 | ./scripts/gendoc.py src/aleph_client/__main__.py docs \ 94 | --name aleph --title 'Aleph CLI Documentation' \ 95 | --output ../aleph-docs/docs/tools/aleph-client/usage.md 96 | ``` 97 | 98 | Then, open a Pull Request (PR) on the [aleph-docs](https://github.com/aleph-im/aleph-docs/pulls) repository with your changes. -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9-bullseye 2 | MAINTAINER The aleph.im project 3 | 4 | RUN apt-get update && apt-get -y upgrade && apt-get install -y \ 5 | libsecp256k1-dev \ 6 | && rm -rf /var/lib/apt/lists/* 7 | 8 | RUN useradd -s /bin/bash --create-home user 9 | RUN mkdir /opt/venv 10 | RUN mkdir /opt/aleph-client/ 11 | RUN chown user:user /opt/venv 12 | RUN chown user:user /opt/aleph-client 13 | 14 | USER user 15 | RUN python3.9 -m venv /opt/venv 16 | ENV PATH="/opt/venv/bin:$PATH" 17 | ENV PATH="/opt/venv/bin:$PATH" 18 | 19 | RUN pip install --upgrade pip wheel twine 20 | 21 | # Preinstall dependencies for faster steps 22 | RUN pip install --upgrade secp256k1 coincurve aiohttp eciespy python-magic typer 23 | RUN pip install --upgrade 'aleph-message~=0.3.2' eth_account pynacl base58 24 | RUN pip install --upgrade hatch 25 | 26 | WORKDIR /opt/aleph-client/ 27 | COPY . . 
28 | USER root 29 | RUN chown -R user:user /opt/aleph-client 30 | 31 | RUN git config --global --add safe.directory /opt/aleph-client 32 | RUN pip install -e .[testing,ethereum,solana,tezos] 33 | 34 | RUN mkdir /data 35 | RUN chown user:user /data 36 | ENV ALEPH_PRIVATE_KEY_FILE=/data/secret.key 37 | 38 | WORKDIR /home/user 39 | USER user 40 | ENTRYPOINT ["aleph"] 41 | CMD ["--help"] 42 | -------------------------------------------------------------------------------- /docker/with-ipfs.dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9 2 | 3 | # === Install IPFS === 4 | RUN apt-get install -y wget 5 | RUN wget https://ipfs.io/ipns/dist.ipfs.io/kubo/v0.15.0/kubo_v0.15.0_linux-amd64.tar.gz 6 | RUN tar -xvzf kubo_v0.15.0_linux-amd64.tar.gz -C /opt/ 7 | RUN ln -s /opt/kubo/ipfs /usr/local/bin/ 8 | 9 | # Volume to store IPFS data 10 | RUN mkdir /var/lib/ipfs 11 | ENV IPFS_PATH /var/lib/ipfs 12 | VOLUME /var/lib/ipfs 13 | 14 | # IPFS Swarm 15 | EXPOSE 4001 16 | # IPFS WebUI 17 | EXPOSE 5001 18 | # IPFS Gateway 19 | EXPOSE 8080 20 | 21 | 22 | # === Install Aleph-Client === 23 | 24 | RUN apt-get update && apt-get -y upgrade && apt-get install -y \ 25 | libsecp256k1-dev \ 26 | && rm -rf /var/lib/apt/lists/* 27 | 28 | RUN mkdir /opt/aleph-client/ 29 | WORKDIR /opt/aleph-client/ 30 | COPY . . 31 | 32 | RUN pip install -e .[testing,ethereum] 33 | 34 | 35 | # - User 'aleph' to run the code itself 36 | RUN useradd --create-home -s /bin/bash aleph 37 | WORKDIR /home/aleph 38 | 39 | COPY docker/with-ipfs.entrypoint.sh /entrypoint.sh 40 | CMD ["/entrypoint.sh"] 41 | -------------------------------------------------------------------------------- /docker/with-ipfs.entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | 5 | # Initialize IPFS if it has not been done yet 6 | if [ ! 
-f /var/lib/ipfs/config ]; then 7 | chown -R aleph:aleph /var/lib/ipfs 8 | su aleph -c "/opt/go-ipfs/ipfs init --profile server" 9 | fi 10 | 11 | # Start IPFS as a daemon 12 | su aleph -c "/opt/go-ipfs/ipfs daemon --enable-pubsub-experiment" & 13 | 14 | # Run a shell 15 | su aleph -c "/bin/bash" 16 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = ../build/sphinx/ 9 | AUTODOCDIR = api 10 | AUTODOCBUILD = sphinx-apidoc 11 | PROJECT = aleph-client 12 | MODULEDIR = ../src/aleph_client 13 | 14 | # User-friendly check for sphinx-build 15 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1) 16 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 17 | endif 18 | 19 | # Internal variables. 20 | PAPEROPT_a4 = -D latex_paper_size=a4 21 | PAPEROPT_letter = -D latex_paper_size=letter 22 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 23 | # the i18n builder cannot share the environment and doctrees with the others 24 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
25 | 26 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext doc-requirements 27 | 28 | help: 29 | @echo "Please use \`make ' where is one of" 30 | @echo " html to make standalone HTML files" 31 | @echo " dirhtml to make HTML files named index.html in directories" 32 | @echo " singlehtml to make a single large HTML file" 33 | @echo " pickle to make pickle files" 34 | @echo " json to make JSON files" 35 | @echo " htmlhelp to make HTML files and a HTML help project" 36 | @echo " qthelp to make HTML files and a qthelp project" 37 | @echo " devhelp to make HTML files and a Devhelp project" 38 | @echo " epub to make an epub" 39 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 40 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 41 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 42 | @echo " text to make text files" 43 | @echo " man to make manual pages" 44 | @echo " texinfo to make Texinfo files" 45 | @echo " info to make Texinfo files and run them through makeinfo" 46 | @echo " gettext to make PO message catalogs" 47 | @echo " changes to make an overview of all changed/added/deprecated items" 48 | @echo " xml to make Docutils-native XML files" 49 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 50 | @echo " linkcheck to check all external links for integrity" 51 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 52 | 53 | clean: 54 | rm -rf $(BUILDDIR)/* $(AUTODOCDIR) 55 | 56 | $(AUTODOCDIR): $(MODULEDIR) 57 | mkdir -p $@ 58 | $(AUTODOCBUILD) -f -o $@ $^ 59 | 60 | doc-requirements: $(AUTODOCDIR) 61 | 62 | html: doc-requirements 63 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 64 | @echo 65 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
66 | 67 | dirhtml: doc-requirements 68 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 69 | @echo 70 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 71 | 72 | singlehtml: doc-requirements 73 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 74 | @echo 75 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 76 | 77 | pickle: doc-requirements 78 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 79 | @echo 80 | @echo "Build finished; now you can process the pickle files." 81 | 82 | json: doc-requirements 83 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 84 | @echo 85 | @echo "Build finished; now you can process the JSON files." 86 | 87 | htmlhelp: doc-requirements 88 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 89 | @echo 90 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 91 | ".hhp project file in $(BUILDDIR)/htmlhelp." 92 | 93 | qthelp: doc-requirements 94 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 95 | @echo 96 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 97 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 98 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/$(PROJECT).qhcp" 99 | @echo "To view the help file:" 100 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/$(PROJECT).qhc" 101 | 102 | devhelp: doc-requirements 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $HOME/.local/share/devhelp/$(PROJECT)" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $HOME/.local/share/devhelp/$(PROJEC)" 109 | @echo "# devhelp" 110 | 111 | epub: doc-requirements 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
115 | 116 | patch-latex: 117 | find _build/latex -iname "*.tex" | xargs -- \ 118 | sed -i'' 's~includegraphics{~includegraphics\[keepaspectratio,max size={\\textwidth}{\\textheight}\]{~g' 119 | 120 | latex: doc-requirements 121 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 122 | $(MAKE) patch-latex 123 | @echo 124 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 125 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 126 | "(use \`make latexpdf' here to do that automatically)." 127 | 128 | latexpdf: doc-requirements 129 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 130 | $(MAKE) patch-latex 131 | @echo "Running LaTeX files through pdflatex..." 132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | latexpdfja: doc-requirements 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo "Running LaTeX files through platex and dvipdfmx..." 138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 140 | 141 | text: doc-requirements 142 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 143 | @echo 144 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 145 | 146 | man: doc-requirements 147 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 148 | @echo 149 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 150 | 151 | texinfo: doc-requirements 152 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 153 | @echo 154 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 155 | @echo "Run \`make' in that directory to run these through makeinfo" \ 156 | "(use \`make info' here to do that automatically)." 157 | 158 | info: doc-requirements 159 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 160 | @echo "Running Texinfo files through makeinfo..." 
161 | make -C $(BUILDDIR)/texinfo info 162 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 163 | 164 | gettext: doc-requirements 165 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 166 | @echo 167 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 168 | 169 | changes: doc-requirements 170 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 171 | @echo 172 | @echo "The overview file is in $(BUILDDIR)/changes." 173 | 174 | linkcheck: doc-requirements 175 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 176 | @echo 177 | @echo "Link check complete; look for any errors in the above output " \ 178 | "or in $(BUILDDIR)/linkcheck/output.txt." 179 | 180 | doctest: doc-requirements 181 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 182 | @echo "Testing of doctests in the sources finished, look at the " \ 183 | "results in $(BUILDDIR)/doctest/output.txt." 184 | 185 | xml: doc-requirements 186 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 187 | @echo 188 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 189 | 190 | pseudoxml: doc-requirements 191 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 192 | @echo 193 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
194 | -------------------------------------------------------------------------------- /docs/_static/.gitignore: -------------------------------------------------------------------------------- 1 | # Empty directory 2 | -------------------------------------------------------------------------------- /docs/_static/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | image/svg+xml 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. _authors: 2 | .. include:: ../AUTHORS.rst 3 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | .. include:: ../CHANGELOG.rst 3 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # 2 | # This file is execfile()d with the current directory set to its containing dir. 3 | # 4 | # Note that not all possible configuration values are present in this 5 | # autogenerated file. 6 | # 7 | # All configuration values have a default; values that are commented out 8 | # serve to show the default. 9 | 10 | import inspect 11 | import os 12 | import shutil 13 | import sys 14 | 15 | __location__ = os.path.join(os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe()))) 16 | 17 | # If extensions (or modules to document with autodoc) are in another directory, 18 | # add these directories to sys.path here. If the directory is relative to the 19 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
sys.path.insert(0, os.path.join(__location__, "../src"))

# -- Run sphinx-apidoc ------------------------------------------------------
# This hack is necessary since RTD does not issue `sphinx-apidoc` before running
# `sphinx-build -b html . _build/html`. See Issue:
# https://github.com/rtfd/readthedocs.org/issues/1139
# DON'T FORGET: Check the box "Install your project inside a virtualenv using
# setup.py install" in the RTD Advanced Settings.
# Additionally it helps us to avoid running apidoc manually

try:  # for Sphinx >= 1.7
    from sphinx.ext import apidoc
except ImportError:
    from sphinx import apidoc

output_dir = os.path.join(__location__, "api")
module_dir = os.path.join(__location__, "../src/aleph_client")

# Remove stale API docs from a previous build before regenerating them.
try:
    shutil.rmtree(output_dir)
except FileNotFoundError:
    pass

try:
    import sphinx

    cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}"
    cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir)

    args = cmd_line.split(" ")
    # Sphinx >= 1.7 exposes apidoc.main() without expecting the program name
    # as argv[0]; older versions do. Use sphinx.version_info (a plain tuple
    # provided by Sphinx itself) instead of the deprecated
    # pkg_resources.parse_version, which is removed from modern setuptools.
    if sphinx.version_info[:2] >= (1, 7):
        args = args[1:]

    apidoc.main(args)
except Exception as exc:  # noqa: BLE001 - apidoc failure must not break the doc build
    # Best effort only: report the problem instead of swallowing it silently.
    print(f"Running sphinx-apidoc failed: {exc}", file=sys.stderr)

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Sphinx extensions enabled for this project: autodoc/autosummary generate the
# API reference, napoleon parses Google/NumPy docstrings, plantuml renders the
# UML diagrams embedded in the introduction pages.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.autosummary",
    "sphinx.ext.viewcode",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.ifconfig",
    "sphinx.ext.mathjax",
    "sphinx.ext.napoleon",
    "sphinxcontrib.plantuml",
]

# plantuml = 'java -jar docs/plantuml.jar'

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "aleph-client"
copyright = "2019, Aleph.im"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ""  # Is set by calling `setup.py docs`
# The full version, including alpha/beta/rc tags.
# NOTE: overridden below from aleph_client.__version__ when the package is
# importable.
release = ""  # Is set by calling `setup.py docs`

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    #'sidebar_width': '300px',
    #'page_width': '1200px'
    "logo": "logo.svg",
    "description": "Aleph Network Client",
    "github_user": "aleph-im",
    "github_repo": "aleph-client",
    "github_button": True,
    "github_type": "star",
    "github_banner": True,
    "travis_button": True,
    "codecov_button": True,
    "pre_bg": "#FFF6E5",
    "note_bg": "#E5ECD1",
    "note_border": "#BFCF8C",
    "body_text": "#482C0A",
    "sidebar_text": "#49443E",
    "sidebar_header": "#4B4032",
    "sidebar_collapse": False,
}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# Prefer the installed package version over the empty defaults set above;
# silently keep the defaults when aleph_client is not importable (e.g. on a
# docs-only build).
try:
    from aleph_client import __version__ as version
except ImportError:
    pass
else:
    release = version

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "logo.svg"

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = "aleph_client-doc"


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ("index", "user_guide.tex", "aleph-client Documentation", "Aleph.im", "manual"),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = ""

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
279 | # latex_appendices = [] 280 | 281 | # If false, no module index is generated. 282 | # latex_domain_indices = True 283 | 284 | # -- External mapping ------------------------------------------------------------ 285 | python_version = ".".join(map(str, sys.version_info[0:2])) 286 | intersphinx_mapping = { 287 | "sphinx": ("http://www.sphinx-doc.org/en/stable", None), 288 | "python": ("https://docs.python.org/" + python_version, None), 289 | "matplotlib": ("https://matplotlib.org", None), 290 | "numpy": ("https://docs.scipy.org/doc/numpy", None), 291 | "sklearn": ("http://scikit-learn.org/stable", None), 292 | "pandas": ("http://pandas.pydata.org/pandas-docs/stable", None), 293 | "scipy": ("https://docs.scipy.org/doc/scipy/reference", None), 294 | } 295 | -------------------------------------------------------------------------------- /docs/content/account.rst: -------------------------------------------------------------------------------- 1 | Accounts 2 | ======== 3 | 4 | To send data to the aleph.im network, you need to have an account. 5 | This account can be made using any of the supported providers. 6 | 7 | Common 8 | ------ 9 | 10 | You will need to instanciate an account using a private key accepted by the 11 | corresponding account provider. 12 | 13 | If you don't want to handle the private key yourself, you can use the 14 | "fallback" helper. This searches for a "device.key" file in the current folder. 15 | If this file isn't found, it will try to create a new key file with a random 16 | key. 17 | 18 | Ethereum 19 | ******** 20 | 21 | Example using Ethereum: 22 | 23 | .. code-block:: python3 24 | 25 | from aleph.sdk.chains.ethereum import get_fallback_account 26 | 27 | account = get_fallback_account() 28 | 29 | Another example setting the private key manually: 30 | 31 | .. 
code-block:: python3 32 | 33 | from aleph.sdk.chains.ethereum import ETHAccount 34 | 35 | prv = bytes.fromhex("xxxxxx") 36 | 37 | account = ETHAccount(prv) 38 | 39 | Depending on account provider, the key can be passed as an hex string. 40 | It's the case for Ethereum: 41 | 42 | .. code-block:: python3 43 | 44 | >>> from aleph.sdk.chains.ethereum import ETHAccount 45 | >>> account = ETHAccount("0x0000000000000000000000000000000000000000000000000000000000000001") 46 | >>> account.get_address() 47 | '0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf' 48 | 49 | .. WARNING:: 50 | Do not use this dummy private key, it's just an example! 51 | 52 | Polkadot / Substrate 53 | ******************** 54 | 55 | DOT/Substrate accounts are a bit different. You pass them mnemonics, and optionally an address_type. 56 | 57 | Example using Substrate (if you already used a fallback on ethereum or nuls, you might consider deleting the private key file): 58 | 59 | .. code-block:: python3 60 | 61 | from aleph.sdk.chains.substrate import get_fallback_account 62 | 63 | account = get_fallback_account() 64 | 65 | Another example setting the mnemonics manually: 66 | 67 | .. code-block:: python3 68 | 69 | from aleph.sdk.chains.substrate import DOTAccount 70 | 71 | account = DOTAccount("payment shy team bargain chest fold bless artwork identify breeze pelican category") 72 | 73 | .. WARNING:: 74 | Do not use this dummy private key, it's just an example! 75 | 76 | You can also change the address_type (0 for polkadot, 2 for canary, 42 generic...). 77 | 78 | .. code-block:: python3 79 | 80 | >>> from aleph.sdk.chains.substrate import DOTAccount 81 | >>> account = DOTAccount("payment shy team bargain chest fold bless artwork identify breeze pelican category") 82 | >>> account.get_address() 83 | '5CGNMKCscqN2QNcT7Jtuz23ab7JUxh8wTEtXhECZLJn5vCGX' 84 | >>> account = DOTAccount("payment shy team bargain chest fold bless artwork identify breeze pelican category", 85 | ... 
address_type=0) 86 | >>> account.get_address() 87 | '1CfVeTwUcdVqucy4wwv8AsjSjJ8ezh5Xjd1rXButPoc6WJY' 88 | 89 | .. WARNING:: 90 | Do not use this dummy private key, it's just an example! 91 | 92 | NULS 93 | **** 94 | 95 | The NULS provider is very similar. 96 | 97 | Fallback account: 98 | 99 | .. code-block:: python3 100 | 101 | from aleph.sdk.chains.nuls2 import get_fallback_account 102 | 103 | account = get_fallback_account() 104 | 105 | From a private key: 106 | 107 | .. code-block:: python3 108 | 109 | >>> from aleph.sdk.chains.nuls2 import NULSAccount 110 | >>> account = NULSAccount( 111 | ... bytes.fromhex( 112 | ... "0000000000000000000000000000000000000000000000000000000000000001")) 113 | >>> account.get_address() 114 | 'NULSd6Hgb53vAd7ZMoA2E17DUTT4C1nGrJVpn' -------------------------------------------------------------------------------- /docs/content/aggregates.rst: -------------------------------------------------------------------------------- 1 | .. _aggregates: 2 | 3 | ========== 4 | Aggregates 5 | ========== 6 | 7 | Aggregates are a key-value store specific to an account. 8 | Each time a new aggregate message is received for a specific account, the 9 | nodes update the aggregate for this account. 10 | 11 | Like a dictionary update, if a key already exists, it is updated, 12 | otherwise it is created. 13 | 14 | Query aggregate of an account 15 | ----------------------------- 16 | 17 | To query keys from an account aggregate, you need to call the 18 | fetch_aggregate function on the client. 19 | 20 | Since version 0.8.0, only the asynchronous methods are available. 21 | To use them in the Python REPL, simply start it with: 22 | 23 | .. code-block:: bash 24 | 25 | $ python3 -m asyncio 26 | 27 | Then you can use the asynchronous methods: 28 | 29 | .. code-block:: python3 30 | 31 | >>> from aleph.sdk.client import AlephHttpClient 32 | >>> async with AlephHttpClient() as client: 33 | ... await client.fetch_aggregate( 34 | ... 
"0x06DE0C46884EbFF46558Cd1a9e7DA6B1c3E9D0a8", 35 | ... "profile", 36 | ... ) 37 | {"bio": "tester", "name": "Moshe on Ethereum"} 38 | 39 | 40 | Mutate aggregate 41 | ---------------- 42 | 43 | To mutate an aggregate you need to call the create_aggregate function (it will 44 | create an AGGREGATE type message for you and submit it). 45 | 46 | You need a valid account and instantiate an authenticated client: 47 | 48 | .. code-block:: python3 49 | 50 | >>> from aleph.sdk.chains.ethereum import get_fallback_account 51 | >>> from aleph.sdk.client import AuthenticatedAlephHttpClient 52 | >>> account = get_fallback_account() 53 | >>> async with AuthenticatedAlephHttpClient(account) as client: 54 | ... message, status = await client.create_aggregate( 55 | ... "profile", 56 | ... {"bio": "tester", "name": "Moshe on Ethereum"}, 57 | ... ) 58 | >>> message.content 59 | { 60 | 'key': 'profile', 61 | 'content': {'bio': 'tester', 'name': 'Moshe on Ethereum'}, 62 | 'address': '0x...', 63 | 'time': 1689081614.4252806, 64 | } 65 | 66 | 67 | Delegate write access to an aggregate key 68 | ----------------------------------------- 69 | 70 | If you want to set an aggregate on another address than the one of your 71 | account, this address should have something similar to this in its 72 | "security" key: 73 | 74 | .. code-block:: python3 75 | 76 | >>> async with AuthenticatedAlephHttpClient(account) as client: 77 | >>> await client.fetch_aggregate('YOUR_ADDRESS', 'security') 78 | {'authorizations': [ 79 | { 80 | 'address': 'TARGET_ADDRESS', 81 | 'types': ['AGGREGATE] 82 | 'aggregate_keys': ['testkey'] 83 | } 84 | ]} 85 | 86 | The owner of TARGET_ADDRESS can then set content of the "testkey" key of 87 | YOUR_ADDRESS's aggregate: 88 | 89 | .. code-block:: python3 90 | 91 | >>> async with AuthenticatedAlephHttpClient(account) as client: 92 | ... # Assuming 'account' is TARGET_ADDRESS 93 | ... message, status = await client.create_aggregate( 94 | ... "testkey", 95 | ... 
{"access": "alien"}, 96 | ... address="YOUR_ADDRESS", 97 | ... ) 98 | >>> message.content 99 | { 100 | 'key': 'testkey', 101 | 'content': {"access": "alien"}, 102 | 'address': 'TARGET_ADDRESS', 103 | 'time': 1689081614.4252806, 104 | } 105 | 106 | 107 | .. note:: 108 | 109 | For more information on the authorizations model, see 110 | `this pyaleph doc 111 | `_. 112 | -------------------------------------------------------------------------------- /docs/content/async_notes.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | Async vs Sync 3 | ============= 4 | 5 | At aleph.im we really like coding using asyncio, 6 | using async/await construct on Python 3. 7 | 8 | That being said, we totally understand that you might not 9 | have the same opinion, or that you might not be in a position 10 | to use it. 11 | 12 | For this reason, all the functions have an async version 13 | and a sync version. The sync version are actually 14 | calling the async code behind your back (sneaky!) so you might 15 | be careful if you are calling it in an environment where you 16 | already have an asyncio loop used. 17 | 18 | Most chain specific code is synchronous, and core aleph.im interaction 19 | might by async. 20 | 21 | Sync code have to be imported from :py:mod:`aleph_client.synchronous`, 22 | async code from :py:mod:`aleph_client.asynchronous`, with 23 | same functions names. 24 | 25 | aiohttp session 26 | --------------- 27 | 28 | Most of the rest interface interaction code is based on aiohttp. 29 | For simplicity sake, if there isn't a passed aiohttp session, 30 | the async functions needing it will instanciate one as a singleton 31 | and reuse it. 32 | 33 | There is a lot of use cases where you might prefer to use your own version 34 | instead. Most functions will allow you to do so, by passing a session arg. 35 | 36 | Example: 37 | 38 | .. 
code-block:: python3 39 | 40 | >>> import aiohttp 41 | >>> from aleph_client.asynchronous import fetch_aggregate 42 | >>> async with aiohttp.ClientSession() as session: 43 | ... await fetch_aggregate( 44 | ... "0x06DE0C46884EbFF46558Cd1a9e7DA6B1c3E9D0a8", 45 | ... "profile", session=session) 46 | ... 47 | {"bio": "tester", "name": "Moshe on Ethereum"} 48 | 49 | -------------------------------------------------------------------------------- /docs/content/cli.rst: -------------------------------------------------------------------------------- 1 | .. _posts: 2 | 3 | ======== 4 | Command-line Interface 5 | ======== 6 | 7 | Aleph-client can be used as a command-line interface to some Aleph.im 8 | functionalities. 9 | 10 | The following commands are available: 11 | 12 | Post 13 | ---- 14 | 15 | Post a message on Aleph.im. 16 | 17 | The content must be JSON encoded and is obtained either from a file 18 | or from a user prompt. 19 | 20 | .. code-block:: bash 21 | 22 | python3 -m aleph_client post [OPTIONS] 23 | 24 | Post a message on Aleph.im. 25 | 26 | Options: 27 | --path TEXT 28 | --type TEXT [default: test] 29 | --channel TEXT [default: TEST] 30 | --private-key TEXT 31 | --private-key-file TEXT 32 | --help Show this message and exit. 33 | 34 | 35 | Upload 36 | ------ 37 | 38 | Upload and store a file on Aleph.im. 39 | 40 | .. code-block:: bash 41 | 42 | python3 -m aleph_client upload [OPTIONS] PATH 43 | 44 | Upload and store a file on Aleph.im. 45 | 46 | Arguments: 47 | PATH [required] 48 | 49 | Options: 50 | --channel TEXT [default: TEST] 51 | --private-key TEXT 52 | --private-key-file TEXT 53 | --help Show this message and exit. 54 | 55 | Pin 56 | --- 57 | 58 | Persist a file from IPFS on Aleph.im. 59 | 60 | .. code-block:: bash 61 | 62 | python3 -m aleph_client pin [OPTIONS] HASH 63 | 64 | Persist a file from IPFS on Aleph.im. 
65 | 66 | Arguments: 67 | HASH [required] 68 | 69 | Options: 70 | --channel TEXT [default: TEST] 71 | --private-key TEXT 72 | --private-key-file TEXT 73 | --help Show this message and exit. 74 | 75 | Program 76 | ------- 77 | 78 | Register a program to run on Aleph.im virtual machines from a zip archive. 79 | 80 | .. code-block:: bash 81 | 82 | python3 -m aleph_client program [OPTIONS] PATH ENTRYPOINT 83 | 84 | Register a program to run on Aleph.im virtual machines from a zip archive. 85 | 86 | Arguments: 87 | PATH [required] 88 | ENTRYPOINT [required] 89 | 90 | Options: 91 | --channel TEXT [default: TEST] 92 | --private-key TEXT 93 | --private-key-file TEXT 94 | --help Show this message and exit. 95 | -------------------------------------------------------------------------------- /docs/content/introduction.rst: -------------------------------------------------------------------------------- 1 | Introduction to Aleph.im 2 | ======================== 3 | 4 | The Aleph.im network can be accessed from any API server. 5 | To run one yourself, you will need to install 6 | `PyAleph `_. 7 | 8 | 9 | Data retrieval 10 | -------------- 11 | 12 | Data retrieval is simple, using ReST APIs on any API server. 13 | There is a few helpers available in this library (depending on the requested 14 | data type). 15 | 16 | 17 | Data structures 18 | --------------- 19 | 20 | All data transferred over the aleph.im network are aleph messages. 21 | 22 | .. uml:: 23 | 24 | @startuml 25 | entity Message { 26 | .. Message info .. 27 | *type : text 28 | one of: POST, AGGREGATE, STORE 29 | *channel : text 30 | (channel of the message, one application ideally has one channel) 31 | *time : timestamp 32 | .. Sender info .. 33 | *sender : text <
> 34 | *chain : text 35 | (chain of sender: NULS, NULS2, ETH, BNB...) 36 | -- Content -- 37 | *item_hash <> 38 | if IPFS: multihash of json serialization of content 39 | if internal storage: hash of the content (sha256 only for now) 40 | if inline: hash of item_content using hash_type (sha256 only for now) 41 | *item_content : text <> 42 | mandatory if of inline type, json serialization of the message 43 | #item_type : text (optional) 44 | one of: 'ipfs', 'inline', 'storage'. 45 | default: 'ipfs' if no item_content and hash length 56, 46 | 'storage' if length 64, 'inline' if there is an item_content. 47 | #hash_type : text (optional) 48 | default: sha256 (only supported value for now) 49 | } 50 | 51 | hide circle 52 | @enduml 53 | 54 | Actual content sent by regular users can currently be of two types: 55 | 56 | - AGGREGATE: a key-value storage specific to an address 57 | - POST: unique data posts (unique data points, events 58 | 59 | .. uml:: 60 | 61 | @startuml 62 | object Message { 63 | ... 64 | } 65 | 66 | object Aggregate <> { 67 | key : text 68 | address : text <
> 69 | ~ content : object 70 | time : timestamp 71 | } 72 | 73 | object Post <> { 74 | type : text 75 | address : text <
> 76 | ~ content : object 77 | time : timestamp 78 | } 79 | 80 | object Store <> { 81 | address : text <
> 82 | item_type : same than Message.item_type 83 | (note: does not support inline) 84 | item_hash : same than Message.item_hash 85 | time : timestamp 86 | } 87 | 88 | 89 | Message ||--o| Aggregate 90 | Message ||--o| Post 91 | Message ||--o| Store 92 | @enduml 93 | -------------------------------------------------------------------------------- /docs/content/posts.rst: -------------------------------------------------------------------------------- 1 | .. _posts: 2 | 3 | ===== 4 | Posts 5 | ===== 6 | 7 | Posts are unique data entries, that can be amended later on. 8 | Example of use: 9 | 10 | - Events 11 | - Blog posts 12 | - Comments 13 | - and many more... 14 | 15 | Getting posts 16 | ------------- 17 | 18 | To get posts you have two options, either use the get_posts function, and get 19 | the posts in their amended state, or use get_message and only get the unique POST 20 | messages (with their content obviously). 21 | 22 | Since version 0.8.0, get_posts uses a PostFilter object to specify the filters: 23 | 24 | .. code-block:: python3 25 | 26 | >>> from aleph.sdk.chains.sol import get_fallback_account 27 | >>> from aleph.sdk.client import AuthenticatedAlephHttpClient 28 | >>> from aleph.sdk.posts import PostFilter 29 | >>> account = get_fallback_account() 30 | >>> async with AuthenticatedAlephHttpClient(account) as client: 31 | ... posts, status = await client.get_posts( 32 | ... post_filter=PostFilter(channel='MY_CHANNEL') 33 | ... ) 34 | 35 | 36 | Creating a Post 37 | --------------- 38 | 39 | Creating a post means creating a post object and wrapping it in a message. 40 | There is an helper for that: create_post. 41 | 42 | .. code-block:: python3 43 | 44 | >>> from aleph.sdk.chains.sol import get_fallback_account 45 | >>> from aleph.sdk.client import AuthenticatedAlephHttpClient 46 | >>> account = get_fallback_account() 47 | >>> async with AuthenticatedAlephHttpClient(account) as client: 48 | ... 
post, status = await client.create_post({'content': 'test'}, post_type='testtype', channel='MY_CHANNEL') 49 | >>> message 50 | { 51 | 'chain': 'SOL', 52 | 'channel': 'MY_CHANNEL', 53 | 'sender': '21hKNCB7xmDZ1pgteuJPbhKN1aDvsvPJRJ5Q95G5gyCW', 54 | 'type': 'POST', 55 | 'time': '2023-07-11T13:20:14.604485+00:00', 56 | 'item_content': '{"type":"testtype","address":"21hKNCB7xmDZ1pgteuJPbhKN1aDvsvPJRJ5Q95G5gyCW","content":{"content":"test"},"time":1573570575.2818618}', 57 | 'content': { 58 | 'type': 'testtype', 59 | 'address': '21hKNCB7xmDZ1pgteuJPbhKN1aDvsvPJRJ5Q95G5gyCW', 60 | 'content': { 61 | 'content': 'test' 62 | }, 63 | 'time': 1573570575.2818618 64 | }, 65 | 'item_hash': '02afdbf33ff2c6ddb46349298a4598a8801cec61dbaa8f3a17ba9d1ad6dd8cb1', 66 | 'signature': 'G7yJjMCPgvX04Dd9rsz0oEuuRFa4PfuKAMOPA3Oblf6vd5YA1x15jvWLL2WycnnzYLEl0usjTiVxBl530ZOmYgw=' 67 | } 68 | 69 | Amending a Post 70 | --------------- 71 | 72 | Amending is as simple as creating a new post, but with two differences: 73 | 74 | - The post_type must be 'amend' 75 | - When calling create_post, you must pass the hash of the post you want to amend as 'ref' 76 | 77 | Example: 78 | 79 | .. code-block:: python3 80 | 81 | >>> from aleph.sdk.chains.sol import get_fallback_account 82 | >>> from aleph.sdk.client import AuthenticatedAlephHttpClient 83 | >>> account = get_fallback_account() 84 | >>> async with AuthenticatedAlephHttpClient(account) as client: 85 | ... 
post, status = await client.create_post({'content': 'test2'}, post_type='amend', ref='02afdbf33ff2c6ddb46349298a4598a8801cec61dbaa8f3a17ba9d1ad6dd8cb1', channel='MY_CHANNEL') 86 | >>> message 87 | { 88 | 'chain': 'SOL', 89 | 'channel': 'MY_CHANNEL', 90 | 'sender': '21hKNCB7xmDZ1pgteuJPbhKN1aDvsvPJRJ5Q95G5gyCW', 91 | 'type': 'POST', 92 | 'time': '2023-07-11T13:20:14.604485+00:00', 93 | 'item_content': '{"type":"amend","address":"21hKNCB7xmDZ1pgteuJPbhKN1aDvsvPJRJ5Q95G5gyCW","content":{"content":"test2"},"time":1573570575.2818618,"ref":"02afdbf33ff2c6ddb46349298a4598a8801cec61dbaa8f3a17ba9d1ad6dd8cb1"}', 94 | 'content': { 95 | 'type': 'amend', 96 | 'address': '21hKNCB7xmDZ1pgteuJPbhKN1aDvsvPJRJ5Q95G5gyCW', 97 | 'content': { 98 | 'content': 'test2' 99 | }, 100 | 'time': 1573570575.2818618, 101 | 'ref': '02afdbf33ff2c6ddb46349298a4598a8801cec61dbaa8f3a17ba9d1ad6dd8cb1' 102 | }, 103 | 'item_hash': '02afdbf33ff2c6ddb46349298a4598a8801cec61dbaa8f3a17ba9d1ad6dd8cb1', 104 | 'signature': 'G7yJjMCPgvX04Dd9rsz0oEuuRFa4PfuKAMOPA3Oblf6vd5YA1x15jvWLL2WycnnzYLEl0usjTiVxBl530ZOmYgw=' 105 | } 106 | 107 | 108 | .. note:: 109 | 110 | More information on posts and messages in general can be found in the 111 | `pyaleph docs 112 | `_. 113 | -------------------------------------------------------------------------------- /docs/content/programs.rst: -------------------------------------------------------------------------------- 1 | .. _posts: 2 | 3 | ======== 4 | Programs 5 | ======== 6 | 7 | Programs are special entries that define code to run on Aleph.im virtual machines. 8 | 9 | Aleph.im currently supports programs written in Python that follow the 10 | `ASGI interface `_. 11 | 12 | In practice, the easiest approach is to use an 13 | `ASGI compatible web framework `_, 14 | such as 15 | `FastAPI `_ or 16 | `Django `_. 17 | 18 | Creating a program 19 | ------------------ 20 | 21 | Follow the `FastAPI Tutorial `_ 22 | to create your first program and test it using uvicorn. 
23 | 24 | Running on Aleph.im 25 | ------------------- 26 | 27 | Use the :ref:`cli` to upload your program. 28 | 29 | In this example, we will upload the 30 | `example_fastapi_2 example from Aleph-VM 31 | `_. 32 | 33 | .. code-block:: bash 34 | 35 | python3 -m aleph_client program /tmp/aleph-vm/examples/example_fastapi_2 __init__:app 36 | 37 | The command will output two URLs: 38 | 39 | - A URL link to see the message definition of your program 40 | - A URL to run your program 41 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | aleph-client 3 | ============ 4 | 5 | This is the documentation of **aleph-client**. 6 | 7 | Contents 8 | ======== 9 | 10 | .. toctree:: 11 | :maxdepth: 3 12 | 13 | content/introduction 14 | content/account 15 | content/aggregates 16 | content/posts 17 | content/programs 18 | content/cli 19 | content/async_notes 20 | 21 | License 22 | Authors 23 | Changelog 24 | Module Reference 25 | 26 | 27 | Indices and tables 28 | ================== 29 | 30 | * :ref:`genindex` 31 | * :ref:`modindex` 32 | * :ref:`search` 33 | 34 | .. _toctree: http://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html 35 | .. _reStructuredText: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html 36 | .. _references: http://www.sphinx-doc.org/en/stable/markup/inline.html 37 | .. _Python domain syntax: http://sphinx-doc.org/domains.html#the-python-domain 38 | .. _Sphinx: http://www.sphinx-doc.org/ 39 | .. _Python: http://docs.python.org/ 40 | .. _Numpy: http://docs.scipy.org/doc/numpy 41 | .. _SciPy: http://docs.scipy.org/doc/scipy/reference/ 42 | .. _matplotlib: https://matplotlib.org/contents.html# 43 | .. _Pandas: http://pandas.pydata.org/pandas-docs/stable 44 | .. _Scikit-Learn: http://scikit-learn.org/stable 45 | .. _autodoc: http://www.sphinx-doc.org/en/stable/ext/autodoc.html 46 | .. 
_Google style: https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings 47 | .. _NumPy style: https://numpydoc.readthedocs.io/en/latest/format.html 48 | .. _classical style: http://www.sphinx-doc.org/en/stable/domains.html#info-field-lists 49 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | .. _license: 2 | 3 | ======= 4 | License 5 | ======= 6 | 7 | .. include:: ../LICENSE.txt 8 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "hatchling.build" 3 | 4 | requires = [ "hatch-vcs", "hatchling" ] 5 | 6 | [project] 7 | name = "aleph-client" 8 | description = "Python Client library for the Aleph.im network" 9 | readme = "README.md" 10 | keywords = [ "Aleph.im", "Client", "Library", "Python" ] 11 | license = { file = "LICENSE.txt" } 12 | authors = [ 13 | { name = "Aleph.im Team", email = "hello@aleph.im" }, 14 | ] 15 | requires-python = ">=3.9" 16 | classifiers = [ 17 | "Development Status :: 4 - Beta", 18 | "Environment :: Console", 19 | "Framework :: aiohttp", 20 | "License :: OSI Approved :: MIT License", 21 | "Programming Language :: Python :: 3 :: Only", 22 | "Programming Language :: Python :: 3.9", 23 | "Programming Language :: Python :: 3.10", 24 | "Programming Language :: Python :: 3.11", 25 | "Programming Language :: Python :: 3.12", 26 | "Topic :: System :: Distributed Computing", 27 | ] 28 | 29 | dynamic = [ "version" ] 30 | dependencies = [ 31 | "aiodns==3.2", 32 | "aiohttp==3.11.13", 33 | "aleph-message>=1.0.1", 34 | "aleph-sdk-python @ git+https://github.com/aleph-im/aleph-sdk-python@andres-feature-implement_unichain_network", 35 | "base58==2.1.1", # Needed now as default with _load_account changement 36 | "click<8.2", 37 | 
"py-sr25519-bindings==0.2", # Needed for DOT signatures 38 | "pydantic>=2", 39 | "pygments==2.19.1", 40 | "pynacl==1.5", # Needed now as default with _load_account changement 41 | "python-magic==0.4.27", 42 | "rich==13.9.*", 43 | "setuptools>=65.5", 44 | "substrate-interface==1.7.11", # Needed for DOT signatures 45 | "textual==0.73", 46 | "typer==0.15.2", 47 | ] 48 | optional-dependencies.cosmos = [ "cosmospy==6" ] 49 | optional-dependencies.docs = [ "sphinxcontrib-plantuml==0.30" ] 50 | optional-dependencies.ethereum = [ "eth-account>=0.4" ] 51 | optional-dependencies.nuls2 = [ "aleph-nuls2==0.1" ] 52 | optional-dependencies.polkadot = [ "substrate-interface==1.7.11" ] 53 | optional-dependencies.solana = [ "base58==2.1.1", "pynacl==1.5" ] 54 | optional-dependencies.tezos = [ "aleph-pytezos==3.13.4", "pynacl==1.5" ] 55 | urls.Discussions = "https://community.aleph.im/" 56 | urls.Documentation = "https://docs.aleph.im/tools/aleph-client/" 57 | urls.Issues = "https://github.com/aleph-im/aleph-client/issues" 58 | urls.Source = "https://github.com/aleph-im/aleph-client" 59 | scripts.aleph = "aleph_client.__main__:app" 60 | readme-content-type = "text/x-rst; charset=UTF-8" 61 | 62 | [tool.hatch.metadata] 63 | allow-direct-references = true 64 | 65 | [tool.hatch.build.targets.sdist] 66 | include = [ 67 | "src/aleph_client", 68 | ] 69 | 70 | [tool.hatch.build.targets.wheel] 71 | packages = [ "src/aleph_client" ] 72 | 73 | [tool.hatch.version] 74 | source = "vcs" 75 | 76 | [tool.hatch.envs.default] 77 | platforms = [ "linux", "macos" ] 78 | dependencies = [ 79 | "setuptools>=65.5.0", 80 | "pytest==8.2.2", 81 | "pytest-asyncio==0.23.7", 82 | "pytest-cov==5.0.0", 83 | "mypy==1.10.0", 84 | "base58==2.1.1", 85 | "fastapi==0.100.0", 86 | "httpx==0.27.0", 87 | "types-requests==2.32.0.20240602", 88 | "types-setuptools==70.0.0.20240524", 89 | "typing_extensions==4.12.2", 90 | "sphinxcontrib-plantuml==0.30", 91 | ] 92 | 93 | [tool.hatch.envs.testing] 94 | type = "virtual" 95 | 
dependencies = [ 96 | "pytest==8.2.2", 97 | "pytest-asyncio==0.23.7", 98 | "pytest-cov==5.0.0", 99 | "mypy==1.10.0", 100 | "base58==2.1.1", 101 | "fastapi==0.100.0", 102 | "httpx==0.27.0", 103 | ] 104 | [tool.hatch.envs.testing.scripts] 105 | test = "pytest {args:} ./src/aleph_client/ ./tests/" 106 | test-cov = "pytest --cov {args:} ./src/aleph_client/ ./tests/ --cov-report=xml --cov-report=term ./tests/" 107 | cov-report = [ 108 | "coverage report", 109 | ] 110 | cov = [ 111 | "test-cov", 112 | "cov-report", 113 | ] 114 | 115 | [[tool.hatch.envs.all.matrix]] 116 | python = [ "3.9", "3.10", "3.11", "3.12" ] 117 | 118 | [tool.hatch.envs.linting] 119 | dependencies = [ 120 | "black==24.4.2", 121 | "mypy==1.10.0", 122 | "ruff==0.9.*", 123 | "isort==5.13.2", 124 | "yamlfix==1.17.0", 125 | "pyproject-fmt==2.2.1", 126 | 127 | "types-requests==2.32.0.20240602", 128 | "types-setuptools==70.0.0.20240524", 129 | "typing_extensions==4.12.2", 130 | ] 131 | [tool.hatch.envs.linting.scripts] 132 | typing = "mypy --config-file=pyproject.toml {args:} ./src/ ./tests/" 133 | style = [ 134 | "ruff check {args:}", 135 | "black --check --diff {args:} ./src/ ./tests/", 136 | "isort --check-only --profile black {args:} ./src/ ./tests/", 137 | "yamlfix --check .", 138 | "pyproject-fmt --check pyproject.toml", 139 | ] 140 | fmt = [ 141 | "black {args:} ./src/ ./tests/", 142 | "ruff check --fix {args:}", 143 | "isort --profile black {args:} ./src/ ./tests/", 144 | "yamlfix .", 145 | "pyproject-fmt pyproject.toml", 146 | "style", 147 | ] 148 | all = [ 149 | "style", 150 | "typing", 151 | ] 152 | 153 | [tool.black] 154 | line-length = 120 155 | target-version = [ "py39" ] 156 | 157 | [tool.ruff] 158 | target-version = "py39" 159 | line-length = 120 160 | exclude = [ "docs/", "scripts/gendoc.py" ] 161 | 162 | lint.select = [ 163 | "A", 164 | "ARG", 165 | "B", 166 | "C", 167 | "DTZ", 168 | "E", 169 | "EM", 170 | "F", 171 | "FBT", 172 | "I", 173 | "ICN", 174 | "ISC", 175 | "N", 176 | "PLC", 177 
| "PLE", 178 | "PLR", 179 | "PLW", 180 | "Q", 181 | "RUF", 182 | "S", 183 | "T", 184 | "TID", 185 | "UP", 186 | "W", 187 | "YTT", 188 | ] 189 | lint.ignore = [ 190 | "A001", 191 | "A002", 192 | "ARG001", 193 | "ARG002", 194 | # Be compatible with typer 195 | "B008", 196 | # Ignore complexity 197 | "C901", 198 | # Allow boolean arguments in functions definitions 199 | "FBT001", 200 | "FBT002", 201 | "FBT003", 202 | # to avoid errors on variables like total_kB 203 | "N815", 204 | "PLR0911", 205 | "PLR0912", 206 | "PLR0913", 207 | "PLR0915", 208 | "PLR2004", 209 | "RUF012", 210 | # Allow the use of assert statements 211 | "S101", 212 | # Ignore checks for possible passwords 213 | "S105", # "S106", "S107", 214 | # ignore false positive 215 | "S603", 216 | # don't perform this modification: 217 | # Union[a, b] -> a | b /Optional[type] -> type | None 218 | # since python 3.9 doesn't support it 219 | "UP007", 220 | ] 221 | #[tool.ruff.isort] 222 | #known-first-party = ["aleph_client"] 223 | lint.per-file-ignores."src/aleph_client/commands/help_strings.py" = [ "E501" ] 224 | lint.per-file-ignores."tests/unit/*" = [ "T201" ] 225 | 226 | lint.per-file-ignores."tests/unit/test_instance.py" = [ "S106", "T201" ] 227 | 228 | [tool.pytest.ini_options] 229 | pythonpath = [ 230 | "src", 231 | ] 232 | testpaths = [ 233 | "tests", 234 | ] 235 | asyncio_default_fixture_loop_scope = "function" 236 | 237 | [tool.coverage.run] 238 | branch = true 239 | parallel = true 240 | source_pkgs = [ "aleph_client", "tests" ] 241 | 242 | [tool.coverage.paths] 243 | aleph_client = [ "src/aleph_client" ] 244 | tests = [ "tests" ] 245 | 246 | [tool.coverage.report] 247 | exclude_lines = [ 248 | "no cov", 249 | "if __name__ == .__main__.:", 250 | "if TYPE_CHECKING:", 251 | ] 252 | 253 | [tool.mypy] 254 | python_version = "3.9" 255 | install_types = true 256 | non_interactive = true 257 | explicit_package_bases = true 258 | exclude = "conftest.py" 259 | show_column_numbers = true 260 | 261 | # 
Suppressing errors 262 | # Shows errors related to strict None checking, if the global strict_optional flag is enabled 263 | strict_optional = true 264 | no_implicit_optional = true 265 | 266 | # Import discovery 267 | # Suppresses error messages about imports that cannot be resolved 268 | ignore_missing_imports = true 269 | # Forces import to reference the original source file 270 | no_implicit_reexport = true 271 | # show error messages from unrelated files 272 | follow_imports = "silent" 273 | follow_imports_for_stubs = false 274 | 275 | # Disallow dynamic typing 276 | # Disallows usage of types that come from unfollowed imports 277 | disallow_any_unimported = false 278 | # Disallows all expressions in the module that have type Any 279 | disallow_any_expr = false 280 | # Disallows functions that have Any in their signature after decorator transformation. 281 | disallow_any_decorated = false 282 | # Disallows explicit Any in type positions such as type annotations and generic type parameters. 283 | disallow_any_explicit = false 284 | # Disallows usage of generic types that do not specify explicit type parameters. 285 | disallow_any_generics = false 286 | # Disallows subclassing a value of type Any. 287 | disallow_subclassing_any = false 288 | 289 | # Untyped definitions and calls 290 | # Disallows calling functions without type annotations from functions with type annotations. 291 | disallow_untyped_calls = false 292 | # Disallows defining functions without type annotations or with incomplete type annotations 293 | disallow_untyped_defs = false 294 | # Disallows defining functions with incomplete type annotations. 295 | check_untyped_defs = true 296 | # Type-checks the interior of functions without type annotations. 297 | disallow_incomplete_defs = false 298 | # Reports an error whenever a function with type annotations is decorated with a decorator without annotations. 
299 | disallow_untyped_decorators = false 300 | 301 | # Prohibit comparisons of non-overlapping types (ex: 42 == "no") 302 | strict_equality = true 303 | 304 | # Configuring warnings 305 | # Warns about unneeded # type: ignore comments. 306 | warn_unused_ignores = true 307 | # Shows errors for missing return statements on some execution paths. 308 | warn_no_return = true 309 | # Shows a warning when returning a value with type Any from a function declared with a non- Any return type. 310 | warn_return_any = false 311 | 312 | [tool.sphinx] 313 | source-dir = "docs" 314 | build-dir = "docs/_build" 315 | 316 | [tool.yamlfix] 317 | sequence_style = "keep_style" 318 | preserve_quotes = true 319 | whitelines = 1 320 | section_whitelines = 2 321 | -------------------------------------------------------------------------------- /scripts/build-and-shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -euf 4 | 5 | # Use Podman if installed, else use Docker 6 | if hash podman 2> /dev/null 7 | then 8 | DOCKER_COMMAND=podman 9 | else 10 | DOCKER_COMMAND=docker 11 | fi 12 | 13 | $DOCKER_COMMAND build -t aleph-client -f docker/Dockerfile . 14 | $DOCKER_COMMAND run -ti --rm --entrypoint /bin/bash -v "$(pwd)":/opt/aleph-client aleph-client 15 | -------------------------------------------------------------------------------- /scripts/build-and-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -euf 4 | 5 | # Use Podman if installed, else use Docker 6 | if hash podman 2> /dev/null 7 | then 8 | DOCKER_COMMAND=podman 9 | else 10 | DOCKER_COMMAND=docker 11 | fi 12 | 13 | $DOCKER_COMMAND build -t aleph-client -f docker/Dockerfile . 
14 | $DOCKER_COMMAND run -ti --rm --entrypoint /opt/venv/bin/pytest aleph-client /opt/aleph-client/ "$@" 15 | $DOCKER_COMMAND run -ti --rm --entrypoint /opt/venv/bin/mypy aleph-client /opt/aleph-client/src/ --ignore-missing-imports 16 | -------------------------------------------------------------------------------- /scripts/build-to-publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -euf 4 | 5 | # Use Podman if installed, else use Docker 6 | if hash podman 2> /dev/null 7 | then 8 | DOCKER_COMMAND=podman 9 | else 10 | DOCKER_COMMAND=docker 11 | fi 12 | 13 | mkdir -p ./dist 14 | chmod 0777 ./dist 15 | 16 | $DOCKER_COMMAND build -t aleph-client -f docker/Dockerfile . 17 | $DOCKER_COMMAND run -ti --rm \ 18 | -w /opt/aleph-client \ 19 | -v "$(pwd)/dist":/opt/aleph-client/dist \ 20 | --entrypoint /bin/bash \ 21 | aleph-client 22 | -------------------------------------------------------------------------------- /scripts/gendoc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Copied from typer.cli.py to customise doc generation 4 | """ 5 | 6 | import importlib.util 7 | import re 8 | import sys 9 | from pathlib import Path 10 | from typing import Any, Optional 11 | 12 | import click 13 | import typer 14 | import typer.core 15 | from click import Command, Group 16 | 17 | default_app_names = ("app", "cli", "main") 18 | default_func_names = ("main", "cli", "app") 19 | 20 | utils_app = typer.Typer(help="Extra utility commands for Typer apps.") 21 | 22 | 23 | class State: 24 | def __init__(self) -> None: 25 | self.app: Optional[str] = None 26 | self.func: Optional[str] = None 27 | self.file: Optional[Path] = None 28 | self.module: Optional[str] = None 29 | 30 | 31 | state = State() 32 | 33 | 34 | def maybe_update_state(ctx: click.Context) -> None: 35 | path_or_module = ctx.params.get("path_or_module") 36 | if path_or_module: 37 | 
file_path = Path(path_or_module) 38 | if file_path.exists() and file_path.is_file(): 39 | state.file = file_path 40 | else: 41 | if not re.fullmatch(r"[a-zA-Z_]\w*(\.[a-zA-Z_]\w*)*", path_or_module): 42 | typer.echo(f"Not a valid file or Python module: {path_or_module}", err=True) 43 | sys.exit(1) 44 | state.module = path_or_module 45 | app_name = ctx.params.get("app") 46 | if app_name: 47 | state.app = app_name 48 | func_name = ctx.params.get("func") 49 | if func_name: 50 | state.func = func_name 51 | 52 | 53 | class TyperCLIGroup(typer.core.TyperGroup): 54 | def list_commands(self, ctx: click.Context) -> list[str]: 55 | self.maybe_add_run(ctx) 56 | return super().list_commands(ctx) 57 | 58 | def get_command(self, ctx: click.Context, name: str) -> Optional[Command]: 59 | self.maybe_add_run(ctx) 60 | return super().get_command(ctx, name) 61 | 62 | def invoke(self, ctx: click.Context) -> Any: 63 | self.maybe_add_run(ctx) 64 | return super().invoke(ctx) 65 | 66 | def maybe_add_run(self, ctx: click.Context) -> None: 67 | maybe_update_state(ctx) 68 | maybe_add_run_to_cli(self) 69 | 70 | 71 | def get_typer_from_module(module: Any) -> Optional[typer.Typer]: 72 | # Try to get defined app 73 | if state.app: 74 | obj = getattr(module, state.app, None) 75 | if not isinstance(obj, typer.Typer): 76 | typer.echo(f"Not a Typer object: --app {state.app}", err=True) 77 | sys.exit(1) 78 | return obj 79 | # Try to get defined function 80 | if state.func: 81 | func_obj = getattr(module, state.func, None) 82 | if not callable(func_obj): 83 | typer.echo(f"Not a function: --func {state.func}", err=True) 84 | sys.exit(1) 85 | sub_app = typer.Typer() 86 | sub_app.command()(func_obj) 87 | return sub_app 88 | # Iterate and get a default object to use as CLI 89 | local_names = dir(module) 90 | local_names_set = set(local_names) 91 | # Try to get a default Typer app 92 | for name in default_app_names: 93 | if name in local_names_set: 94 | obj = getattr(module, name, None) 95 | if 
isinstance(obj, typer.Typer): 96 | return obj 97 | # Try to get any Typer app 98 | for name in local_names_set - set(default_app_names): 99 | obj = getattr(module, name) 100 | if isinstance(obj, typer.Typer): 101 | return obj 102 | # Try to get a default function 103 | for func_name in default_func_names: 104 | func_obj = getattr(module, func_name, None) 105 | if callable(func_obj): 106 | sub_app = typer.Typer() 107 | sub_app.command()(func_obj) 108 | return sub_app 109 | # Try to get any func app 110 | for func_name in local_names_set - set(default_func_names): 111 | func_obj = getattr(module, func_name) 112 | if callable(func_obj): 113 | sub_app = typer.Typer() 114 | sub_app.command()(func_obj) 115 | return sub_app 116 | return None 117 | 118 | 119 | def get_typer_from_state() -> Optional[typer.Typer]: 120 | spec = None 121 | if state.file: 122 | module_name = state.file.name 123 | spec = importlib.util.spec_from_file_location(module_name, str(state.file)) 124 | elif state.module: 125 | spec = importlib.util.find_spec(state.module) 126 | if spec is None: 127 | if state.file: 128 | typer.echo(f"Could not import as Python file: {state.file}", err=True) 129 | else: 130 | typer.echo(f"Could not import as Python module: {state.module}", err=True) 131 | sys.exit(1) 132 | module = importlib.util.module_from_spec(spec) 133 | spec.loader.exec_module(module) # type: ignore 134 | obj = get_typer_from_module(module) 135 | return obj 136 | 137 | 138 | def maybe_add_run_to_cli(cli: click.Group) -> None: 139 | if "run" not in cli.commands: 140 | if state.file or state.module: 141 | obj = get_typer_from_state() 142 | if obj: 143 | obj._add_completion = False 144 | click_obj = typer.main.get_command(obj) 145 | click_obj.name = "run" 146 | if not click_obj.help: 147 | click_obj.help = "Run the provided Typer app." 
148 | cli.add_command(click_obj) 149 | 150 | 151 | @utils_app.callback(cls=TyperCLIGroup, no_args_is_help=True) 152 | def callback( 153 | ctx: typer.Context, 154 | *, 155 | path_or_module: str = typer.Argument(None), 156 | app: str = typer.Option(None, help="The typer app object/variable to use."), 157 | func: str = typer.Option(None, help="The function to convert to Typer."), 158 | ) -> None: 159 | """ 160 | Run Typer scripts with completion, without having to create a package. 161 | 162 | You probably want to install completion for the typer command: 163 | 164 | $ typer --install-completion 165 | 166 | https://typer.tiangolo.com/ 167 | """ 168 | maybe_update_state(ctx) 169 | 170 | 171 | def get_docs_for_click( 172 | *, 173 | obj: Command, 174 | ctx: typer.Context, 175 | indent: int = 0, 176 | name: str = "", 177 | call_prefix: str = "", 178 | title: Optional[str] = None, 179 | ) -> str: 180 | docs = "#" * (1 + indent) 181 | command_name = name or obj.name 182 | if call_prefix: 183 | command_name = f"{call_prefix} {command_name}" 184 | if not title: 185 | title = f"`{command_name}`" if command_name else "CLI" 186 | docs += f" {title}\n\n" 187 | if obj.help: 188 | docs += f"{obj.help}\n\n" 189 | usage_pieces = obj.collect_usage_pieces(ctx) 190 | if usage_pieces: 191 | docs += "**Usage**:\n\n" 192 | docs += "```console\n" 193 | docs += "$ " 194 | if command_name: 195 | docs += f"{command_name} " 196 | docs += f"{' '.join(usage_pieces)}\n" 197 | docs += "```\n\n" 198 | args = [] 199 | opts = [] 200 | for param in obj.get_params(ctx): 201 | rv = param.get_help_record(ctx) 202 | if rv is not None: 203 | if param.param_type_name == "argument": 204 | args.append(rv) 205 | elif param.param_type_name == "option": 206 | opts.append(rv) 207 | if args: 208 | docs += "**Arguments**:\n\n" 209 | for arg_name, arg_help in args: 210 | docs += f"* `{arg_name}`" 211 | if arg_help: 212 | docs += f": {arg_help}" 213 | docs += "\n" 214 | docs += "\n" 215 | if opts: 216 | docs += 
"**Options**:\n\n" 217 | for opt_name, opt_help in opts: 218 | docs += f"* `{opt_name}`" 219 | if opt_help: 220 | docs += f": {opt_help}" 221 | docs += "\n" 222 | docs += "\n" 223 | if obj.epilog: 224 | docs += f"{obj.epilog}\n\n" 225 | if isinstance(obj, Group): 226 | group = obj 227 | commands = group.list_commands(ctx) 228 | if commands: 229 | docs += "**Commands**:\n\n" 230 | for command in commands: 231 | command_obj = group.get_command(ctx, command) 232 | assert command_obj 233 | anchor_name = f"{command_name.replace(' ', '-')}-{command_obj.name}" 234 | docs += f"* [`{command_obj.name}`](#{anchor_name})" 235 | command_help = command_obj.get_short_help_str(limit=1000) 236 | if command_help: 237 | docs += f": {command_help}" 238 | docs += "\n" 239 | docs += "\n" 240 | for command in commands: 241 | command_obj = group.get_command(ctx, command) 242 | assert command_obj 243 | use_prefix = "" 244 | if command_name: 245 | use_prefix += f"{command_name}" 246 | docs += get_docs_for_click(obj=command_obj, ctx=ctx, indent=indent + 1, call_prefix=use_prefix) 247 | return docs 248 | 249 | 250 | def replace_local_values(text: str) -> str: 251 | # Replace username 252 | current_user = Path.home().owner() 253 | text = text.replace(current_user, "$USER") 254 | 255 | # Replace private key file path 256 | pattern = r"[^/]+\.key" 257 | replacement = r"ethereum.key" 258 | text = re.sub(pattern, replacement, text) 259 | 260 | return text 261 | 262 | 263 | @utils_app.command() 264 | def docs( 265 | ctx: typer.Context, 266 | name: str = typer.Option("", help="The name of the CLI program to use in docs."), 267 | output: Optional[Path] = typer.Option( 268 | None, 269 | help="An output file to write docs to, like README.md.", 270 | file_okay=True, 271 | dir_okay=False, 272 | ), 273 | title: Optional[str] = typer.Option( 274 | None, 275 | help="The title for the documentation page. 
If not provided, the name of the program is used.", 276 | ), 277 | ) -> None: 278 | """ 279 | Generate Markdown docs for a Typer app. 280 | """ 281 | typer_obj = get_typer_from_state() 282 | if not typer_obj: 283 | typer.echo("No Typer app found", err=True) 284 | raise typer.Abort() 285 | click_obj = typer.main.get_command(typer_obj) 286 | generated_docs = get_docs_for_click(obj=click_obj, ctx=ctx, name=name, title=title) 287 | clean_docs = f"{generated_docs.strip()}\n" 288 | fixed_docs = replace_local_values(clean_docs) 289 | if output: 290 | output.write_text(fixed_docs) 291 | typer.echo(f"Docs saved to: {output}") 292 | else: 293 | typer.echo(fixed_docs) 294 | 295 | 296 | utils_app() 297 | -------------------------------------------------------------------------------- /src/aleph_client/__init__.py: -------------------------------------------------------------------------------- 1 | from importlib.metadata import PackageNotFoundError, version 2 | 3 | try: 4 | # Change here if project is renamed and does not equal the package name 5 | __version__ = version("aleph-client") 6 | except PackageNotFoundError: 7 | __version__ = "unknown" 8 | 9 | # Deprecation check 10 | moved_types = ["__version__", "AlephClient", "AuthenticatedAlephClient", "synchronous", "asynchronous"] 11 | 12 | 13 | def __getattr__(name): 14 | if name in moved_types: 15 | msg = ( 16 | f"The 'aleph_client.{name}' type is deprecated and has been removed from " 17 | f"aleph_client. Please use `aleph.sdk.{name}` instead." 18 | ) 19 | raise ImportError(msg) 20 | -------------------------------------------------------------------------------- /src/aleph_client/__main__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Aleph Client command-line interface. 
3 | """ 4 | 5 | from aleph_client.commands import ( 6 | about, 7 | account, 8 | aggregate, 9 | domain, 10 | files, 11 | instance, 12 | message, 13 | node, 14 | pricing, 15 | program, 16 | ) 17 | from aleph_client.utils import AsyncTyper 18 | 19 | app = AsyncTyper(no_args_is_help=True) 20 | 21 | app.add_typer(account.app, name="account", help="Manage accounts") 22 | app.add_typer( 23 | message.app, 24 | name="message", 25 | help="Manage messages (post, amend, watch and forget) on aleph.im & twentysix.cloud", 26 | ) 27 | app.add_typer( 28 | aggregate.app, name="aggregate", help="Manage aggregate messages and permissions on aleph.im & twentysix.cloud" 29 | ) 30 | app.add_typer(files.app, name="file", help="Manage files (upload and pin on IPFS) on aleph.im & twentysix.cloud") 31 | app.add_typer(program.app, name="program", help="Manage programs (micro-VMs) on aleph.im & twentysix.cloud") 32 | app.add_typer(instance.app, name="instance", help="Manage instances (VMs) on aleph.im & twentysix.cloud") 33 | app.add_typer(domain.app, name="domain", help="Manage custom domain (DNS) on aleph.im & twentysix.cloud") 34 | app.add_typer(node.app, name="node", help="Get node info on aleph.im & twentysix.cloud") 35 | app.add_typer(about.app, name="about", help="Display the informations of Aleph CLI") 36 | app.command("pricing")(pricing.prices_for_service) 37 | 38 | if __name__ == "__main__": 39 | app() 40 | -------------------------------------------------------------------------------- /src/aleph_client/account.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aleph-im/aleph-client/c444fd7aa3ee6b0ebb4ff94b172955b9174ceddd/src/aleph_client/account.py -------------------------------------------------------------------------------- /src/aleph_client/commands/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aleph-im/aleph-client/c444fd7aa3ee6b0ebb4ff94b172955b9174ceddd/src/aleph_client/commands/__init__.py -------------------------------------------------------------------------------- /src/aleph_client/commands/about.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from importlib.metadata import version as importlib_version 4 | 5 | import typer 6 | 7 | from aleph_client.utils import AsyncTyper 8 | 9 | app = AsyncTyper(no_args_is_help=True) 10 | 11 | 12 | def get_version(value: bool): 13 | __version__ = "NaN" 14 | dist_name = "aleph-client" 15 | if value: 16 | try: 17 | __version__ = importlib_version(dist_name) 18 | finally: 19 | typer.echo(f"Aleph CLI Version: {__version__}") 20 | raise typer.Exit(1) 21 | 22 | 23 | @app.command() 24 | def version(): 25 | get_version(True) 26 | -------------------------------------------------------------------------------- /src/aleph_client/commands/files.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json as json_lib 4 | import logging 5 | from datetime import datetime 6 | from pathlib import Path 7 | from typing import Annotated, Optional 8 | 9 | import aiohttp 10 | import typer 11 | from aiohttp import ClientResponseError 12 | from aleph.sdk import AlephHttpClient, AuthenticatedAlephHttpClient 13 | from aleph.sdk.account import _load_account 14 | from aleph.sdk.conf import settings 15 | from aleph.sdk.types import AccountFromPrivateKey, StorageEnum, StoredContent 16 | from aleph.sdk.utils import safe_getattr 17 | from aleph_message.models import ItemHash, StoreMessage 18 | from aleph_message.status import MessageStatus 19 | from pydantic import BaseModel, Field 20 | from rich import box 21 | from rich.console import Console 22 | from rich.table import Table 23 | 24 | from aleph_client.commands import help_strings 25 | from 
aleph_client.commands.utils import setup_logging 26 | from aleph_client.utils import AsyncTyper 27 | 28 | logger = logging.getLogger(__name__) 29 | app = AsyncTyper(no_args_is_help=True) 30 | 31 | 32 | @app.command() 33 | async def pin( 34 | item_hash: Annotated[str, typer.Argument(help="IPFS hash to pin on aleph.im")], 35 | channel: Annotated[Optional[str], typer.Option(help=help_strings.CHANNEL)] = settings.DEFAULT_CHANNEL, 36 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 37 | private_key_file: Annotated[ 38 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 39 | ] = settings.PRIVATE_KEY_FILE, 40 | ref: Annotated[Optional[str], typer.Option(help=help_strings.REF)] = None, 41 | debug: Annotated[bool, typer.Option()] = False, 42 | ): 43 | """Persist a file from IPFS on aleph.im.""" 44 | 45 | setup_logging(debug) 46 | 47 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 48 | 49 | async with AuthenticatedAlephHttpClient(account=account, api_server=settings.API_HOST) as client: 50 | result: StoreMessage 51 | status: MessageStatus 52 | result, status = await client.create_store( 53 | file_hash=item_hash, 54 | storage_engine=StorageEnum.ipfs, 55 | channel=channel, 56 | ref=ref, 57 | ) 58 | logger.debug("Upload finished") 59 | typer.echo(f"{result.model_dump_json(indent=4)}") 60 | 61 | 62 | @app.command() 63 | async def upload( 64 | path: Annotated[Path, typer.Argument(help="Path of the file to upload")], 65 | channel: Annotated[Optional[str], typer.Option(help=help_strings.CHANNEL)] = settings.DEFAULT_CHANNEL, 66 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 67 | private_key_file: Annotated[ 68 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 69 | ] = settings.PRIVATE_KEY_FILE, 70 | ref: Annotated[Optional[str], typer.Option(help=help_strings.REF)] = None, 71 | debug: 
Annotated[bool, typer.Option()] = False, 72 | ): 73 | """Upload and store a file on aleph.im.""" 74 | 75 | setup_logging(debug) 76 | 77 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 78 | 79 | async with AuthenticatedAlephHttpClient(account=account, api_server=settings.API_HOST) as client: 80 | if not path.is_file(): 81 | typer.echo(f"Error: File not found: '{path}'") 82 | raise typer.Exit(code=1) 83 | 84 | with open(path, "rb") as fd: 85 | logger.debug("Reading file") 86 | # TODO: Read in lazy mode instead of copying everything in memory 87 | file_content = fd.read() 88 | storage_engine = StorageEnum.ipfs if len(file_content) > 4 * 1024 * 1024 else StorageEnum.storage 89 | logger.debug("Uploading file") 90 | result: StoreMessage 91 | status: MessageStatus 92 | try: 93 | result, status = await client.create_store( 94 | file_content=file_content, 95 | storage_engine=storage_engine, 96 | channel=channel, 97 | guess_mime_type=True, 98 | ref=ref, 99 | ) 100 | logger.debug("Upload finished") 101 | typer.echo(f"{result.model_dump_json(indent=4)}") 102 | except ClientResponseError as e: 103 | typer.echo(f"{e}") 104 | 105 | if e.status == 413: 106 | typer.echo("File is too large to be uploaded. 
Please use aleph file pin") 107 | else: 108 | typer.echo(f"Error uploading file\nstatus: {e.status}\nmessage: {e.message}") 109 | 110 | 111 | @app.command() 112 | async def download( 113 | hash: Annotated[str, typer.Argument(help="hash to download from aleph.")], 114 | use_ipfs: Annotated[bool, typer.Option(help="Download using IPFS instead of storage")] = False, 115 | output_path: Annotated[Path, typer.Option(help="Output directory path")] = Path("."), 116 | file_name: Annotated[Optional[str], typer.Option(help="Output file name (without extension)")] = None, 117 | file_extension: Annotated[Optional[str], typer.Option(help="Output file extension")] = None, 118 | only_info: Annotated[bool, typer.Option()] = False, 119 | verbose: Annotated[bool, typer.Option()] = True, 120 | debug: Annotated[bool, typer.Option()] = False, 121 | ) -> Optional[StoredContent]: 122 | """Download a file from aleph.im or display related infos.""" 123 | 124 | setup_logging(debug) 125 | 126 | if not only_info: 127 | output_path.mkdir(parents=True, exist_ok=True) 128 | 129 | file_name = file_name if file_name else hash 130 | file_extension = file_extension if file_extension else "" 131 | 132 | output_file_path = output_path / f"{file_name}{file_extension}" 133 | 134 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 135 | logger.info(f"Downloading {hash} ...") 136 | with open(output_file_path, "wb") as fd: 137 | if not use_ipfs: 138 | await client.download_file_to_buffer(hash, fd) 139 | else: 140 | await client.download_file_ipfs_to_buffer(hash, fd) 141 | 142 | logger.debug("File downloaded successfully.") 143 | else: 144 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 145 | content = await client.get_stored_content(hash) 146 | if verbose: 147 | typer.echo( 148 | f"Filename: {content.filename}\nHash: {content.hash}\nURL: {content.url}" 149 | if safe_getattr(content, "url") 150 | else safe_getattr(content, "error") 151 | ) 152 | return content 153 | 
return None 154 | 155 | 156 | @app.command() 157 | async def forget( 158 | item_hash: Annotated[ 159 | str, 160 | typer.Argument( 161 | help="Hash(es) to forget. Must be a comma separated list. Example: `123...abc` or `123...abc,456...xyz`" 162 | ), 163 | ], 164 | reason: Annotated[str, typer.Argument(help="reason to forget")] = "User deletion", 165 | channel: Annotated[Optional[str], typer.Option(help=help_strings.CHANNEL)] = settings.DEFAULT_CHANNEL, 166 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 167 | private_key_file: Annotated[ 168 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 169 | ] = settings.PRIVATE_KEY_FILE, 170 | debug: Annotated[bool, typer.Option()] = False, 171 | ): 172 | """forget a file and his message on aleph.im.""" 173 | 174 | setup_logging(debug) 175 | 176 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 177 | 178 | hashes = [ItemHash(item_hash) for item_hash in item_hash.split(",")] 179 | 180 | async with AuthenticatedAlephHttpClient(account=account, api_server=settings.API_HOST) as client: 181 | value = await client.forget(hashes=hashes, reason=reason, channel=channel) 182 | typer.echo(f"{value[0].model_dump_json(indent=4)}") 183 | 184 | 185 | class GetAccountFilesQueryParams(BaseModel): 186 | pagination: int = Field( 187 | default=100, 188 | ge=0, 189 | description="Maximum number of files to return. Specifying 0 removes this limit.", 190 | ) 191 | page: int = Field(default=1, ge=1, description="Offset in pages. Starts at 1.") 192 | sort_order: int = Field( 193 | default=-1, 194 | description=( 195 | "Order in which files should be listed: -1 means most recent messages first, 1 means older messages first." 
def _parse_created_timestamp(raw: str) -> str:
    """Format an API `created` timestamp as `YYYY-MM-DD HH:MM:SS`.

    Prefers the flexible ISO-8601 parser (accepts timestamps with or
    without fractional seconds; a trailing `Z` is normalized to an
    explicit UTC offset), then falls back to the original strict format.
    """
    try:
        created = datetime.fromisoformat(raw.replace("Z", "+00:00"))
    except ValueError:
        # FIX: the previous single strict format ("%Y-%m-%dT%H:%M:%S.%f%z")
        # crashed on timestamps that omit microseconds; it is kept only as
        # a fallback for exotic fractional-second precisions.
        created = datetime.strptime(raw, "%Y-%m-%dT%H:%M:%S.%f%z")
    return created.strftime("%Y-%m-%d %H:%M:%S")


def _show_files(files_data: dict) -> None:
    """Render the files API payload as a rich table plus summary lines.

    Args:
        files_data: Decoded JSON from /api/v0/addresses/<address>/files
            (keys: files, address, total_size, pagination_page,
            pagination_total, pagination_per_page).
    """
    table = Table(title="Files Information", box=box.SIMPLE_HEAVY)
    table.add_column("File Hash", style="cyan", no_wrap=True, min_width=None)
    table.add_column("Size (MB)", style="magenta", min_width=None)
    table.add_column("Type", style="green", min_width=None)
    table.add_column("Created", style="blue", min_width=None)
    table.add_column("Item Hash", style="yellow", min_width=None, no_wrap=True)

    console = Console()

    # Add files to the table
    for file_info in files_data["files"]:
        formatted_created = _parse_created_timestamp(file_info["created"])
        # API reports size in bytes; display in MB.
        size_in_mb = float(file_info["size"]) / (1024 * 1024)
        table.add_row(
            file_info["file_hash"],
            f"{size_in_mb:.4f} MB",
            file_info["type"],
            formatted_created,
            file_info["item_hash"],
        )

    pagination_page = files_data["pagination_page"]
    pagination_total = files_data["pagination_total"]
    pagination_per_page = files_data["pagination_per_page"]
    address = files_data["address"]
    total_size = float(files_data["total_size"]) / (1024 * 1024)

    # Summary header: owner address and cumulative size.
    console.print(
        f"\n[bold]Address:[/bold] {address}",
    )
    console.print(f"[bold]Total Size:[/bold] ~ {total_size:.4f} MB")

    console.print("\n[bold]Pagination:[/bold]")
    console.print(
        f"[bold]Page:[/bold] {pagination_page}",
    )
    console.print(
        f"[bold]Total Item:[/bold] {pagination_total}",
    )
    console.print(f"[bold]Items Max Per Page:[/bold] {pagination_per_page}")

    console.print(table)
@app.command(name="list")
async def list_files(
    address: Annotated[Optional[str], typer.Option(help="Address")] = None,
    private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING,
    private_key_file: Annotated[
        Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE)
    ] = settings.PRIVATE_KEY_FILE,
    pagination: Annotated[int, typer.Option(help="Maximum number of files to return.")] = 100,
    page: Annotated[int, typer.Option(help="Offset in pages.")] = 1,
    sort_order: Annotated[
        int,
        typer.Option(
            help="Order in which files should be listed: -1 means most recent messages first,"
            " 1 means older messages first."
        ),
    ] = -1,
    # NOTE: `json` intentionally shadows the stdlib module (it is a CLI flag
    # name); this is why the module is imported as `json_lib` in this file.
    json: Annotated[bool, typer.Option(help="Print as json instead of rich table")] = False,
):
    """List all files for a given address"""
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)

    # When no explicit address is given, default to the loaded account's own.
    if account and not address:
        address = account.get_address()

    if address:
        # Build the query parameters
        query_params = GetAccountFilesQueryParams(pagination=pagination, page=page, sort_order=sort_order)

        uri = f"{settings.API_HOST}/api/v0/addresses/{address}/files"
        async with aiohttp.ClientSession() as session:
            response = await session.get(uri, params=query_params.model_dump())
            if response.status == 200:
                files_data = await response.json()
                formatted_files_data = json_lib.dumps(files_data, indent=4)
                # Rich table by default; raw JSON when --json is passed.
                if not json:
                    _show_files(files_data)
                else:
                    typer.echo(formatted_files_data)
            else:
                typer.echo(f"Failed to retrieve files for address {address}. Status code: {response.status}")
    else:
        # Neither an address nor any usable key material was provided.
        typer.echo("Error: Please provide either a private key, private key file, or an address.")
Status code: {response.status}") 286 | else: 287 | typer.echo("Error: Please provide either a private key, private key file, or an address.") 288 | -------------------------------------------------------------------------------- /src/aleph_client/commands/help_strings.py: -------------------------------------------------------------------------------- 1 | IPFS_HASH = "IPFS Content identifier (CID)" 2 | CHANNEL = "Aleph.im network channel where the message is or will be broadcasted" 3 | PRIVATE_KEY = "Your private key. Cannot be used with --private-key-file" 4 | PRIVATE_KEY_FILE = "Path to your private key file" 5 | REF = "Item hash of the message to update" 6 | SIGNABLE_MESSAGE = "Message to sign" 7 | CUSTOM_DOMAIN_TARGET_TYPES = "IPFS|PROGRAM|INSTANCE" 8 | CUSTOM_DOMAIN_OWNER_ADDRESS = "Owner address. Defaults to current account address" 9 | CUSTOM_DOMAIN_NAME = "Domain name. ex: aleph.im" 10 | CUSTOM_DOMAIN_ITEM_HASH = "Item hash" 11 | SKIP_VOLUME = "Skip prompt to attach more volumes" 12 | PERSISTENT_VOLUME = "Persistent volumes are allocated on the host machine and are not deleted when the VM is stopped.\nRequires at least `name`, `mount` path, and `size_mib`. To add multiple, reuse the same argument.\nExample: --persistent-volume name=data,mount=/opt/data,size_mib=1000.\nFor more info, see the docs: https://docs.aleph.im/computing/volumes/persistent/" 13 | EPHEMERAL_VOLUME = "Ephemeral volumes are allocated on the host machine when the VM is started and deleted when the VM is stopped.\nRequires at least `mount` path and `size_mib`. To add multiple, reuse the same argument.\nExample: --ephemeral-volume mount=/opt/tmp,size_mib=100" 14 | IMMUTABLE_VOLUME = "Immutable volumes are pinned on the network and can be used by multiple VMs at the same time. They are read-only and useful for setting up libraries or other dependencies.\nRequires at least `mount` path and `ref` (volume message hash). 
`use_latest` is True by default, to use the latest version of the volume, if it has been amended. To add multiple, reuse the same argument.\nExample: --immutable-volume mount=/opt/packages,ref=25a3...8d94.\nFor more info, see the docs: https://docs.aleph.im/computing/volumes/immutable/" 15 | SKIP_ENV_VAR = "Skip prompt to set environment variables" 16 | ENVIRONMENT_VARIABLES = "Environment variables to pass. They will be public and visible in the message, so don't include secrets. Must be a comma separated list. Example: `KEY=value` or `KEY=value,KEY=value`" 17 | ASK_FOR_CONFIRMATION = "Prompt user for confirmation" 18 | IPFS_CATCH_ALL_PATH = "Choose a relative path to catch all unmatched route or a 404 error" 19 | PAYMENT_TYPE = "Payment method, either holding tokens, NFTs, or Pay-As-You-Go via token streaming" 20 | HYPERVISOR = "Hypervisor to use to launch your instance. Always defaults to QEMU, since Firecracker is now deprecated for instances" 21 | INSTANCE_NAME = "Name of your new instance" 22 | ROOTFS = ( 23 | "Hash of the rootfs to use for your instance. Defaults to Ubuntu 22. You can also create your own rootfs and pin it" 24 | ) 25 | COMPUTE_UNITS = "Number of compute units to allocate. Compute units correspond to a tier that includes vcpus, memory, disk and gpu presets. For reference, run: `aleph pricing --help`" 26 | ROOTFS_SIZE = "Rootfs size in MiB to allocate. 
Set to 0 to use default tier value and to not get prompted" 27 | VCPUS = "Number of virtual CPUs to allocate" 28 | MEMORY = "Maximum memory (RAM) in MiB to allocate" 29 | TIMEOUT_SECONDS = "If vm is not called after [timeout_seconds] it will shutdown" 30 | SSH_PUBKEY_FILE = "Path to a public ssh key to be added to the instance" 31 | CRN_HASH = "Hash of the CRN to deploy to (only applicable for confidential and/or Pay-As-You-Go instances)" 32 | CRN_URL = "URL of the CRN to deploy to (only applicable for confidential and/or Pay-As-You-Go instances)" 33 | CRN_AUTO_TAC = "Automatically accept the Terms & Conditions of the CRN if you read them beforehand" 34 | CONFIDENTIAL_OPTION = "Launch a confidential instance (requires creating an encrypted volume)" 35 | CONFIDENTIAL_FIRMWARE = "Hash to UEFI Firmware to launch confidential instance" 36 | CONFIDENTIAL_FIRMWARE_HASH = "Hash of the UEFI Firmware content, to validate measure (ignored if path is provided)" 37 | CONFIDENTIAL_FIRMWARE_PATH = "Path to the UEFI Firmware content, to validate measure (instead of the hash)" 38 | GPU_OPTION = "Launch an instance attaching a GPU to it" 39 | GPU_PREMIUM_OPTION = "Use Premium GPUs (VRAM > 48GiB)" 40 | KEEP_SESSION = "Keeping the already initiated session" 41 | VM_SECRET = "Secret password to start the VM" 42 | CRN_URL_VM_DELETION = "Domain of the CRN where an associated VM is running. It ensures your VM will be stopped and erased on the CRN before the instance message is actually deleted" 43 | VM_ID = "Item hash of your VM. If provided, skip the instance creation, else create a new one" 44 | VM_NOT_READY = "VM not allocated, initialized, or started" 45 | VM_SCHEDULED = "VM scheduled but not available yet" 46 | CRN_UNKNOWN = "Unknown" 47 | CRN_PENDING = "Pending..." 
48 | ALLOCATION_AUTO = "Auto - Scheduler" 49 | ALLOCATION_MANUAL = "Manual - Selection" 50 | PAYMENT_CHAIN = "Chain you want to use to pay for your instance" 51 | PAYMENT_CHAIN_USED = "Chain you are using to pay for your instance" 52 | PAYMENT_CHAIN_PROGRAM = "Chain you want to use to pay for your program" 53 | PAYMENT_CHAIN_PROGRAM_USED = "Chain you are using to pay for your program" 54 | ORIGIN_CHAIN = "Chain of origin of your private key (ensuring correct parsing)" 55 | ADDRESS_CHAIN = "Chain for the address" 56 | ADDRESS_PAYER = "Address of the payer. In order to delegate the payment, your account must be authorized beforehand to publish on the behalf of this address. See the docs for more info: https://docs.aleph.im/protocol/permissions/" 57 | CREATE_REPLACE = "Overwrites private key file if it already exists" 58 | CREATE_ACTIVE = "Loads the new private key after creation" 59 | PROMPT_CRN_URL = "URL of the CRN (Compute node) on which the instance is running" 60 | PROMPT_PROGRAM_CRN_URL = "URL of the CRN (Compute node) on which the program is running" 61 | PROGRAM_PATH = "Path to your source code. Can be a directory, a .squashfs file or a .zip archive" 62 | PROGRAM_ENTRYPOINT = "Your program entrypoint. Example: `main:app` for Python programs, else `run.sh` for a script containing your launch command" 63 | PROGRAM_RUNTIME = "Hash of the runtime to use for your program. You can also create your own runtime and pin it. Currently defaults to `{runtime_id}` (Use `aleph program runtime-checker` to inspect it)" 64 | PROGRAM_INTERNET = "Enable internet access for your program. By default, internet access is disabled" 65 | PROGRAM_PERSISTENT = "Create your program as persistent. By default, programs are ephemeral (serverless): they only start when called and then shutdown after the defined timeout delay." 66 | PROGRAM_UPDATABLE = "Allow program updates. By default, only the source code can be modified without requiring redeployement (same item hash). 
When enabled (set to True), this option allows to update any other field. However, such modifications will require a program redeployment (new item hash)" 67 | PROGRAM_BETA = "If true, you will be prompted to add message subscriptions to your program" 68 | PROGRAM_KEEP_CODE = "Keep the source code intact instead of deleting it" 69 | PROGRAM_KEEP_PREV = "Keep the previous program intact instead of deleting it" 70 | TARGET_ADDRESS = "Target address. Defaults to current account address" 71 | AGGREGATE_SECURITY_KEY_PROTECTED = ( 72 | "The aggregate key `security` is protected. Use `aleph aggregate [allow|revoke]` to manage it." 73 | ) 74 | INVALID_KEY_FORMAT = "Invalid key format: {}" 75 | -------------------------------------------------------------------------------- /src/aleph_client/commands/instance/display.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | import logging 5 | from typing import Optional 6 | 7 | from textual.app import App 8 | from textual.containers import Horizontal 9 | from textual.css.query import NoMatches 10 | from textual.reactive import reactive 11 | from textual.widgets import DataTable, Footer, Label, ProgressBar 12 | from textual.widgets._data_table import RowKey 13 | 14 | from aleph_client.commands.instance.network import ( 15 | fetch_crn_list, 16 | fetch_latest_crn_version, 17 | ) 18 | from aleph_client.commands.node import _format_score 19 | from aleph_client.models import CRNInfo 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | 24 | class CRNTable(App[tuple[CRNInfo, int]]): 25 | table: DataTable 26 | tasks: set[asyncio.Task] = set() 27 | crns: dict[RowKey, tuple[CRNInfo, int]] = {} 28 | current_crn_version: str 29 | total_crns: int 30 | active_crns: int = 0 31 | filtered_crns: int = 0 32 | label_start = reactive("Loading CRNs list ") 33 | label_end = reactive("") 34 | only_reward_address: bool = False 35 | only_qemu: bool = 
    def __init__(
        self,
        only_latest_crn_version: bool = False,
        only_reward_address: bool = False,
        only_qemu: bool = False,
        only_confidentials: bool = False,
        only_gpu: bool = False,
        only_gpu_model: Optional[str] = None,
    ):
        """Store the CRN filtering options chosen on the command line.

        Args:
            only_latest_crn_version: Hide CRNs running a version older than
                the latest published release.
            only_reward_address: Hide CRNs without a stream reward address.
            only_qemu: Hide CRNs without QEMU support.
            only_confidentials: Hide CRNs without confidential computing.
            only_gpu: Hide CRNs with no compatible GPU available.
            only_gpu_model: When set, only list CRNs offering this exact GPU
                model (the table then shows one row per available GPU).
        """
        super().__init__()
        self.only_latest_crn_version = only_latest_crn_version
        self.only_reward_address = only_reward_address
        self.only_qemu = only_qemu
        self.only_confidentials = only_confidentials
        self.only_gpu = only_gpu
        self.only_gpu_model = only_gpu_model
    async def on_mount(self):
        # Leave a sliver of vertical space for the footer below the table.
        self.table.styles.height = "95%"
        # Launch the CRN list fetch in the background; keep a strong
        # reference in self.tasks so the task is not garbage-collected.
        task = asyncio.create_task(self.fetch_node_list())
        self.tasks.add(task)
        task.add_done_callback(self.tasks.discard)

    async def fetch_node_list(self):
        """Fetch the CRN list, then spawn one detail task per table row.

        Without --gpu-model each CRN gets a single row keyed by its hash
        (gpu_id 0). With --gpu-model a CRN appears once per compatible
        available GPU, keyed "<crn_hash>_<gpu_id>".
        """
        crn_list = await fetch_crn_list()
        self.crns = (
            {RowKey(crn.hash): (crn, 0) for crn in crn_list}
            if not self.only_gpu_model
            else {
                RowKey(f"{crn.hash}_{gpu_id}"): (crn, gpu_id)
                for crn in crn_list
                for gpu_id in range(len(crn.compatible_available_gpus))
            }
        )
        # Used by add_crn_info to drop CRNs on legacy versions.
        self.current_crn_version = await fetch_latest_crn_version()

        # Initialize the progress bar
        self.total_crns = len(self.crns)
        self.progress_bar.total = self.total_crns
        self.loader_label_start.update(
            f"Fetching data of {self.total_crns} CRNs {'x GPUs ' if self.only_gpu_model else ''}"
        )
        self.tasks = set()

        # Fetch all CRNs
        for crn, gpu_id in list(self.crns.values()):
            task = asyncio.create_task(self.add_crn_info(crn, gpu_id))
            self.tasks.add(task)
            # make_progress both advances the bar and refreshes the counters.
            task.add_done_callback(self.make_progress)
            task.add_done_callback(self.tasks.discard)
logger.debug(f"Skipping CRN {crn.hash}, legacy version") 140 | return 141 | # Skip CRNs without machine usage 142 | if not crn.machine_usage: 143 | logger.debug(f"Skipping CRN {crn.hash}, no machine usage") 144 | return 145 | # Skip CRNs without ipv6 connectivity 146 | if not crn.ipv6: 147 | logger.debug(f"Skipping CRN {crn.hash}, no ipv6 connectivity") 148 | return 149 | # Skip CRNs without reward address if only_reward_address is set 150 | if self.only_reward_address and not crn.stream_reward_address: 151 | logger.debug(f"Skipping CRN {crn.hash}, no reward address") 152 | return 153 | # Skip non-qemu CRNs if only_qemu is set 154 | if self.only_qemu and not crn.qemu_support: 155 | logger.debug(f"Skipping CRN {crn.hash}, no qemu support") 156 | return 157 | # Skip non-confidential CRNs if only_confidentials is set 158 | if self.only_confidentials and not crn.confidential_computing: 159 | logger.debug(f"Skipping CRN {crn.hash}, no confidential support") 160 | return 161 | # Skip non-gpu CRNs if only-gpu is set 162 | if self.only_gpu and not (crn.gpu_support and crn.compatible_available_gpus): 163 | logger.debug(f"Skipping CRN {crn.hash}, no GPU support or without GPU available") 164 | return 165 | # Skip CRNs without compatible GPU if only-gpu-model is set 166 | elif ( 167 | self.only_gpu 168 | and self.only_gpu_model 169 | and self.only_gpu_model != crn.compatible_available_gpus[gpu_id]["model"] 170 | ): 171 | logger.debug(f"Skipping CRN {crn.hash}, no {self.only_gpu_model} GPU support") 172 | return 173 | self.filtered_crns += 1 174 | 175 | # Fetch terms and conditions 176 | tac = await crn.terms_and_conditions_content 177 | 178 | self.table.add_row( 179 | _format_score(crn.score), 180 | crn.name, 181 | crn.version, 182 | crn.stream_reward_address, 183 | "✅" if crn.confidential_computing else "✖", 184 | # "✅" if crn.qemu_support else "✖", ## Qemu computing enabled by default on crns 185 | ( 186 | crn.compatible_available_gpus[gpu_id]["device_name"] 187 | if 
    def make_progress(self, task):
        """Called automatically to advance the progress bar."""
        try:
            self.progress_bar.advance(1)
            self.loader_label_end.update(f" Available: {self.active_crns} Match: {self.filtered_crns}")
        except NoMatches:
            # Widgets may already be gone if the app is shutting down.
            pass
        # Every fetch task has completed (callbacks discard finished tasks).
        if len(self.tasks) == 0:
            self.loader_label_start.update(f"Fetched {self.total_crns} CRNs ")

    def on_data_table_row_selected(self, message: DataTable.RowSelected):
        """Return the selected row"""
        # self.crns maps RowKey -> (CRNInfo, gpu_id), matching the
        # App[tuple[CRNInfo, int]] result type this app exits with.
        selected_crn: Optional[tuple[CRNInfo, int]] = self.crns.get(message.row_key)
        self.exit(selected_crn)

    def sort_reverse(self, sort_type: str) -> bool:
        """Determine if `sort_type` is ascending or descending.

        Toggles direction each call: a column present in current_sorts
        sorts descending this time and is removed; otherwise ascending.
        """
        reverse = sort_type in self.current_sorts
        if reverse:
            self.current_sorts.remove(sort_type)
        else:
            self.current_sorts.add(sort_type)
        return reverse

    def sort_by(self, column, sort_func=lambda row: row.lower(), invert=False):
        # Sort the table on `column`, flipping direction on repeated calls;
        # `invert` reverses the default direction (used for numeric scores).
        table = self.query_one(DataTable)
        reverse = self.sort_reverse(column)
        table.sort(
            column,
            key=sort_func,
            reverse=not reverse if invert else reverse,
        )

    def action_sort_by_score(self):
        # Score cells are rich Text like "95%": strip the suffix, compare
        # numerically, and show highest scores first by default.
        self.sort_by("score", sort_func=lambda row: float(row.plain.rstrip("%")), invert=True)

    def action_sort_by_name(self):
        self.sort_by("name")

    def action_sort_by_version(self):
        self.sort_by("version")

    def action_sort_by_address(self):
        self.sort_by("stream_reward_address")

    def action_sort_by_confidential(self):
        self.sort_by("confidential_computing")
self.sort_by("qemu_support") 248 | 249 | def action_sort_by_gpu(self): 250 | self.sort_by("gpu_support") 251 | 252 | def action_sort_by_url(self): 253 | self.sort_by("url") 254 | -------------------------------------------------------------------------------- /src/aleph_client/commands/instance/network.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | from ipaddress import IPv6Interface 5 | from json import JSONDecodeError 6 | from typing import Optional 7 | 8 | from aiohttp import ( 9 | ClientConnectorError, 10 | ClientResponseError, 11 | ClientSession, 12 | ClientTimeout, 13 | InvalidURL, 14 | ) 15 | from aleph.sdk import AlephHttpClient 16 | from aleph.sdk.conf import settings 17 | from aleph.sdk.exceptions import ForgottenMessageError, MessageNotFoundError 18 | from aleph.sdk.utils import safe_getattr 19 | from aleph_message.models import InstanceMessage 20 | from aleph_message.models.execution.base import PaymentType 21 | from aleph_message.models.item_hash import ItemHash 22 | from click import echo 23 | from pydantic import ValidationError 24 | from typer import Exit 25 | 26 | from aleph_client.commands import help_strings 27 | from aleph_client.commands.files import download 28 | from aleph_client.models import CRNInfo 29 | from aleph_client.utils import ( 30 | async_lru_cache, 31 | extract_valid_eth_address, 32 | fetch_json, 33 | sanitize_url, 34 | ) 35 | 36 | logger = logging.getLogger(__name__) 37 | 38 | latest_crn_version_link = "https://api.github.com/repos/aleph-im/aleph-vm/releases/latest" 39 | 40 | settings_link = ( 41 | f"{sanitize_url(settings.API_HOST)}" 42 | "/api/v0/aggregates/0xFba561a84A537fCaa567bb7A2257e7142701ae2A.json?keys=settings" 43 | ) 44 | 45 | crn_list_link = ( 46 | f"{sanitize_url(settings.CRN_URL_FOR_PROGRAMS)}" 47 | "/vm/bec08b08bb9f9685880f3aeb9c1533951ad56abef2a39c97f5a93683bdaa5e30/crns.json" 48 | ) 49 | 50 | 
@async_lru_cache
async def call_program_crn_list() -> Optional[dict]:
    """Call program to fetch the compute resource node list.

    The result is cached for the process lifetime via @async_lru_cache.
    All failure modes funnel into a single `raise Exception(error)` at the
    end; on success the function returns from inside the `try`.

    Returns:
        dict: Dictionary containing the compute resource node list.
    Raises:
        Exception: With a cause-specific message on any fetch/parse failure.
    """

    try:
        # Overall 60s budget for connecting and downloading the list.
        async with ClientSession(timeout=ClientTimeout(total=60)) as session:
            logger.debug("Fetching crn list...")
            async with session.get(crn_list_link) as resp:
                if resp.status != 200:
                    # NOTE: this raise is itself caught by the generic
                    # `except Exception` below and re-wrapped with the URL.
                    error = "Unable to fetch crn list from program"
                    raise Exception(error)
                return await resp.json()
    except InvalidURL as e:
        error = f"Invalid URL: {crn_list_link}: {e}"
    except TimeoutError as e:
        error = f"Timeout while fetching: {crn_list_link}: {e}"
    except ClientConnectorError as e:
        error = f"Error on connection: {crn_list_link}: {e}"
    except ClientResponseError as e:
        error = f"Error on response: {crn_list_link}: {e}"
    except JSONDecodeError as e:
        error = f"Error when decoding JSON: {crn_list_link}: {e}"
    except Exception as e:
        error = f"Unexpected error while fetching: {crn_list_link}: {e}"
    # Reached only when one of the handlers above recorded an error.
    raise Exception(error)
90 | """ 91 | 92 | async with ClientSession() as session: 93 | try: 94 | data = await fetch_json(session, latest_crn_version_link) 95 | version = data.get("tag_name") 96 | if not version: 97 | msg = "No tag_name found in GitHub release data" 98 | raise ValueError(msg) 99 | return version 100 | except Exception as e: 101 | logger.error(f"Error while fetching latest crn version: {e}") 102 | raise Exit(code=1) from e 103 | 104 | 105 | @async_lru_cache 106 | async def fetch_crn_list( 107 | latest_crn_version: bool = False, 108 | ipv6: bool = False, 109 | stream_address: bool = False, 110 | confidential: bool = False, 111 | gpu: bool = False, 112 | ) -> list[CRNInfo]: 113 | """Fetch compute resource node list, unfiltered by default. 114 | 115 | Args: 116 | latest_crn_version (bool): Filter by latest crn version. 117 | ipv6 (bool): Filter invalid IPv6 configuration. 118 | stream_address (bool): Filter invalid payment receiver address. 119 | confidential (bool): Filter by confidential computing support. 120 | gpu (bool): Filter by GPU support. 121 | Returns: 122 | list[CRNInfo]: List of compute resource nodes. 
123 | """ 124 | 125 | data = await call_program_crn_list() 126 | current_crn_version = await fetch_latest_crn_version() 127 | crns = [] 128 | for crn in data.get("crns"): 129 | if latest_crn_version and (crn.get("version") or "0.0.0") < current_crn_version: 130 | continue 131 | if ipv6: 132 | ipv6_check = crn.get("ipv6_check") 133 | if not ipv6_check or not all(ipv6_check.values()): 134 | continue 135 | if stream_address and not extract_valid_eth_address(crn.get("payment_receiver_address") or ""): 136 | continue 137 | if confidential and not crn.get("confidential_support"): 138 | continue 139 | if gpu and not (crn.get("gpu_support") and crn.get("compatible_available_gpus")): 140 | continue 141 | try: 142 | crns.append(CRNInfo.from_unsanitized_input(crn)) 143 | except ValidationError: 144 | logger.debug(f"Invalid CRN: {crn}") 145 | continue 146 | return crns 147 | 148 | 149 | async def fetch_crn_info(crn_url: Optional[str] = None, crn_hash: Optional[str] = None) -> Optional[CRNInfo]: 150 | """Retrieve a compute resource node by URL. 151 | 152 | Args: 153 | crn_url (Optional[str]): URL of the compute resource node. 154 | crn_hash (Optional[str]): Hash of the compute resource node. 155 | Returns: 156 | Union[CRNInfo, None]: The compute resource node or None if not found. 157 | """ 158 | 159 | crn_url = sanitize_url(crn_url) 160 | crn_list = await fetch_crn_list() 161 | for crn in crn_list: 162 | if crn.url == crn_url or crn.hash == crn_hash: 163 | return crn 164 | return None 165 | 166 | 167 | async def fetch_vm_info(message: InstanceMessage) -> tuple[str, dict[str, str]]: 168 | """Fetches VM information given an instance message. 169 | 170 | Args: 171 | message: Instance message. 172 | Returns: 173 | VM information. 
174 | """ 175 | 176 | async with ClientSession() as session: 177 | chain = safe_getattr(message, "content.payment.chain.value") 178 | hold = safe_getattr(message, "content.payment.type.value") 179 | crn_hash = safe_getattr(message, "content.requirements.node.node_hash") 180 | created_at = safe_getattr(message, "content.time") 181 | 182 | is_hold = hold == PaymentType.hold.value 183 | firmware = safe_getattr(message, "content.environment.trusted_execution.firmware") 184 | is_confidential = firmware and len(firmware) == 64 185 | has_gpu = safe_getattr(message, "content.requirements.gpu") 186 | tac_hash = safe_getattr(message, "content.requirements.node.terms_and_conditions") 187 | 188 | info = { 189 | "crn_hash": str(crn_hash) if crn_hash else "", 190 | "created_at": str(created_at), 191 | "payment": str(hold), 192 | "chain": str(chain), 193 | "confidential": str(firmware) if is_confidential else "", 194 | "allocation_type": "", 195 | "ipv6_logs": "", 196 | "crn_url": "", 197 | "tac_hash": str(tac_hash) if tac_hash else "", 198 | "tac_url": "", 199 | "tac_accepted": "", 200 | } 201 | try: 202 | # Fetch from the scheduler API directly if no payment or no receiver (hold-tier non-confidential) 203 | if is_hold and not is_confidential and not has_gpu: 204 | try: 205 | url = f"https://scheduler.api.aleph.cloud/api/v0/allocation/{message.item_hash}" 206 | info["allocation_type"] = help_strings.ALLOCATION_AUTO 207 | allocation = await fetch_json(session, url) 208 | url = "https://scheduler.api.aleph.cloud/api/v0/nodes" 209 | nodes = await fetch_json(session, url) 210 | info["ipv6_logs"] = allocation["vm_ipv6"] 211 | for node in nodes["nodes"]: 212 | if node["ipv6"].split("::")[0] == ":".join(str(info["ipv6_logs"]).split(":")[:4]): 213 | info["crn_url"] = sanitize_url(node["url"]) 214 | break 215 | except (ClientResponseError, ClientConnectorError) as e: 216 | info["crn_url"] = help_strings.CRN_PENDING 217 | info["ipv6_logs"] = help_strings.VM_SCHEDULED 218 | 
logger.debug(f"Error while calling Scheduler API ({url}): {e}") 219 | else: 220 | # Fetch from the CRN program endpoint if PAYG-tier or confidential or GPU 221 | info["allocation_type"] = help_strings.ALLOCATION_MANUAL 222 | node_list = await fetch_crn_list() 223 | for node in node_list: 224 | if node.hash == crn_hash: 225 | info["crn_url"] = node.url 226 | break 227 | if info["crn_url"]: 228 | path = f"{info['crn_url']}{PATH_ABOUT_EXECUTIONS_LIST}" 229 | executions = await fetch_json(session, path) 230 | if message.item_hash in executions: 231 | interface = IPv6Interface(executions[message.item_hash]["networking"]["ipv6"]) 232 | info["ipv6_logs"] = str(interface.ip + 1) 233 | else: 234 | info["crn_url"] = help_strings.CRN_UNKNOWN 235 | if not info["ipv6_logs"]: 236 | info["ipv6_logs"] = help_strings.VM_NOT_READY 237 | # Terms and conditions 238 | if tac_hash: 239 | tac = await download(tac_hash, only_info=True, verbose=False) 240 | tac_url = safe_getattr(tac, "url") or f"missing → {tac_hash}" 241 | info.update({"tac_url": tac_url, "tac_accepted": "Yes"}) 242 | except (ClientResponseError, ClientConnectorError) as e: 243 | info["ipv6_logs"] = f"Not available. Server error: {e}" 244 | return message.item_hash, info 245 | 246 | 247 | async def find_crn_of_vm(vm_id: str) -> Optional[str]: 248 | """Finds the CRN where the VM is running given its item hash. 249 | 250 | Args: 251 | vm_id (str): Item hash of the VM. 252 | Returns: 253 | str: CRN url or None if not found. 
254 | """ 255 | 256 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 257 | message: Optional[InstanceMessage] = None 258 | try: 259 | message = await client.get_message(item_hash=ItemHash(vm_id), message_type=InstanceMessage) 260 | except MessageNotFoundError: 261 | echo("Instance does not exist on aleph.im") 262 | except ForgottenMessageError: 263 | echo("Instance has been deleted on aleph.im") 264 | if not message: 265 | raise Exit(code=1) 266 | _, info = await fetch_vm_info(message) 267 | is_valid = info["crn_url"] not in [help_strings.CRN_PENDING, help_strings.CRN_UNKNOWN] 268 | return str(info["crn_url"]) if is_valid else None 269 | 270 | 271 | @async_lru_cache 272 | async def fetch_settings() -> dict: 273 | """Fetch the settings from aggregate for flows and gpu instances. 274 | 275 | Returns: 276 | dict: Dictionary containing the settings. 277 | """ 278 | 279 | async with ClientSession() as session: 280 | try: 281 | data = await fetch_json(session, settings_link) 282 | return data.get("data", {}).get("settings") 283 | except Exception as e: 284 | logger.error(f"Error while fetching settings: {e}") 285 | raise Exit(code=1) from e 286 | -------------------------------------------------------------------------------- /src/aleph_client/commands/message.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | import json 5 | import os.path 6 | import subprocess 7 | import tempfile 8 | import time 9 | from pathlib import Path 10 | from typing import Annotated, Optional 11 | 12 | import typer 13 | from aleph.sdk import AlephHttpClient, AuthenticatedAlephHttpClient 14 | from aleph.sdk.account import _load_account 15 | from aleph.sdk.conf import settings 16 | from aleph.sdk.exceptions import ForgottenMessageError, MessageNotFoundError 17 | from aleph.sdk.query.filters import MessageFilter 18 | from aleph.sdk.query.responses import MessagesResponse 19 | 
from aleph.sdk.types import AccountFromPrivateKey, StorageEnum 20 | from aleph.sdk.utils import extended_json_encoder 21 | from aleph_message.models import AlephMessage, ProgramMessage 22 | from aleph_message.models.base import MessageType 23 | from aleph_message.models.item_hash import ItemHash 24 | from aleph_message.status import MessageStatus 25 | 26 | from aleph_client.commands import help_strings 27 | from aleph_client.commands.utils import ( 28 | colorful_json, 29 | colorized_status, 30 | input_multiline, 31 | setup_logging, 32 | str_to_datetime, 33 | ) 34 | from aleph_client.utils import AsyncTyper 35 | 36 | app = AsyncTyper(no_args_is_help=True) 37 | 38 | 39 | @app.command() 40 | async def get( 41 | item_hash: Annotated[str, typer.Argument(help="Item hash of the message")], 42 | ): 43 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 44 | message: Optional[AlephMessage] = None 45 | try: 46 | message, status = await client.get_message(item_hash=ItemHash(item_hash), with_status=True) 47 | except MessageNotFoundError: 48 | typer.echo("Message does not exist on aleph.im") 49 | except ForgottenMessageError: 50 | typer.echo("Message has been forgotten on aleph.im") 51 | if message: 52 | typer.echo(f"Message Status: {colorized_status(status)}") 53 | if status == MessageStatus.REJECTED: 54 | reason = await client.get_message_error(item_hash=ItemHash(item_hash)) 55 | typer.echo(colorful_json(json.dumps(reason, indent=4))) 56 | else: 57 | typer.echo(colorful_json(json.dumps(message.model_dump(), indent=4, default=extended_json_encoder))) 58 | 59 | 60 | @app.command() 61 | async def find( 62 | pagination: Annotated[int, typer.Option()] = 200, 63 | page: Annotated[int, typer.Option()] = 1, 64 | message_types: Annotated[Optional[str], typer.Option()] = None, 65 | content_types: Annotated[Optional[str], typer.Option()] = None, 66 | content_keys: Annotated[Optional[str], typer.Option()] = None, 67 | refs: Annotated[Optional[str], typer.Option()] = 
None, 68 | addresses: Annotated[Optional[str], typer.Option()] = None, 69 | tags: Annotated[Optional[str], typer.Option()] = None, 70 | hashes: Annotated[Optional[str], typer.Option()] = None, 71 | channels: Annotated[Optional[str], typer.Option()] = None, 72 | chains: Annotated[Optional[str], typer.Option()] = None, 73 | start_date: Annotated[Optional[str], typer.Option()] = None, 74 | end_date: Annotated[Optional[str], typer.Option()] = None, 75 | ignore_invalid_messages: Annotated[bool, typer.Option()] = True, 76 | ): 77 | parsed_message_types = ( 78 | [MessageType(message_type) for message_type in message_types.split(",")] if message_types else None 79 | ) 80 | parsed_content_types = content_types.split(",") if content_types else None 81 | parsed_content_keys = content_keys.split(",") if content_keys else None 82 | parsed_refs = refs.split(",") if refs else None 83 | parsed_addresses = addresses.split(",") if addresses else None 84 | parsed_tags = tags.split(",") if tags else None 85 | parsed_hashes = hashes.split(",") if hashes else None 86 | parsed_channels = channels.split(",") if channels else None 87 | parsed_chains = chains.split(",") if chains else None 88 | 89 | start_time = str_to_datetime(start_date) 90 | end_time = str_to_datetime(end_date) 91 | 92 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 93 | response: MessagesResponse = await client.get_messages( 94 | page_size=pagination, 95 | page=page, 96 | message_filter=MessageFilter( 97 | message_types=parsed_message_types, 98 | content_types=parsed_content_types, 99 | content_keys=parsed_content_keys, 100 | refs=parsed_refs, 101 | addresses=parsed_addresses, 102 | tags=parsed_tags, 103 | hashes=parsed_hashes, 104 | channels=parsed_channels, 105 | chains=parsed_chains, 106 | start_date=start_time, 107 | end_date=end_time, 108 | ), 109 | ignore_invalid_messages=ignore_invalid_messages, 110 | ) 111 | typer.echo( 112 | colorful_json(json.dumps(response.model_dump(), sort_keys=True, 
indent=4, default=extended_json_encoder)) 113 | ) 114 | 115 | 116 | @app.command() 117 | async def post( 118 | path: Annotated[ 119 | Optional[Path], 120 | typer.Option(help="Path to the content you want to post. If omitted, you can input your content directly"), 121 | ] = None, 122 | type: Annotated[str, typer.Option(help="Text representing the message object type")] = "test", 123 | ref: Annotated[Optional[str], typer.Option(help=help_strings.REF)] = None, 124 | channel: Annotated[Optional[str], typer.Option(help=help_strings.CHANNEL)] = settings.DEFAULT_CHANNEL, 125 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 126 | private_key_file: Annotated[ 127 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 128 | ] = settings.PRIVATE_KEY_FILE, 129 | debug: Annotated[bool, typer.Option()] = False, 130 | ): 131 | """Post a message on aleph.im.""" 132 | 133 | setup_logging(debug) 134 | 135 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 136 | storage_engine: StorageEnum 137 | content: dict 138 | 139 | if path: 140 | if not path.is_file(): 141 | typer.echo(f"Error: File not found: '{path}'") 142 | raise typer.Exit(code=1) 143 | 144 | file_size = os.path.getsize(path) 145 | storage_engine = StorageEnum.ipfs if file_size > 4 * 1024 * 1024 else StorageEnum.storage 146 | 147 | with open(path, encoding="utf-8") as fd: 148 | content = json.load(fd) 149 | 150 | else: 151 | content_raw = input_multiline() 152 | storage_engine = StorageEnum.ipfs if len(content_raw) > 4 * 1024 * 1024 else StorageEnum.storage 153 | try: 154 | content = json.loads(content_raw) 155 | except json.decoder.JSONDecodeError as e: 156 | typer.echo("Not valid JSON") 157 | raise typer.Exit(code=2) from e 158 | 159 | async with AuthenticatedAlephHttpClient(account=account, api_server=settings.API_HOST) as client: 160 | result, status = await client.create_post( 161 | post_content=content, 162 | 
post_type=type, 163 | ref=ref, 164 | channel=channel, 165 | inline=True, 166 | storage_engine=storage_engine, 167 | ) 168 | 169 | typer.echo(json.dumps(result.model_dump(), indent=4, default=extended_json_encoder)) 170 | 171 | 172 | @app.command() 173 | async def amend( 174 | item_hash: Annotated[str, typer.Argument(help="Hash reference of the message to amend")], 175 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 176 | private_key_file: Annotated[ 177 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 178 | ] = settings.PRIVATE_KEY_FILE, 179 | debug: Annotated[bool, typer.Option()] = False, 180 | ): 181 | """Amend an existing aleph.im message.""" 182 | 183 | setup_logging(debug) 184 | 185 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 186 | 187 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 188 | existing_message: Optional[AlephMessage] = None 189 | try: 190 | existing_message = await client.get_message(item_hash=item_hash) 191 | except MessageNotFoundError: 192 | typer.echo("Message does not exist on aleph.im") 193 | except ForgottenMessageError: 194 | typer.echo("Message has been forgotten on aleph.im") 195 | if existing_message: 196 | editor: str = os.getenv("EDITOR", default="nano") 197 | with tempfile.NamedTemporaryFile(suffix="json") as fd: 198 | # Fill in message template 199 | fd.write(existing_message.content.model_dump_json(indent=4).encode()) 200 | fd.seek(0) 201 | 202 | # Launch editor 203 | subprocess.run([editor, fd.name], check=True) 204 | 205 | # Read new message 206 | fd.seek(0) 207 | new_content_json = fd.read() 208 | 209 | content_type = type(existing_message).__annotations__["content"] 210 | new_content_dict = json.loads(new_content_json) 211 | new_content = content_type(**new_content_dict) 212 | 213 | if isinstance(existing_message, ProgramMessage): 214 | new_content.replaces = existing_message.item_hash 
215 | else: 216 | new_content.ref = existing_message.item_hash 217 | 218 | new_content.time = time.time() 219 | new_content.type = "amend" 220 | 221 | typer.echo(new_content) 222 | async with AuthenticatedAlephHttpClient(account=account, api_server=settings.API_HOST) as account_client: 223 | message, status, response = await account_client.submit( 224 | content=new_content.dict(), 225 | message_type=existing_message.type, 226 | channel=existing_message.channel, 227 | ) 228 | typer.echo(f"{message.model_dump_json(indent=4)}") 229 | 230 | 231 | @app.command() 232 | async def forget( 233 | hashes: Annotated[str, typer.Argument(help="Comma separated list of hash references of messages to forget")], 234 | reason: Annotated[ 235 | Optional[str], typer.Option(help="A description of why the messages are being forgotten.") 236 | ] = None, 237 | channel: Annotated[Optional[str], typer.Option(help=help_strings.CHANNEL)] = settings.DEFAULT_CHANNEL, 238 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 239 | private_key_file: Annotated[ 240 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 241 | ] = settings.PRIVATE_KEY_FILE, 242 | debug: Annotated[bool, typer.Option()] = False, 243 | ): 244 | """Forget an existing aleph.im message.""" 245 | 246 | setup_logging(debug) 247 | 248 | hash_list: list[ItemHash] = [ItemHash(h) for h in hashes.split(",")] 249 | 250 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 251 | async with AuthenticatedAlephHttpClient(account=account, api_server=settings.API_HOST) as client: 252 | await client.forget(hashes=hash_list, reason=reason, channel=channel) 253 | 254 | 255 | @app.command() 256 | async def watch( 257 | ref: Annotated[str, typer.Argument(help="Hash reference of the message to watch")], 258 | indent: Annotated[Optional[int], typer.Option(help="Number of indents to use")] = None, 259 | debug: Annotated[bool, typer.Option()] = 
False, 260 | ): 261 | """Watch a hash for amends and print amend hashes""" 262 | 263 | setup_logging(debug) 264 | 265 | async with AlephHttpClient(api_server=settings.API_HOST) as client: 266 | original: Optional[AlephMessage] = None 267 | try: 268 | original = await client.get_message(item_hash=ref) 269 | except MessageNotFoundError: 270 | typer.echo("Message does not exist on aleph.im") 271 | except ForgottenMessageError: 272 | typer.echo("Message has been forgotten on aleph.im") 273 | if original: 274 | async for message in client.watch_messages( 275 | message_filter=MessageFilter(refs=[ref], addresses=[original.content.address]) 276 | ): 277 | typer.echo(f"{message.model_dump_json(indent=indent)}") 278 | 279 | 280 | @app.command() 281 | def sign( 282 | message: Annotated[Optional[str], typer.Option(help=help_strings.SIGNABLE_MESSAGE)] = None, 283 | private_key: Annotated[Optional[str], typer.Option(help=help_strings.PRIVATE_KEY)] = settings.PRIVATE_KEY_STRING, 284 | private_key_file: Annotated[ 285 | Optional[Path], typer.Option(help=help_strings.PRIVATE_KEY_FILE) 286 | ] = settings.PRIVATE_KEY_FILE, 287 | debug: Annotated[bool, typer.Option()] = False, 288 | ): 289 | """Sign an aleph message with a private key. 
If no --message is provided, the message will be read from stdin.""" 290 | 291 | setup_logging(debug) 292 | 293 | account: AccountFromPrivateKey = _load_account(private_key, private_key_file) 294 | 295 | if message is None: 296 | message = input_multiline() 297 | try: 298 | data = json.loads(message) 299 | except json.JSONDecodeError as error: 300 | typer.echo("Error: Message isn't a valid JSON") 301 | raise typer.Exit(code=1) from error 302 | 303 | coroutine = account.sign_message(data) 304 | signed_message = asyncio.run(coroutine) 305 | typer.echo(json.dumps(signed_message, indent=4, default=extended_json_encoder)) 306 | -------------------------------------------------------------------------------- /src/aleph_client/commands/node.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import datetime 4 | import json as json_lib 5 | import logging 6 | import re 7 | import unicodedata 8 | from typing import Annotated, Optional 9 | 10 | import aiohttp 11 | import typer 12 | from aleph.sdk.conf import settings 13 | from rich import text 14 | from rich.console import Console 15 | from rich.markup import escape 16 | from rich.table import Table 17 | 18 | from aleph_client.commands.utils import setup_logging 19 | from aleph_client.utils import AsyncTyper, sanitize_url 20 | 21 | logger = logging.getLogger(__name__) 22 | app = AsyncTyper(no_args_is_help=True) 23 | 24 | node_link = ( 25 | f"{sanitize_url(settings.API_HOST)}/api/v0/aggregates/0xa1B3bb7d2332383D96b7796B908fB7f7F3c2Be10.json?" 
26 | "keys=corechannel" 27 | ) 28 | 29 | 30 | class NodeInfo: 31 | def __init__(self, **kwargs): 32 | self.data = kwargs.get("data", {}) 33 | self.nodes = self.data.get("corechannel", {}).get("resource_nodes", []) 34 | self.nodes.sort(key=lambda x: x.get("score", 0), reverse=True) 35 | self.core_node = self.data.get("corechannel", {}).get("nodes", []) 36 | self.core_node.sort(key=lambda x: x.get("score", 0), reverse=True) 37 | 38 | 39 | async def _fetch_nodes() -> NodeInfo: 40 | """Fetch node aggregates and format it as NodeInfo""" 41 | async with aiohttp.ClientSession() as session: 42 | async with session.get(node_link) as resp: 43 | if resp.status != 200: 44 | logger.error("Unable to fetch node information") 45 | raise typer.Exit(1) 46 | 47 | data = await resp.json() 48 | return NodeInfo(**data) 49 | 50 | 51 | def _escape_and_normalize(string: str) -> str: 52 | sanitized_text = escape(string) 53 | normalized_text = unicodedata.normalize("NFC", sanitized_text) 54 | return normalized_text 55 | 56 | 57 | def _remove_ansi_escape(string: str) -> str: 58 | ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]") 59 | return ansi_escape.sub("", string) 60 | 61 | 62 | def _format_score(score: float) -> text.Text: 63 | if score < 0.5: 64 | return text.Text(f"{score:.2%}", style="red", justify="right") 65 | elif score < 0.75: 66 | return text.Text(f"{score:.2%}", style="orange", justify="right") 67 | else: 68 | return text.Text(f"{score:.2%}", style="green", justify="right") 69 | 70 | 71 | def _format_status(status): 72 | if status.lower() == "linked" or status.lower() == "active": 73 | return text.Text(status, style="green", justify="left") 74 | return text.Text(status, style="red", justify="left") 75 | 76 | 77 | def _show_compute(node_info): 78 | table = Table(title="Compute Node Information") 79 | table.add_column("Score", style="green", no_wrap=True, justify="right") 80 | table.add_column("Name", style="#029AFF", justify="left") 81 | table.add_column("Creation Time", 
style="#029AFF", justify="center") 82 | table.add_column("Decentralization", style="green", justify="right") 83 | table.add_column("Status", style="green", justify="right") 84 | table.add_column("Item Hash", style="green", justify="center") 85 | table.add_column("URL", style="orchid", justify="center") 86 | 87 | for node in node_info.nodes: 88 | # Prevent escaping with name 89 | node_name = node["name"] 90 | node_name = _escape_and_normalize(node_name) 91 | node_name = _remove_ansi_escape(node_name) 92 | node_hash = node["hash"] 93 | 94 | # Format Value 95 | creation_time = datetime.datetime.fromtimestamp(node["time"], tz=datetime.timezone.utc).strftime( 96 | "%Y-%m-%d %H:%M:%S" 97 | ) 98 | score = _format_score(node["score"]) 99 | decentralization = _format_score(node["decentralization"]) 100 | status = _format_status(node["status"]) 101 | node_url = node["address"] 102 | table.add_row( 103 | score, 104 | node_name, 105 | creation_time, 106 | decentralization, 107 | status, 108 | node_hash, 109 | node_url, 110 | ) 111 | 112 | console = Console() 113 | console.print(table) 114 | 115 | 116 | def _filter_node( 117 | active: bool, 118 | address: Optional[str], 119 | core_info, 120 | payg_receiver=Optional[str], 121 | crn_url=Optional[str], 122 | crn_hash=Optional[str], 123 | ccn_hash=Optional[str], 124 | ): 125 | result = [] 126 | try: 127 | node_url = not crn_url or sanitize_url(crn_url) 128 | except Exception as e: 129 | logger.debug(e) 130 | for node in core_info: 131 | try: 132 | if "total_staked" in node: # CCN 133 | if ( 134 | (not active or (node["status"] == "active" and node["score"] > 0)) 135 | and (not address or node["owner"] == address) 136 | and (not ccn_hash or node["hash"] == ccn_hash) 137 | ): 138 | result.append(node) 139 | elif "parent" in node: # CRN 140 | sanitized_url = "address" in node and sanitize_url(node["address"]) 141 | if sanitized_url: 142 | node["address"] = sanitized_url 143 | if ( 144 | (not active or (node["status"] == "linked" and 
node["score"] > 0)) 145 | and (not address or node["owner"] == address) 146 | and (not payg_receiver or node["stream_reward"] == payg_receiver) 147 | and (not crn_url or node["address"] == node_url) 148 | and (not crn_hash or node["hash"] == crn_hash) 149 | and (not ccn_hash or node["parent"] == ccn_hash) 150 | ): 151 | result.append(node) 152 | except Exception as e: 153 | logger.debug(e) 154 | return result 155 | 156 | 157 | def _show_core(node_info): 158 | table = Table(title="Core Channel Node Information") 159 | table.add_column("Score", style="green", no_wrap=True, justify="right") 160 | table.add_column("Name", style="#029AFF", justify="left") 161 | table.add_column("Staked", style="#029AFF", justify="left") 162 | table.add_column("Linked", style="#029AFF", justify="left") 163 | table.add_column("Creation Time", style="#029AFF", justify="center") 164 | table.add_column("Status", style="green", justify="right") 165 | table.add_column("Item Hash", style="green", justify="center") 166 | 167 | for node in node_info: 168 | # Prevent escaping with name 169 | node_name = node["name"] 170 | node_name = _escape_and_normalize(node_name) 171 | node_name = _remove_ansi_escape(node_name) 172 | node_hash = node["hash"] 173 | 174 | # Format Value 175 | creation_time = datetime.datetime.fromtimestamp(node["time"], tz=datetime.timezone.utc).strftime( 176 | "%Y-%m-%d %H:%M:%S" 177 | ) 178 | score = _format_score(node["score"]) 179 | status = _format_status(node["status"]) 180 | 181 | table.add_row( 182 | score, 183 | node_name, 184 | f"{int(node['total_staked']):,}", 185 | str(len(node["resource_nodes"])), 186 | creation_time, 187 | status, 188 | node_hash, 189 | ) 190 | 191 | console = Console() 192 | console.print(table) 193 | 194 | 195 | @app.command() 196 | async def compute( 197 | json: Annotated[bool, typer.Option(help="Print as json instead of rich table")] = False, 198 | active: Annotated[bool, typer.Option(help="Only show active nodes")] = False, 199 | address: 
Annotated[Optional[str], typer.Option(help="Owner address to filter by")] = None, 200 | payg_receiver: Annotated[ 201 | Optional[str], typer.Option(help="PAYG (Pay-As-You-Go) receiver address to filter by") 202 | ] = None, 203 | crn_url: Annotated[Optional[str], typer.Option(help="CRN URL to filter by")] = None, 204 | crn_hash: Annotated[Optional[str], typer.Option(help="CRN hash to filter by")] = None, 205 | ccn_hash: Annotated[Optional[str], typer.Option(help="CCN hash to filter by")] = None, 206 | debug: Annotated[bool, typer.Option()] = False, 207 | ): 208 | """Get all compute node (CRN) on aleph network""" 209 | 210 | setup_logging(debug) 211 | 212 | compute_info: NodeInfo = await _fetch_nodes() 213 | compute_info.nodes = _filter_node( 214 | core_info=compute_info.nodes, 215 | active=active, 216 | address=address, 217 | payg_receiver=payg_receiver, 218 | crn_url=crn_url, 219 | crn_hash=crn_hash, 220 | ccn_hash=ccn_hash, 221 | ) 222 | 223 | if not json: 224 | _show_compute(compute_info) 225 | else: 226 | typer.echo(json_lib.dumps(compute_info.nodes, indent=4)) 227 | 228 | 229 | @app.command() 230 | async def core( 231 | json: Annotated[bool, typer.Option(help="Print as json instead of rich table")] = False, 232 | active: Annotated[bool, typer.Option(help="Only show active nodes")] = False, 233 | address: Annotated[Optional[str], typer.Option(help="Owner address to filter by")] = None, 234 | ccn_hash: Annotated[Optional[str], typer.Option(help="CCN hash to filter by")] = None, 235 | debug: Annotated[bool, typer.Option()] = False, 236 | ): 237 | """Get all core node (CCN) on aleph""" 238 | setup_logging(debug) 239 | 240 | core_info: NodeInfo = await _fetch_nodes() 241 | core_info.core_node = _filter_node(core_info=core_info.core_node, active=active, address=address, ccn_hash=ccn_hash) 242 | 243 | if not json: 244 | _show_core(node_info=core_info.core_node) 245 | else: 246 | typer.echo(json_lib.dumps(core_info.core_node, indent=4)) 247 | 
-------------------------------------------------------------------------------- /src/aleph_client/commands/program_utils/runtime_checker.squashfs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aleph-im/aleph-client/c444fd7aa3ee6b0ebb4ff94b172955b9174ceddd/src/aleph_client/commands/program_utils/runtime_checker.squashfs -------------------------------------------------------------------------------- /src/aleph_client/commands/program_utils/runtime_checker/main.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import subprocess 3 | 4 | from fastapi import FastAPI 5 | 6 | app = FastAPI() 7 | 8 | extra_checks = { 9 | "Docker": "docker --version", 10 | "Nodejs": "node --version", 11 | "Rust": "rustc --version", 12 | "Go": "go version", 13 | } 14 | 15 | 16 | @app.get("/") 17 | async def versions() -> dict[str, str]: 18 | results = {} 19 | 20 | # Distribution 21 | try: 22 | results["Distribution"] = platform.freedesktop_os_release()["PRETTY_NAME"] # type: ignore 23 | except Exception: 24 | results["Distribution"] = "Not available" 25 | 26 | # Python 27 | results["Python"] = platform.python_version() 28 | 29 | # Others 30 | for label, command in extra_checks.items(): 31 | try: 32 | results[label] = subprocess.check_output(command.split(" ")).decode("utf-8").strip() 33 | except Exception: 34 | results[label] = "Not installed" 35 | 36 | return results 37 | -------------------------------------------------------------------------------- /src/aleph_client/models.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Any, Optional 3 | 4 | from aiohttp import InvalidURL 5 | from aleph.sdk.types import StoredContent 6 | from aleph_message.models import ItemHash 7 | from aleph_message.models.execution.environment import CpuProperties, GpuDeviceClass 8 | from pydantic 
import BaseModel
from rich.console import Console
from rich.panel import Panel
from rich.prompt import Prompt
from rich.text import Text
from typer import echo

from aleph_client.commands.files import download
from aleph_client.commands.node import _escape_and_normalize, _remove_ansi_escape
from aleph_client.utils import extract_valid_eth_address, sanitize_url


class LoadAverage(BaseModel):
    """System load averages over 1, 5 and 15 minutes, as reported by a CRN."""

    load1: float
    load5: float
    load15: float


class CoreFrequencies(BaseModel):
    """Minimum and maximum CPU core frequencies reported by a CRN."""

    min: float
    max: float


class CpuUsage(BaseModel):
    """CPU details of a CRN: core count, load averages and frequency range."""

    count: int
    load_average: LoadAverage
    core_frequencies: CoreFrequencies


class MemoryUsage(BaseModel):
    """Memory availability in kilobytes (field names carry the unit)."""

    total_kB: int
    available_kB: int


class DiskUsage(BaseModel):
    """Disk availability in kilobytes (field names carry the unit)."""

    total_kB: int
    available_kB: int


class UsagePeriod(BaseModel):
    """Time window over which the usage metrics were sampled."""

    start_timestamp: datetime
    duration_seconds: float


class MachineProperties(BaseModel):
    """Static hardware properties of the machine."""

    cpu: CpuProperties


class GpuDevice(BaseModel):
    """A single GPU device exposed by a CRN."""

    vendor: str
    model: str
    device_name: str
    device_class: GpuDeviceClass
    pci_host: str
    device_id: str
    compatible: bool


class GPUProperties(BaseModel):
    """All GPU devices of a CRN, plus the subset currently available."""

    devices: list[GpuDevice]
    available_devices: list[GpuDevice]


class MachineUsage(BaseModel):
    """Live usage metrics returned by a CRN's system-usage endpoint."""

    cpu: CpuUsage
    mem: MemoryUsage
    disk: DiskUsage
    period: UsagePeriod
    properties: MachineProperties
    gpu: Optional[GPUProperties]
    active: bool = True


class MachineInfo(BaseModel):
    """Aggregated information about a CRN: usage metrics, score and identity."""

    hash: str
    machine_usage: MachineUsage
    score: float
    name: str
    version: Optional[str]
    reward_address: str
    url: str

    @classmethod
    def from_unsanitized_input(
        cls,
        machine_usage: MachineUsage,
        score: float,
        name: str,
        version: Optional[str],
        reward_address: str,
        url: str,
        hash: str,
    ) -> "MachineInfo":
        """Create a MachineInfo instance from unsanitized input.

        User input from the account page or the API may contain malicious or unexpected data.
        This method ensures that the input is sanitized before creating a MachineInfo object.

        Args:
            machine_usage: MachineUsage object from the CRN API.
            score: Score of the CRN.
            name: Name of the CRN.
            version: Version of the CRN.
            reward_address: Reward address of the CRN.
            url: URL of the CRN.
            hash: Item hash identifying the CRN.
        """
        # Strip Rich markup and ANSI escapes from the displayable name.
        node_name: str = _remove_ansi_escape(_escape_and_normalize(name))

        # The version field is optional, so we need to handle it separately
        raw_version: Optional[str] = version
        version = _remove_ansi_escape(_escape_and_normalize(raw_version)) if raw_version else None

        return cls(
            machine_usage=MachineUsage.model_validate(machine_usage),
            score=score,
            name=node_name,
            version=version,
            reward_address=reward_address,
            url=url,
            hash=hash,
        )


class CRNInfo(BaseModel):
    """Normalized view of a CRN aggregate entry plus its live metrics."""

    hash: ItemHash
    name: str
    owner: str
    url: str
    ccn_hash: Optional[str]
    status: Optional[str]
    version: Optional[str]
    score: float
    reward_address: str
    stream_reward_address: str
    machine_usage: Optional[MachineUsage]
    ipv6: bool
    qemu_support: bool
    confidential_computing: bool
    gpu_support: bool
    terms_and_conditions: Optional[str]
    compatible_available_gpus: Optional[list]

    @staticmethod
    def from_unsanitized_input(
        crn: dict[str, Any],
    ) -> "CRNInfo":
        """Build a CRNInfo from a raw aggregate entry, sanitizing address fields.

        An unparsable `address` URL results in an empty `url`; a missing or
        invalid payment receiver results in an empty `stream_reward_address`.
        """
        payment_receiver_address = crn.get("payment_receiver_address")
        stream_reward_address = extract_valid_eth_address(payment_receiver_address) if payment_receiver_address else ""
        system_usage = crn.get("system_usage")
        machine_usage = MachineUsage.model_validate(system_usage) if system_usage else None
        # IPv6 support requires every individual check to have passed.
        ipv6_check = crn.get("ipv6_check")
        ipv6 = bool(ipv6_check and all(ipv6_check.values()))
        try:
            url = sanitize_url(crn["address"])
        except InvalidURL:
            url = ""
        return CRNInfo(
            hash=crn["hash"],
            name=crn["name"],
            owner=crn["owner"],
            url=url,
            version=crn["version"],
            ccn_hash=crn["parent"],
            status=crn["status"],
            score=crn["score"],
            reward_address=crn["reward"],
            stream_reward_address=stream_reward_address,
            machine_usage=machine_usage,
            ipv6=ipv6,
            qemu_support=bool(crn["qemu_support"]),
            confidential_computing=bool(crn["confidential_support"]),
            gpu_support=bool(crn["gpu_support"]),
            terms_and_conditions=crn["terms_and_conditions"],
            compatible_available_gpus=crn["compatible_available_gpus"],
        )

    @property
    def display_cpu(self) -> str:
        """Core count as a right-aligned string, or "" when usage is unknown."""
        if self.machine_usage:
            return f"{self.machine_usage.cpu.count:>3}"
        return ""

    @property
    def display_ram(self) -> str:
        """'available / total GB' RAM string, or "" when usage is unknown."""
        if self.machine_usage:
            return (
                f"{self.machine_usage.mem.available_kB / 1_000_000:>3.0f} / "
                f"{self.machine_usage.mem.total_kB / 1_000_000:>3.0f} GB"
            )
        return ""

    @property
    def display_hdd(self) -> str:
        """'available / total GB' disk string, or "" when usage is unknown."""
        if self.machine_usage:
            return (
                f"{self.machine_usage.disk.available_kB / 1_000_000:>4.0f} / "
                f"{self.machine_usage.disk.total_kB / 1_000_000:>4.0f} GB"
            )
        return ""

    @property
    async def terms_and_conditions_content(self) -> Optional[StoredContent]:
        """Fetch the stored T&C metadata, or None when the CRN declares none."""
        if self.terms_and_conditions:
            return await download(self.terms_and_conditions, only_info=True, verbose=False)
        return None

    async def display_terms_and_conditions(self, auto_accept: bool = False) -> Optional[bool]:
        """Show the CRN's T&C and ask for acceptance.

        Returns True/False for accepted/refused, or None when the CRN has no
        terms and conditions. With auto_accept, acceptance is echoed and assumed.
        """
        if self.terms_and_conditions:
            tac = await self.terms_and_conditions_content
            if tac:
                text = Text.assemble(
                    "The selected CRN requires you to accept the following conditions and terms of use:\n",
                    f"Filename: {tac.filename}\n" if tac.filename else "",
                    Text.from_markup(f"↳ [orange1]{tac.url}[/orange1]"),
                )
                console = Console()
                console.print(
                    Panel(text, title="Terms & Conditions", border_style="blue", expand=False, title_align="left")
                )

                if auto_accept:
                    echo("To proceed, enter “Yes I read and accept”: Yes I read and accept")
                    return True
                return Prompt.ask("To proceed, enter “Yes I read and accept”").lower() == "yes i read and accept"
        return None

    def display_crn_specs(self):
        """Print a panel summarizing this CRN's identity, capacity and features."""
        console = Console()

        data = {
            "Hash": self.hash,
            "Name": self.name,
            "URL": self.url,
            "Version": self.version,
            "Score": self.score,
            "Stream Receiver": self.stream_reward_address,
            **(
                {
                    "Available Cores": self.display_cpu,
                    "Available RAM": self.display_ram,
                    "Available Disk": self.display_hdd,
                }
                if isinstance(self.machine_usage, MachineUsage)
                else {}
            ),
            "Support Qemu": self.qemu_support,
            "Support Confidential": self.confidential_computing,
            "Support GPU": self.gpu_support,
            **(
                {
                    "Terms & Conditions": self.terms_and_conditions,
                }
                if self.terms_and_conditions
                else {}
            ),
        }
        text = "\n".join(f"[orange3]{key}[/orange3]: {value}" for key, value in data.items())

        console.print(Panel(text, title="Selected CRN", border_style="bright_cyan", expand=False, title_align="left"))
--------------------------------------------------------------------------------
/src/aleph_client/utils.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import asyncio
import inspect
import logging
import os
import re
import subprocess
import
sys
from asyncio import ensure_future
from functools import lru_cache, partial, wraps
from pathlib import Path
from shutil import make_archive
from typing import Optional, Union
from urllib.parse import ParseResult, urlparse
from zipfile import BadZipFile, ZipFile

import aiohttp
import typer
from aiohttp import ClientSession
from aleph.sdk.conf import MainConfiguration, load_main_configuration, settings
from aleph.sdk.types import GenericMessage
from aleph_message.models.base import MessageType
from aleph_message.models.execution.base import Encoding

logger = logging.getLogger(__name__)

try:
    import magic
except ImportError:
    logger.info("Could not import library 'magic', MIME type detection disabled")
    magic = None  # type:ignore


def try_open_zip(path: Path) -> None:
    """Try opening a zip to check if it is valid.

    Raises:
        FileNotFoundError: If ``path`` is not a regular file.
        BadZipFile: If the archive is not a valid zip or contains no file.
    """
    # Explicit check instead of `assert`: asserts are stripped under `python -O`.
    if not path.is_file():
        raise FileNotFoundError(path)
    with open(path, "rb") as archive_file:
        with ZipFile(archive_file, "r") as archive:
            if not archive.namelist():
                msg = "No file in the archive."
                raise BadZipFile(msg)


def create_archive(path: Path) -> tuple[Path, Encoding]:
    """Create a zip archive from a directory.

    Directories become a squashfs or zip archive (depending on settings);
    existing files are validated and returned as-is with their encoding.

    Returns:
        Tuple of (archive path, encoding).
    Raises:
        FileNotFoundError: If ``path`` does not exist or archiving produced no file.
    """
    if os.path.isdir(path):
        if settings.CODE_USES_SQUASHFS:
            logger.debug("Creating squashfs archive...")
            archive_path = Path(f"{path}.squashfs")
            subprocess.check_call(["/usr/bin/mksquashfs", path, archive_path, "-noappend"])
            # Explicit check instead of `assert` (stripped under `python -O`).
            if not archive_path.is_file():
                raise FileNotFoundError(archive_path)
            return archive_path, Encoding.squashfs
        else:
            logger.debug("Creating zip archive...")
            make_archive(str(path), "zip", path)
            archive_path = Path(f"{path}.zip")
            return archive_path, Encoding.zip
    elif os.path.isfile(path):
        # Detect squashfs by suffix, or by MIME sniffing when `magic` is available.
        if path.suffix == ".squashfs" or (magic and magic.from_file(path).startswith("Squashfs filesystem")):
            return path, Encoding.squashfs
        else:
            try_open_zip(Path(path))
            return path, Encoding.zip
    else:
        msg = "No file or directory to create the archive from"
        raise FileNotFoundError(msg)


def get_message_type_value(message_type: type[GenericMessage]) -> MessageType:
    """Returns the value of the 'type' field of a message type class."""
    type_literal = message_type.__annotations__["type"]
    return type_literal.__args__[0]  # Get the value from a Literal


class AsyncTyper(typer.Typer):
    """Typer subclass whose commands/callbacks may be `async def`.

    Coroutine functions are wrapped so they run via `asyncio.run`.
    """

    @staticmethod
    def maybe_run_async(decorator, f):
        # Wrap coroutine functions in a synchronous runner; register others as-is.
        if inspect.iscoroutinefunction(f):

            @wraps(f)
            def runner(*args, **kwargs):
                return asyncio.run(f(*args, **kwargs))

            decorator(runner)
        else:
            decorator(f)
        return f

    def callback(self, *args, **kwargs):
        decorator = super().callback(*args, **kwargs)
        return partial(self.maybe_run_async, decorator)

    def command(self, *args, **kwargs):
        decorator = super().command(*args, **kwargs)
        return partial(self.maybe_run_async, decorator)


async def fetch_json(session: ClientSession, url: str) -> dict:
    """GET ``url`` and return the parsed JSON body, raising on HTTP errors."""
    async with session.get(url) as resp:
        resp.raise_for_status()
        return await resp.json()


def extract_valid_eth_address(address: str) -> str:
    """Extract the first well-formed Ethereum address from ``address``.

    Returns the matched 0x-prefixed 40-hex-digit address, or "" when none is found.
    """
    if address:
        pattern = r"0x[a-fA-F0-9]{40}"
        match = re.search(pattern, address)
        if match:
            return match.group(0)
    return ""


async def list_unlinked_keys() -> tuple[list[Path], Optional[MainConfiguration]]:
    """
    List private key files that are not linked to any chain type and return the active MainConfiguration.

    Returns:
        - A tuple containing:
            - A list of unlinked private key files as Path objects.
            - The active MainConfiguration object (the single account in the config file).
    """
    config_home: Union[str, Path] = settings.CONFIG_HOME if settings.CONFIG_HOME else Path.home()
    private_key_dir = Path(config_home, "private-keys")

    if not private_key_dir.exists():
        return [], None

    all_private_key_files = list(private_key_dir.glob("*.key"))

    config: MainConfiguration | None = load_main_configuration(Path(settings.CONFIG_FILE))

    if not config:
        logger.warning("No config file found.")
        return all_private_key_files, None

    active_key_path = config.path

    # Every key file except the one the active configuration points at.
    unlinked_keys: list[Path] = [key_file for key_file in all_private_key_files if key_file != active_key_path]

    return unlinked_keys, config


# Some users had fun adding URLs that are obviously not CRNs.
# If you work for one of these companies, please send a large check to the Aleph team,
# and we may consider removing your domain from the blacklist. Or just use a subdomain.
147 | FORBIDDEN_HOSTS = [ 148 | "amazon.com", 149 | "apple.com", 150 | "facebook.com", 151 | "google.com", 152 | "google.es", 153 | "microsoft.com", 154 | "openai.com", 155 | "twitter.com", 156 | "x.com", 157 | "youtube.com", 158 | ] 159 | 160 | 161 | def sanitize_url(url: str) -> str: 162 | """Ensure that the URL is valid and not obviously irrelevant. 163 | 164 | Args: 165 | url: URL to sanitize. 166 | Returns: 167 | Sanitized URL. 168 | """ 169 | if not url: 170 | msg = "Empty URL" 171 | raise aiohttp.InvalidURL(msg) 172 | parsed_url: ParseResult = urlparse(url) 173 | if parsed_url.scheme not in ["http", "https"]: 174 | msg = f"Invalid URL scheme: {parsed_url.scheme}" 175 | raise aiohttp.InvalidURL(msg) 176 | if parsed_url.hostname in FORBIDDEN_HOSTS: 177 | logger.debug( 178 | f"Invalid URL {url} hostname {parsed_url.hostname} is in the forbidden host list " 179 | f"({', '.join(FORBIDDEN_HOSTS)})" 180 | ) 181 | msg = "Invalid URL host" 182 | raise aiohttp.InvalidURL(msg) 183 | return url.strip("/") 184 | 185 | 186 | def async_lru_cache(async_function): 187 | 188 | @lru_cache(maxsize=0 if "pytest" in sys.modules else 1) 189 | def cached_async_function(*args, **kwargs): 190 | return ensure_future(async_function(*args, **kwargs)) 191 | 192 | return cached_async_function 193 | -------------------------------------------------------------------------------- /src/aleph_client/voucher.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from decimal import Decimal 4 | from typing import Optional, Union 5 | 6 | import aiohttp 7 | from aleph.sdk.client.http import AlephHttpClient 8 | from aleph.sdk.conf import settings 9 | from aleph.sdk.query.filters import PostFilter 10 | from aleph.sdk.query.responses import Post, PostsResponse 11 | from aleph.sdk.types import Account 12 | from aleph_message.models import Chain 13 | from pydantic import BaseModel, Field 14 | 15 | logger = logging.getLogger(__name__) 

# Metadata template shared by fetch_metadata() below.
# NOTE(review): constant name contains a typo ("METDATA") — kept for
# backward compatibility with any external importers.
VOUCHER_METDATA_TEMPLATE_URL = "https://claim.twentysix.cloud/sbt/metadata/{}.json"
VOUCHER_SOL_REGISTRY = "https://api.claim.twentysix.cloud/v1/registry/sol"
VOUCHER_SENDER = "0xB34f25f2c935bCA437C061547eA12851d719dEFb"


class VoucherAttribute(BaseModel):
    """A single trait of a voucher (value plus display metadata)."""

    value: Union[str, Decimal]
    trait_type: str = Field(..., alias="trait_type")
    display_type: Optional[str] = Field(None, alias="display_type")


class VoucherMetadata(BaseModel):
    """Metadata document describing a voucher, as served by the claim API."""

    name: str
    description: str
    external_url: str = Field(..., alias="external_url")
    image: str
    icon: str
    attributes: list[VoucherAttribute]


class Voucher(BaseModel):
    """A claimed voucher: its on-chain id merged with its fetched metadata."""

    id: str
    metadata_id: str = Field(..., alias="metadata_id")
    name: str
    description: str
    external_url: str = Field(..., alias="external_url")
    image: str
    icon: str
    attributes: list[VoucherAttribute]


class VoucherManager:
    """Retrieve vouchers attached to an account across EVM and Solana chains."""

    def __init__(self, account: Optional[Account], chain: Optional[Chain]):
        self.account = account or None
        self.chain = chain or None

    def _resolve_address(self, address: Optional[str] = None) -> str:
        """
        Resolve the address to use. Prefer the provided address, fallback to account.
        """
        if address:
            return address
        if self.account:
            return self.account.get_address()
        error_msg = "No address provided and no account available to resolve address."
        raise ValueError(error_msg)

    async def _fetch_voucher_update(self):
        """
        Fetch the latest EVM voucher update post (not filtered by claimer).

        Returns a list of (voucher_id, voucher_data) pairs.
        """
        async with AlephHttpClient(api_server=settings.API_HOST) as client:
            # Only posts of type "vouchers-update" emitted by the official sender.
            post_filter = PostFilter(types=["vouchers-update"], addresses=[VOUCHER_SENDER])
            vouchers_post: PostsResponse = await client.get_posts(post_filter=post_filter, page_size=1)
            if not vouchers_post.posts:
                return []

            message_post: Post = vouchers_post.posts[0]
            nft_vouchers = message_post.content.get("nft_vouchers", {})
            return list(nft_vouchers.items())  # [(voucher_id, voucher_data)]

    async def _fetch_solana_voucher(self):
        """
        Fetch full Solana voucher registry (unfiltered).

        Best-effort: any network/parse failure yields an empty dict.
        """
        try:
            async with aiohttp.ClientSession() as session:
                try:
                    async with session.get(VOUCHER_SOL_REGISTRY) as resp:
                        if resp.status != 200:
                            return {}

                        try:
                            return await resp.json()
                        except aiohttp.client_exceptions.ContentTypeError:
                            # Registry may answer with a non-JSON content type;
                            # fall back to parsing the raw body.
                            text_data = await resp.text()
                            try:
                                return json.loads(text_data)
                            except json.JSONDecodeError:
                                return {}
                except Exception:
                    return {}
        except Exception:
            return {}

    async def get_all(self, address: Optional[str] = None) -> list[Voucher]:
        """
        Retrieve all vouchers for the account / specific address, across EVM and Solana chains.
        """
        vouchers = []

        # Get EVM vouchers
        evm_vouchers = await self.get_evm_voucher(address=address)
        vouchers.extend(evm_vouchers)

        # Get Solana vouchers
        solana_vouchers = await self.fetch_solana_vouchers(address=address)
        vouchers.extend(solana_vouchers)

        return vouchers

    async def fetch_vouchers_by_chain(self, chain: Chain):
        """Dispatch to the Solana or EVM voucher fetcher depending on `chain`."""
        if chain == Chain.SOL:
            return await self.fetch_solana_vouchers()
        else:
            return await self.get_evm_voucher()

    async def get_evm_voucher(self, address: Optional[str] = None) -> list[Voucher]:
        """
        Retrieve vouchers specific to EVM chains for a specific address.
        """
        resolved_address = self._resolve_address(address=address)
        vouchers: list[Voucher] = []

        nft_vouchers = await self._fetch_voucher_update()
        for voucher_id, voucher_data in nft_vouchers:
            # Keep only vouchers claimed by the resolved address.
            if voucher_data.get("claimer") != resolved_address:
                continue

            metadata_id = voucher_data.get("metadata_id")
            metadata = await self.fetch_metadata(metadata_id)
            if not metadata:
                continue

            voucher = Voucher(
                id=voucher_id,
                metadata_id=metadata_id,
                name=metadata.name,
                description=metadata.description,
                external_url=metadata.external_url,
                image=metadata.image,
                icon=metadata.icon,
                attributes=metadata.attributes,
            )
            vouchers.append(voucher)
        return vouchers

    async def fetch_solana_vouchers(self, address: Optional[str] = None) -> list[Voucher]:
        """
        Fetch Solana vouchers for a specific address.
        """
        resolved_address = self._resolve_address(address=address)
        vouchers: list[Voucher] = []

        registry_data = await self._fetch_solana_voucher()

        claimed_tickets = registry_data.get("claimed_tickets", {})
        batches = registry_data.get("batches", {})

        for ticket_hash, ticket_data in claimed_tickets.items():
            claimer = ticket_data.get("claimer")
            if claimer != resolved_address:
                continue

            # Resolve the ticket's batch to find its metadata id.
            batch_id = ticket_data.get("batch_id")
            metadata_id = None

            if str(batch_id) in batches:
                metadata_id = batches[str(batch_id)].get("metadata_id")

            if metadata_id:
                metadata = await self.fetch_metadata(metadata_id)
                if metadata:
                    voucher = Voucher(
                        id=ticket_hash,
                        metadata_id=metadata_id,
                        name=metadata.name,
                        description=metadata.description,
                        external_url=metadata.external_url,
                        image=metadata.image,
                        icon=metadata.icon,
                        attributes=metadata.attributes,
                    )
                    vouchers.append(voucher)

        return vouchers

    async def fetch_metadata(self, metadata_id: str) -> Optional[VoucherMetadata]:
        """
        Fetch metadata for a given voucher.

        Returns None on any HTTP, network, or validation error.
        """
        # Reuse the module-level template instead of duplicating the URL.
        url = VOUCHER_METDATA_TEMPLATE_URL.format(metadata_id)
        try:
            async with aiohttp.ClientSession() as session:
                try:
                    async with session.get(url) as resp:
                        if resp.status != 200:
                            return None
                        data = await resp.json()
                        return VoucherMetadata.model_validate(data)
                except Exception as e:
                    logger.error(f"Error fetching metadata: {e}")
                    return None
        except Exception as e:
            logger.error(f"Error creating session: {e}")
            return None


# --------------------------------------------------------------------------
# /test.py: https://raw.githubusercontent.com/aleph-im/aleph-client/c444fd7aa3ee6b0ebb4ff94b172955b9174ceddd/test.py
# --------------------------------------------------------------------------
# /tests/__init__.py: https://raw.githubusercontent.com/aleph-im/aleph-client/c444fd7aa3ee6b0ebb4ff94b172955b9174ceddd/tests/__init__.py
# --------------------------------------------------------------------------
# /tests/test_post.json:
#   {
#       "type": "POST",
#       "hello": "world"
#   }
# --------------------------------------------------------------------------
# /tests/unit/__init__.py: https://raw.githubusercontent.com/aleph-im/aleph-client/c444fd7aa3ee6b0ebb4ff94b172955b9174ceddd/tests/unit/__init__.py
# --------------------------------------------------------------------------
# /tests/unit/conftest.py:
# --------------------------------------------------------------------------
"""
Dummy conftest.py for aleph_client.

If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""

from collections.abc import Generator
from pathlib import Path
from tempfile import NamedTemporaryFile

import pytest
from aleph.sdk.chains.common import generate_key


@pytest.fixture
def new_config_file() -> Generator[Path, None, None]:
    # Empty temporary config file; deleted when the fixture scope ends.
    with NamedTemporaryFile(suffix=".json") as config_file:
        yield Path(config_file.name)


@pytest.fixture
def empty_account_file() -> Generator[Path, None, None]:
    # Empty temporary private-key file; deleted when the fixture scope ends.
    with NamedTemporaryFile(suffix=".key") as key_file:
        yield Path(key_file.name)


@pytest.fixture
def env_files(new_config_file: Path, empty_account_file: Path) -> Generator[tuple[Path, Path], None, None]:
    # Wire the key file into the config and fill it with a fresh ETH key,
    # yielding (key_file, config_file) for tests that need a ready account.
    new_config_file.write_text(f'{{"path": "{empty_account_file}", "chain": "ETH"}}')
    empty_account_file.write_bytes(generate_key())
    yield empty_account_file, new_config_file


# --------------------------------------------------------------------------
# /tests/unit/mocks.py:
# --------------------------------------------------------------------------
from decimal import Decimal
from typing import Any
from unittest.mock import AsyncMock, MagicMock

from aleph.sdk.chains.evm import EVMAccount
from aleph.sdk.conf import settings
from pydantic import BaseModel, ConfigDict

from aleph_client.commands.node import NodeInfo

# Change to Aleph testnet
# settings.API_HOST = "https://api.twentysix.testnet.network"
settings.API_HOST = "http://51.159.223.120:4024"  # TODO: change it

# Utils
FAKE_PRIVATE_KEY = b"cafe" * 8
FAKE_PUBKEY_FILE = "/path/fake/pubkey"
FAKE_ADDRESS_EVM = "0x00001A0e6B9a46Be48a294D74D897d9C48678862"
# FAKE_STORE_HASH = "102682ea8bcc0cec9c42f32fbd2660286b4eb31003108440988343726304607a"  # Has to exist on Aleph Testnet
# FAKE_STORE_HASH_CONTENT_FILE_CID = "QmX8K1c22WmQBAww5ShWQqwMiFif7XFrJD6iFBj7skQZXW"  # From FAKE_STORE_HASH message
# FAKE_STORE_HASH_PUBLISHER = "0x74F82AC22C1EB20dDb9799284FD8D60eaf48A8fb"  # From FAKE_STORE_HASH message
FAKE_STORE_HASH = "5b868dc8c2df0dd9bb810b7a31cc50c8ad1e6569905e45ab4fd2eee36fecc4d2"  # TODO: change it
FAKE_STORE_HASH_CONTENT_FILE_CID = "QmXSEnpQCnUfeGFoSjY1XAK1Cuad5CtAaqyachGTtsFSuA"  # TODO: change it
FAKE_STORE_HASH_PUBLISHER = "0xe0aaF578B287de16852dbc54Ae34a263FF2F4b9E"  # TODO: change it
FAKE_VM_HASH = "ab12" * 16
FAKE_PROGRAM_HASH = "cd34" * 16
FAKE_PROGRAM_HASH_2 = "ef56" * 16
FAKE_CRN_HASH = "cb764fe80f76cd5ec395952263fcbf0f5d2cc0dfe1ed98c90e13734b3fb2df3e"
FAKE_CRN_URL = "https://coco-1.crn.aleph.sh"
FAKE_FLOW_HASH = "0xfake_flow_hash"


class Dict(BaseModel):
    """Permissive pydantic model that accepts arbitrary key/value pairs."""

    model_config = ConfigDict(extra="allow")

    def __init__(self, **data: Any):
        """Allow any key-value pair to be passed to the model."""
        super().__init__(**data)

    def to_dict(self) -> dict[str, Any]:
        return self.model_dump()


def create_test_account() -> EVMAccount:
    """Return a deterministic EVM test account built from the fake key."""
    return EVMAccount(private_key=FAKE_PRIVATE_KEY)


def create_mock_load_account():
    """Mock `_load_account`: a funded account able to transact and run flows."""
    mock_account = create_test_account()
    mock_loader = MagicMock(return_value=mock_account)
    mock_loader.return_value.get_super_token_balance = MagicMock(return_value=Decimal(10000 * (10**18)))
    mock_loader.return_value.can_transact = MagicMock(return_value=True)
    mock_loader.return_value.can_start_flow = MagicMock(return_value=True)
    mock_loader.return_value.manage_flow = AsyncMock(return_value=FAKE_FLOW_HASH)
    return mock_loader


async def mock_fetch_nodes() -> NodeInfo:
    """Return a static node aggregate: one core channel node and one CRN."""
    node_aggregate = {
        "address": "0xa1B3bb7d2332383D96b7796B908fB7f7F3c2Be10",
        "data": {
            "corechannel": {
                "nodes": [
                    {
                        "hash": "37bcf3b0de2b95168557dccd757e3fb9310f6182eb35173dd929e535dc8d18cc",
                        "name": "Aleph.Cloud.One",
                        "time": 1608436347.148,
                        "owner": "0x13CA00cD3BB1ded822AFF447a6fEC5ed9DaeCD65",
                        "score": 0.95672722675568,
                        "banner": "",
                        "locked": False,
                        "reward": "0x462b25B706688a7174d675e4787d2DBEE72aB71f",
                        "status": "active",
                        "address": "",
                        "manager": "",
                        "picture": "81410c35ea8d31569011c091d7c780e83b8e8d44bf292e6f8bf6316b162dda9e",
                        "stakers": {
                            "0x160f9C91858940BEBA3bacAD2Fc1c4D32635913b": 21359.3722761429,
                            "0x161F0F8d70971EB7fE65Fa3558e48442c338EBde": 16778.2001223581,
                            "0x2BACCdD22C27F84DE8a8EeC0aB7B2a4766E7C02d": 24072.424430756,
                        },
                        "has_bonus": True,
                        "authorized": [],
                        "description": (
                            "Supporting Aleph from NULS POCM through to running a node. Moshe is a "
                            "genius!\n\nPowered by Node Forge."
                        ),
                        "performance": 0.915326986415614,
                        "multiaddress": "/ip4/51.79.82.13/tcp/4025/p2p/QmfKB9q89aCX3wqkiqgis9SHfx2MznGd6LTsqektdKUBg5",
                        "total_staked": 1032817.18542335,
                        "score_updated": True,
                        "stream_reward": "",
                        "inactive_since": None,
                        "resource_nodes": [
                            "d1401d7f2e4487b1b956acf8de6a48de5bc5ed9637516f901dfe4eb9f74ac214",
                            "3b06f6fb75902821eeeddf713837f6a2d38aedff8a7c66c7fa3192b461df6e6a",
                            "3fe5eecb0dc99be68e197d1ccf037aa4274d30b0f94f955cf765545bebad33c3",
                            "179317d603edf7c005286dcb79968be294218fdd73ccee3bef719006a0db664c",
                            "936d1ac993deef3b09c06674e05aa742f4270ec337b1d60ec8021fccaf8f6479",
                        ],
                        "decentralization": 0.534862998440633,
                        "registration_url": "",
                        "terms_and_conditions": "",
                    },
                ],
                "resource_nodes": [
                    {
                        "hash": "cb764fe80f76cd5ec395952263fcbf0f5d2cc0dfe1ed98c90e13734b3fb2df3e",
                        "name": "Aleph.im Confidential Host 1",
                        "time": 1723565390.963,
                        "type": "compute",
                        "owner": "0xFeF2b33478f906eDE5ee96110b2342861cF1569A",
                        "score": 0.931334273816828,
                        "banner": "",
                        "locked": False,
                        "parent": "c5a1295c20d5fb1df638e4ff7dee2239ab88c2843899bd26e4b0200a9f5ca82b",
                        "reward": "0xFeF2b33478f906eDE5ee96110b2342861cF1569A",
                        "status": "linked",
                        "address": "https://coco-1.crn.aleph.sh/",
                        "manager": "",
                        "picture": "",
                        "authorized": "",
                        "description": "",
                        "performance": 0.867383529585918,
                        "multiaddress": "",
                        "score_updated": True,
                        "stream_reward": "0xFeF2b33478f906eDE5ee96110b2342861cF1569A",
                        "inactive_since": None,
                        "decentralization": 0.991886443254677,
                        "registration_url": "",
                        "terms_and_conditions": "",
                    }
                ],
            }
        },
        "info": {},
    }
    return NodeInfo(**node_aggregate)


# --------------------------------------------------------------------------
# /tests/unit/test_aggregate.py:
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import json 5 | from unittest.mock import AsyncMock, MagicMock, patch 6 | 7 | import aiohttp 8 | import pytest 9 | 10 | from aleph_client.commands.aggregate import ( 11 | authorize, 12 | forget, 13 | get, 14 | list_aggregates, 15 | permissions, 16 | post, 17 | revoke, 18 | ) 19 | 20 | from .mocks import FAKE_ADDRESS_EVM, create_mock_load_account 21 | 22 | FAKE_AGGREGATE_DATA = { 23 | "AI": { 24 | "subscription": "premium", 25 | "models": { 26 | "chatgpt": True, 27 | "claude": False, 28 | "libertai": True, 29 | }, 30 | "active": True, 31 | }, 32 | "security": {"authorizations": [{"address": FAKE_ADDRESS_EVM, "types": ["POST"]}]}, 33 | } 34 | 35 | 36 | @contextlib.asynccontextmanager 37 | async def mock_client_session_get(self, aggr_link): 38 | yield AsyncMock( 39 | status=200, 40 | raise_for_status=MagicMock(), 41 | json=AsyncMock(return_value={"data": FAKE_AGGREGATE_DATA}), 42 | ) 43 | 44 | 45 | def create_mock_auth_client(return_fetch=FAKE_AGGREGATE_DATA): 46 | mock_auth_client = AsyncMock( 47 | create_aggregate=AsyncMock(return_value=(MagicMock(), "processed")), 48 | fetch_aggregate=AsyncMock(return_value=return_fetch), 49 | ) 50 | mock_auth_client_class = MagicMock() 51 | mock_auth_client_class.return_value.__aenter__ = AsyncMock(return_value=mock_auth_client) 52 | return mock_auth_client_class, mock_auth_client 53 | 54 | 55 | @pytest.mark.parametrize( 56 | ids=["by_key_only", "by_key_and_subkey", "by_key_and_subkeys"], 57 | argnames="args", 58 | argvalues=[ 59 | {"key": "AI"}, # by key only 60 | {"key": "AI", "subkeys": "models"}, # with subkey 61 | {"key": "AI", "subkeys": "models,subscription"}, # with subkeys 62 | ], 63 | ) 64 | @pytest.mark.asyncio 65 | async def test_forget(capsys, args): 66 | mock_load_account = create_mock_load_account() 67 | mock_list_aggregates = 
AsyncMock(return_value=FAKE_AGGREGATE_DATA) 68 | mock_auth_client_class, mock_auth_client = create_mock_auth_client() 69 | 70 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 71 | @patch("aleph_client.commands.aggregate.list_aggregates", mock_list_aggregates) 72 | @patch("aleph_client.commands.aggregate.AuthenticatedAlephHttpClient", mock_auth_client_class) 73 | async def run_forget(aggr_spec): 74 | print() # For better display when pytest -v -s 75 | return await forget(**aggr_spec) 76 | 77 | result = await run_forget(args) 78 | assert result is True 79 | mock_load_account.assert_called_once() 80 | if "subkeys" not in args: 81 | mock_list_aggregates.assert_called_once() 82 | mock_auth_client.create_aggregate.assert_called_once() 83 | captured = capsys.readouterr() 84 | assert captured.out.endswith("has been deleted\n") 85 | 86 | 87 | @pytest.mark.parametrize( 88 | ids=["by_key_only", "by_key_and_subkey"], 89 | argnames="args", 90 | argvalues=[ 91 | {"key": "AI", "content": '{"test": "ok"}'}, # by key only 92 | { 93 | "key": "AI", 94 | "subkey": "models", 95 | "content": '{"chatgpt": true, "claude": true, "libertai": true}', 96 | }, # with subkey 97 | ], 98 | ) 99 | @pytest.mark.asyncio 100 | async def test_post(capsys, args): 101 | mock_load_account = create_mock_load_account() 102 | mock_auth_client_class, mock_auth_client = create_mock_auth_client() 103 | 104 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 105 | @patch("aleph_client.commands.aggregate.AuthenticatedAlephHttpClient", mock_auth_client_class) 106 | async def run_post(aggr_spec): 107 | print() # For better display when pytest -v -s 108 | return await post(**aggr_spec) 109 | 110 | result = await run_post(args) 111 | assert result is True 112 | mock_load_account.assert_called_once() 113 | mock_auth_client.create_aggregate.assert_called_once() 114 | captured = capsys.readouterr() 115 | assert captured.out.endswith("has been created/updated\n") 116 
| 117 | 118 | @pytest.mark.parametrize( 119 | ids=["by_key_only", "by_key_and_subkey", "by_key_and_subkeys"], 120 | argnames=["args", "expected"], 121 | argvalues=[ 122 | ({"key": "AI"}, FAKE_AGGREGATE_DATA["AI"]), # by key only 123 | ( # with subkey 124 | {"key": "AI", "subkeys": "subscription"}, 125 | {"subscription": FAKE_AGGREGATE_DATA["AI"]["subscription"]}, # type: ignore 126 | ), 127 | ( # with subkeys 128 | {"key": "AI", "subkeys": "subscription,models"}, 129 | {"subscription": FAKE_AGGREGATE_DATA["AI"]["subscription"], "models": FAKE_AGGREGATE_DATA["AI"]["models"]}, # type: ignore 130 | ), 131 | ], 132 | ) 133 | @pytest.mark.asyncio 134 | async def test_get(capsys, args, expected): 135 | mock_load_account = create_mock_load_account() 136 | mock_auth_client_class, mock_auth_client = create_mock_auth_client(return_fetch=FAKE_AGGREGATE_DATA["AI"]) 137 | 138 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 139 | @patch("aleph_client.commands.aggregate.AuthenticatedAlephHttpClient", mock_auth_client_class) 140 | async def run_get(aggr_spec): 141 | print() # For better display when pytest -v -s 142 | return await get(**aggr_spec) 143 | 144 | aggregate = await run_get(args) 145 | mock_load_account.assert_called_once() 146 | mock_auth_client.fetch_aggregate.assert_called_once() 147 | captured = capsys.readouterr() 148 | assert aggregate == expected and expected == json.loads(captured.out) 149 | 150 | 151 | @pytest.mark.asyncio 152 | async def test_list_aggregates(): 153 | mock_load_account = create_mock_load_account() 154 | 155 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 156 | @patch.object(aiohttp.ClientSession, "get", mock_client_session_get) 157 | async def run_list_aggregates(): 158 | print() # For better display when pytest -v -s 159 | return await list_aggregates(address=FAKE_ADDRESS_EVM) 160 | 161 | aggregates = await run_list_aggregates() 162 | mock_load_account.assert_called_once() 163 | assert 
aggregates == FAKE_AGGREGATE_DATA 164 | 165 | 166 | @pytest.mark.asyncio 167 | async def test_authorize(capsys): 168 | mock_load_account = create_mock_load_account() 169 | mock_get = AsyncMock(return_value=FAKE_AGGREGATE_DATA["security"]) 170 | mock_post = AsyncMock(return_value=True) 171 | 172 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 173 | @patch("aleph_client.commands.aggregate.get", mock_get) 174 | @patch("aleph_client.commands.aggregate.post", mock_post) 175 | async def run_authorize(): 176 | print() # For better display when pytest -v -s 177 | return await authorize(address=FAKE_ADDRESS_EVM, types="PROGRAM,FORGET") 178 | 179 | await run_authorize() 180 | mock_load_account.assert_called_once() 181 | mock_get.assert_called_once() 182 | mock_post.assert_called_once() 183 | captured = capsys.readouterr() 184 | assert captured.out.endswith(f"Permissions has been added for {FAKE_ADDRESS_EVM}\n") 185 | 186 | 187 | @pytest.mark.asyncio 188 | async def test_revoke(capsys): 189 | mock_load_account = create_mock_load_account() 190 | mock_get = AsyncMock(return_value=FAKE_AGGREGATE_DATA["security"]) 191 | mock_post = AsyncMock(return_value=True) 192 | 193 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 194 | @patch("aleph_client.commands.aggregate.get", mock_get) 195 | @patch("aleph_client.commands.aggregate.post", mock_post) 196 | async def run_revoke(): 197 | print() # For better display when pytest -v -s 198 | return await revoke(address=FAKE_ADDRESS_EVM) 199 | 200 | await run_revoke() 201 | mock_load_account.assert_called_once() 202 | mock_get.assert_called_once() 203 | mock_post.assert_called_once() 204 | captured = capsys.readouterr() 205 | assert captured.out.endswith(f"Permissions has been deleted for {FAKE_ADDRESS_EVM}\n") 206 | 207 | 208 | @pytest.mark.asyncio 209 | async def test_permissions(): 210 | mock_load_account = create_mock_load_account() 211 | mock_get = 
AsyncMock(return_value=FAKE_AGGREGATE_DATA["security"]) 212 | 213 | @patch("aleph_client.commands.aggregate._load_account", mock_load_account) 214 | @patch("aleph_client.commands.aggregate.get", mock_get) 215 | async def run_permissions(): 216 | print() # For better display when pytest -v -s 217 | return await permissions(address=FAKE_ADDRESS_EVM, json=True) 218 | 219 | authorizations = await run_permissions() 220 | mock_load_account.assert_called_once() 221 | mock_get.assert_called_once() 222 | assert authorizations == FAKE_AGGREGATE_DATA["security"]["authorizations"] # type: ignore 223 | -------------------------------------------------------------------------------- /tests/unit/test_commands.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from pathlib import Path 4 | from tempfile import NamedTemporaryFile 5 | 6 | from aleph.sdk.chains.ethereum import ETHAccount 7 | from aleph.sdk.conf import settings 8 | from typer.testing import CliRunner 9 | 10 | from aleph_client.__main__ import app 11 | 12 | from .mocks import ( 13 | FAKE_STORE_HASH, 14 | FAKE_STORE_HASH_CONTENT_FILE_CID, 15 | FAKE_STORE_HASH_PUBLISHER, 16 | ) 17 | 18 | runner = CliRunner() 19 | 20 | 21 | def get_account(my_account_file: Path) -> ETHAccount: 22 | with open(my_account_file, "rb") as fd: 23 | private_key = fd.read() 24 | return ETHAccount(private_key=private_key) 25 | 26 | 27 | def get_test_message(account: ETHAccount): 28 | return { 29 | "chain": "ETH", 30 | "sender": account.get_address(), 31 | "type": "AGGREGATE", 32 | "item_hash": "0x1234", 33 | } 34 | 35 | 36 | def test_account_create(env_files): 37 | settings.CONFIG_FILE = env_files[1] 38 | old_key = env_files[0].read_bytes() 39 | result = runner.invoke( 40 | app, 41 | ["account", "create", "--replace", "--private-key-file", str(env_files[0]), "--chain", "ETH"], 42 | ) 43 | assert result.exit_code == 0, result.stdout 44 | new_key = env_files[0].read_bytes() 45 | 
assert new_key != old_key 46 | 47 | 48 | def test_account_import_evm(env_files): 49 | settings.CONFIG_FILE = env_files[1] 50 | old_key = env_files[0].read_bytes() 51 | result = runner.invoke( 52 | app, 53 | [ 54 | "account", 55 | "create", 56 | "--replace", 57 | "--private-key-file", 58 | str(env_files[0]), 59 | "--chain", 60 | "ETH", 61 | "--private-key", 62 | "0x5f5da4cee72286b9aec06fffe130e04e4b35583c1bf28b4d1992f6d69df1e076", 63 | ], 64 | ) 65 | assert result.exit_code == 0, result.stdout 66 | new_key = env_files[0].read_bytes() 67 | assert new_key != old_key 68 | 69 | 70 | def test_account_import_evm_base32(env_files): 71 | settings.CONFIG_FILE = env_files[1] 72 | old_key = env_files[0].read_bytes() 73 | result = runner.invoke( 74 | app, 75 | [ 76 | "account", 77 | "create", 78 | "--replace", 79 | "--private-key-file", 80 | str(env_files[0]), 81 | "--chain", 82 | "ETH", 83 | "--private-key", 84 | "JXINYIKE2QOUXCZRAFA2FG4AMYYPEOLS4OIGEZ2WK4WCQDWYSAMQ====", 85 | "--key-format", 86 | "base32", 87 | ], 88 | ) 89 | assert result.exit_code == 0, result.stdout 90 | new_key = env_files[0].read_bytes() 91 | assert new_key != old_key 92 | 93 | 94 | def test_account_import_evm_base64(env_files): 95 | settings.CONFIG_FILE = env_files[1] 96 | old_key = env_files[0].read_bytes() 97 | result = runner.invoke( 98 | app, 99 | [ 100 | "account", 101 | "create", 102 | "--replace", 103 | "--private-key-file", 104 | str(env_files[0]), 105 | "--chain", 106 | "ETH", 107 | "--private-key", 108 | "TdDcIUTUHUuLMQFBopuAZjDyOXLjkGJnVlcsKA7YkBk=", 109 | "--key-format", 110 | "base64", 111 | ], 112 | ) 113 | assert result.exit_code == 0, result.stdout 114 | new_key = env_files[0].read_bytes() 115 | assert new_key != old_key 116 | 117 | 118 | def test_account_import_evm_format_invalid(env_files): 119 | """Test that an invalid key format raises an error.""" 120 | settings.CONFIG_FILE = env_files[1] 121 | result = runner.invoke( 122 | app, 123 | [ 124 | "account", 125 | "create", 126 | 
"--replace", 127 | "--private-key-file", 128 | str(env_files[0]), 129 | "--chain", 130 | "ETH", 131 | "--private-key", 132 | "TdDcIUTUHUuLMQFBopuAZjDyOXLjkGJnVlcsKA7YkBk=", 133 | "--key-format", 134 | "invalid", 135 | ], 136 | ) 137 | assert result.exit_code != 0, result.stdout 138 | 139 | 140 | def test_account_import_sol(env_files): 141 | settings.CONFIG_FILE = env_files[1] 142 | old_key = env_files[0].read_bytes() 143 | result = runner.invoke( 144 | app, 145 | [ 146 | "account", 147 | "create", 148 | "--replace", 149 | "--private-key-file", 150 | str(env_files[0]), 151 | "--chain", 152 | "SOL", 153 | "--private-key", 154 | "2ub2ka8FFjDtfz5m9i2N6HvurgHaHDPD1nwVdmWy7ZhvMvGWbxaAMaPn8RECCerzo9Au2AToPXHzE6jsjjWscnHt", 155 | ], 156 | ) 157 | assert result.exit_code == 0, result.stdout 158 | new_key = env_files[0].read_bytes() 159 | assert new_key != old_key 160 | 161 | 162 | def test_account_address(env_files): 163 | settings.CONFIG_FILE = env_files[1] 164 | result = runner.invoke(app, ["account", "address", "--private-key-file", str(env_files[0])]) 165 | assert result.exit_code == 0 166 | assert result.stdout.startswith("✉ Addresses for Active Account ✉\n\nEVM: 0x") 167 | 168 | 169 | def test_account_chain(env_files): 170 | settings.CONFIG_FILE = env_files[1] 171 | result = runner.invoke(app, ["account", "chain"]) 172 | assert result.exit_code == 0 173 | assert result.stdout.startswith("Active Chain:") 174 | 175 | 176 | def test_account_path(): 177 | result = runner.invoke(app, ["account", "path"]) 178 | assert result.exit_code == 0 179 | assert result.stdout.startswith("Aleph Home directory: ") 180 | 181 | 182 | def test_account_show(env_files): 183 | settings.CONFIG_FILE = env_files[1] 184 | result = runner.invoke(app, ["account", "show", "--private-key-file", str(env_files[0])]) 185 | assert result.exit_code == 0 186 | assert result.stdout.startswith("✉ Addresses for Active Account ✉\n\nEVM: 0x") 187 | 188 | 189 | def test_account_export_private_key(env_files): 
190 | settings.CONFIG_FILE = env_files[1] 191 | result = runner.invoke(app, ["account", "export-private-key", "--private-key-file", str(env_files[0])]) 192 | assert result.exit_code == 0 193 | assert result.stdout.startswith("⚠️ Private Keys for Active Account ⚠️\n\nEVM: 0x") 194 | 195 | 196 | def test_account_list(env_files): 197 | settings.CONFIG_FILE = env_files[1] 198 | result = runner.invoke(app, ["account", "list"]) 199 | assert result.exit_code == 0 200 | assert result.stdout.startswith("🌐 Chain Infos 🌐") 201 | 202 | 203 | def test_account_sign_bytes(env_files): 204 | settings.CONFIG_FILE = env_files[1] 205 | result = runner.invoke(app, ["account", "sign-bytes", "--message", "test", "--chain", "ETH"]) 206 | assert result.exit_code == 0 207 | assert result.stdout.startswith("\nSignature:") 208 | 209 | 210 | def test_account_balance(env_files): 211 | settings.CONFIG_FILE = env_files[1] 212 | result = runner.invoke( 213 | app, ["account", "balance", "--address", "0xCAfEcAfeCAfECaFeCaFecaFecaFECafECafeCaFe", "--chain", "ETH"] 214 | ) 215 | assert result.exit_code == 0 216 | assert result.stdout.startswith("╭─ Account Infos") 217 | 218 | 219 | def test_account_config(env_files): 220 | settings.CONFIG_FILE = env_files[1] 221 | result = runner.invoke(app, ["account", "config", "--private-key-file", str(env_files[0]), "--chain", "ETH"]) 222 | assert result.exit_code == 0 223 | assert result.stdout.startswith("New Default Configuration: ") 224 | 225 | 226 | def test_message_get(): 227 | # Use subprocess to avoid border effects between tests caused by the initialisation 228 | # of the aiohttp client session out of an async context in the SDK. This avoids 229 | # a "no running event loop" error when running several tests back to back. 
230 | result = runner.invoke( 231 | app, 232 | [ 233 | "message", 234 | "get", 235 | FAKE_STORE_HASH, 236 | ], 237 | ) 238 | assert result.exit_code == 0 239 | assert FAKE_STORE_HASH_PUBLISHER in result.stdout 240 | 241 | 242 | def test_message_find(): 243 | result = runner.invoke( 244 | app, 245 | [ 246 | "message", 247 | "find", 248 | "--pagination=1", 249 | "--page=1", 250 | "--start-date=1234", 251 | "--chains=ETH", 252 | f"--hashes={FAKE_STORE_HASH}", 253 | ], 254 | ) 255 | assert result.exit_code == 0 256 | assert FAKE_STORE_HASH_PUBLISHER in result.stdout 257 | assert FAKE_STORE_HASH in result.stdout 258 | 259 | 260 | def test_post_message(env_files): 261 | settings.CONFIG_FILE = env_files[1] 262 | test_file_path = Path(__file__).parent.parent / "test_post.json" 263 | result = runner.invoke( 264 | app, 265 | [ 266 | "message", 267 | "post", 268 | "--private-key-file", 269 | str(env_files[0]), 270 | "--path", 271 | str(test_file_path), 272 | ], 273 | ) 274 | assert result.exit_code == 0 275 | assert "item_hash" in result.stdout 276 | 277 | 278 | def test_sign_message(env_files): 279 | settings.CONFIG_FILE = env_files[1] 280 | account = get_account(env_files[0]) 281 | message = get_test_message(account) 282 | result = runner.invoke( 283 | app, 284 | [ 285 | "message", 286 | "sign", 287 | "--private-key-file", 288 | str(env_files[0]), 289 | "--message", 290 | json.dumps(message), 291 | ], 292 | ) 293 | 294 | assert result.exit_code == 0 295 | assert "signature" in result.stdout 296 | 297 | 298 | def test_sign_message_stdin(env_files): 299 | settings.CONFIG_FILE = env_files[1] 300 | account = get_account(env_files[0]) 301 | message = get_test_message(account) 302 | result = runner.invoke( 303 | app, 304 | [ 305 | "message", 306 | "sign", 307 | "--private-key-file", 308 | str(env_files[0]), 309 | ], 310 | input=json.dumps(message), 311 | ) 312 | 313 | assert result.exit_code == 0 314 | assert "signature" in result.stdout 315 | 316 | 317 | def test_sign_raw(): 318 
| result = runner.invoke( 319 | app, 320 | [ 321 | "account", 322 | "sign-bytes", 323 | "--message", 324 | "some message", 325 | ], 326 | ) 327 | 328 | assert result.exit_code == 0 329 | assert "0x" in result.stdout 330 | 331 | 332 | def test_sign_raw_stdin(): 333 | message = "some message" 334 | result = runner.invoke( 335 | app, 336 | [ 337 | "account", 338 | "sign-bytes", 339 | ], 340 | input=message, 341 | ) 342 | 343 | assert result.exit_code == 0 344 | assert "0x" in result.stdout 345 | 346 | 347 | def test_file_upload(): 348 | # Test upload a file to aleph network by creating a file and upload it to an aleph node 349 | with NamedTemporaryFile() as temp_file: 350 | temp_file.write(b"Hello World \n") 351 | result = runner.invoke( 352 | app, 353 | ["file", "upload", temp_file.name], 354 | ) 355 | assert result.exit_code == 0 356 | assert result.stdout is not None 357 | 358 | 359 | def test_file_download(): 360 | # Test download a file from aleph network 361 | ipfs_cid = "QmeomffUNfmQy76CQGy9NdmqEnnHU9soCexBnGU3ezPHVH" 362 | result = runner.invoke( 363 | app, 364 | [ 365 | "file", 366 | "download", 367 | ipfs_cid, 368 | ], # 5 bytes file 369 | ) 370 | assert result.exit_code == 0 371 | assert result.stdout is not None 372 | os.remove(ipfs_cid) 373 | 374 | 375 | def test_file_download_only_info(): 376 | # Test retrieve the underlying content cid 377 | result = runner.invoke( 378 | app, 379 | [ 380 | "file", 381 | "download", 382 | FAKE_STORE_HASH, 383 | "--only-info", 384 | ], 385 | standalone_mode=False, 386 | ) 387 | assert result.exit_code == 0 388 | assert result.return_value.model_dump()["hash"] == FAKE_STORE_HASH_CONTENT_FILE_CID 389 | 390 | 391 | def test_file_list(): 392 | result = runner.invoke( 393 | app, 394 | [ 395 | "file", 396 | "list", 397 | ], 398 | ) 399 | 400 | assert result.exit_code == 0 401 | assert "0x" in result.stdout 402 | -------------------------------------------------------------------------------- /tests/unit/test_init.py: 
-------------------------------------------------------------------------------- 1 | from aleph_client import __version__ 2 | 3 | 4 | def test_version(): 5 | assert __version__ != "" 6 | -------------------------------------------------------------------------------- /tests/unit/test_node.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from aleph_client.commands.node import compute, core 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_node_core(capsys): 10 | await core( 11 | json=False, 12 | active=True, 13 | address=None, 14 | ccn_hash=None, 15 | debug=False, 16 | ) 17 | captured = capsys.readouterr() 18 | assert "Core Channel Node Information" in captured.out 19 | 20 | 21 | @pytest.mark.asyncio 22 | async def test_node_compute(capsys): 23 | await compute( 24 | json=False, 25 | active=True, 26 | address=None, 27 | payg_receiver=None, 28 | crn_url=None, 29 | crn_hash=None, 30 | ccn_hash=None, 31 | debug=False, 32 | ) 33 | captured = capsys.readouterr() 34 | assert "Compute Node Information" in captured.out 35 | -------------------------------------------------------------------------------- /tests/unit/test_pricing.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from aleph_client.commands.pricing import GroupEntity, prices_for_service 6 | 7 | 8 | @pytest.mark.parametrize( 9 | ids=list(GroupEntity), 10 | argnames="args", 11 | argvalues=list(GroupEntity), 12 | ) 13 | @pytest.mark.asyncio 14 | async def test_prices_for_service(capsys, args): 15 | print() # For better display when pytest -v -s 16 | await prices_for_service(service=args) 17 | captured = capsys.readouterr() 18 | assert captured.out.startswith("\n╭─ Pricing:") 19 | -------------------------------------------------------------------------------- /tests/unit/test_program.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import random 5 | from pathlib import Path 6 | from unittest.mock import AsyncMock, MagicMock, patch 7 | 8 | import aiohttp 9 | import pytest 10 | from aleph.sdk.conf import settings 11 | from aleph_message.models import Chain 12 | 13 | from aleph_client.commands.program import ( 14 | delete, 15 | list_programs, 16 | logs, 17 | persist, 18 | runtime_checker, 19 | unpersist, 20 | update, 21 | upload, 22 | ) 23 | 24 | from .mocks import ( 25 | FAKE_ADDRESS_EVM, 26 | FAKE_PROGRAM_HASH, 27 | FAKE_PROGRAM_HASH_2, 28 | FAKE_STORE_HASH, 29 | FAKE_VM_HASH, 30 | Dict, 31 | create_mock_load_account, 32 | ) 33 | 34 | 35 | def create_mock_program_message( 36 | mock_account, program_item_hash=None, internet=False, persistent=False, allow_amend=True 37 | ): 38 | if not program_item_hash: 39 | tmp = list(FAKE_PROGRAM_HASH) 40 | random.shuffle(tmp) 41 | program_item_hash = "".join(tmp) 42 | program = Dict( 43 | chain=Chain.ETH, 44 | sender=mock_account.get_address(), 45 | type="vm-function", 46 | channel="ALEPH-CLOUDSOLUTIONS", 47 | confirmed=True, 48 | item_type="inline", 49 | item_hash=program_item_hash, 50 | content=Dict( 51 | item_type="storage", # for fake store message by convenience 52 | type="vm-function", 53 | address=mock_account.get_address(), 54 | time=1734037086.2333803, 55 | payment=Dict(chain=Chain.ETH, receiver=None, type="hold"), 56 | metadata={ 57 | "name": f"mock_program{'_internet' if internet else ''}" 58 | f"{'_persistent' if persistent else ''}" 59 | f"{'_updatable' if allow_amend else ''}", 60 | }, 61 | environment=Dict(internet=internet), 62 | resources=Dict(vcpus=1, memory=1024, seconds=30), 63 | volumes=[ 64 | Dict(name="immutable", mount="/opt/packages", ref=FAKE_STORE_HASH), 65 | Dict(name="ephemeral", mount="/opt/temp", ephemeral=True, size_mib=1024), 66 | Dict(name="persistent", mount="/opt/utils", 
persistence=Dict(value="host"), size_mib=1024), 67 | ], 68 | code=Dict(encoding="squashfs", entrypoint="main:app", ref=FAKE_STORE_HASH), 69 | runtime=Dict(ref=FAKE_STORE_HASH), 70 | on=Dict(http=True, persistent=persistent), 71 | allow_amend=allow_amend, 72 | ), 73 | ) 74 | return program 75 | 76 | 77 | def create_mock_program_messages(mock_account): 78 | return AsyncMock( 79 | return_value=[ 80 | create_mock_program_message(mock_account, allow_amend=False), 81 | create_mock_program_message(mock_account, internet=True, allow_amend=False), 82 | create_mock_program_message(mock_account, persistent=True, allow_amend=False), 83 | create_mock_program_message(mock_account), 84 | ] 85 | ) 86 | 87 | 88 | def create_mock_auth_client(mock_account, swap_persistent=False): 89 | mock_response_get_message = create_mock_program_message(mock_account, persistent=swap_persistent) 90 | mock_response_get_message_2 = create_mock_program_message( 91 | mock_account, program_item_hash=FAKE_PROGRAM_HASH_2, persistent=not swap_persistent 92 | ) 93 | mock_auth_client = AsyncMock( 94 | get_messages=AsyncMock(), 95 | get_message=AsyncMock(return_value=mock_response_get_message), 96 | create_store=AsyncMock(return_value=[MagicMock(item_hash=FAKE_STORE_HASH), 200]), 97 | create_program=AsyncMock(return_value=[MagicMock(item_hash=FAKE_PROGRAM_HASH), 200]), 98 | forget=AsyncMock(return_value=(MagicMock(), 200)), 99 | submit=AsyncMock(return_value=[mock_response_get_message_2, 200, MagicMock()]), 100 | get_estimated_price=AsyncMock( 101 | return_value=MagicMock( 102 | required_tokens=1000, 103 | payment_type="hold", 104 | ) 105 | ), 106 | get_program_price=AsyncMock( 107 | return_value=MagicMock( 108 | required_tokens=1000, 109 | payment_type="hold", 110 | ) 111 | ), 112 | ) 113 | mock_auth_client_class = MagicMock() 114 | mock_auth_client_class.return_value.__aenter__ = AsyncMock(return_value=mock_auth_client) 115 | return mock_auth_client_class, mock_auth_client 116 | 117 | 118 | 
@contextlib.asynccontextmanager
async def vm_client_operate(vm_id, operation, method="GET"):
    """Fake VmClient.operate(): yields a 200 response carrying one log entry."""
    yield AsyncMock(
        url="https://crn.example.com",
        status=200,
        json=AsyncMock(
            return_value=[
                {
                    "__REALTIME_TIMESTAMP": "2024-02-02 23:34:21",
                    "MESSAGE": "hello world",
                }
            ]
        ),
    )


def create_mock_vm_client():
    """Return (mock client class, mock client) mimicking VmClient, wired to vm_client_operate."""
    mock_vm_client = AsyncMock(operate=vm_client_operate)
    mock_vm_client_class = MagicMock()
    # The class is used as an async context manager: `async with VmClient(...) as client`.
    mock_vm_client_class.return_value.__aenter__ = AsyncMock(return_value=mock_vm_client)
    return mock_vm_client_class, mock_vm_client


@contextlib.asynccontextmanager
async def mock_client_session_get(self, program_url):
    """Fake aiohttp.ClientSession.get: yields a runtime-checker style JSON payload."""
    yield AsyncMock(
        raise_for_status=MagicMock(),
        json=AsyncMock(
            return_value={
                "Distribution": "Debian GNU/Linux 12 (bookworm)",
                "Python": "3.11.2",
                "Docker": "Docker version 20.10.24+dfsg1, build 297e128",
                "Nodejs": "v18.13.0",
                "Rust": "Not installed",
                "Go": "Not installed",
            }
        ),
    )


@pytest.mark.asyncio
async def test_upload_program():
    """upload() stores the code, checks balance and price, then creates the program message."""
    mock_load_account = create_mock_load_account()
    mock_account = mock_load_account.return_value
    mock_auth_client_class, mock_auth_client = create_mock_auth_client(mock_account)
    mock_get_balance = AsyncMock(return_value={"available_amount": 100000})

    # Patches are applied only while the inner coroutine runs.
    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.utils.os.path.isfile", MagicMock(return_value=True))
    @patch("aleph_client.commands.program.AuthenticatedAlephHttpClient", mock_auth_client_class)
    @patch("aleph_client.commands.program.get_balance", mock_get_balance)
    @patch("aleph_client.commands.program.open", MagicMock())
    async def upload_program():
        print()  # For better display when pytest -v -s
        returned = await upload(
            address=FAKE_ADDRESS_EVM,
            path=Path("/fake/file.squashfs"),
            entrypoint="main:app",
            name="mock_program",
            runtime=settings.DEFAULT_RUNTIME_ID,
            compute_units=1,
            updatable=True,
            skip_volume=True,
            skip_env_var=True,
        )
        mock_load_account.assert_called_once()
        mock_auth_client.create_store.assert_called_once()
        mock_get_balance.assert_called_once()
        mock_auth_client.get_estimated_price.assert_called_once()
        mock_auth_client.create_program.assert_called_once()
        assert returned == FAKE_PROGRAM_HASH

    await upload_program()


@pytest.mark.asyncio
async def test_update_program():
    """update() re-reads the original program message and stores the new code."""
    mock_load_account = create_mock_load_account()
    mock_account = mock_load_account.return_value
    mock_auth_client_class, mock_auth_client = create_mock_auth_client(mock_account)

    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.utils.os.path.isfile", MagicMock(return_value=True))
    @patch("aleph_client.commands.program.AuthenticatedAlephHttpClient", mock_auth_client_class)
    @patch("aleph_client.commands.program.open", MagicMock())
    async def update_program():
        print()  # For better display when pytest -v -s
        await update(item_hash=FAKE_PROGRAM_HASH, path=Path("/fake/file.squashfs"))
        mock_load_account.assert_called_once()
        # One lookup for the program message, one for the code store message.
        assert mock_auth_client.get_message.call_count == 2
        mock_auth_client.create_store.assert_called_once()

    await update_program()


@pytest.mark.asyncio
async def test_delete_program():
    """delete() forgets both the program message and its code volume."""
    mock_load_account = create_mock_load_account()
    mock_account = mock_load_account.return_value
    mock_auth_client_class, mock_auth_client = create_mock_auth_client(mock_account)

    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.commands.program.AuthenticatedAlephHttpClient", mock_auth_client_class)
    async def delete_program():
        print()  # For better display when pytest -v -s
        await delete(item_hash=FAKE_PROGRAM_HASH)
        mock_load_account.assert_called_once()
        # Program message + code store message are both fetched then forgotten.
        assert mock_auth_client.get_message.call_count == 2
        assert mock_auth_client.forget.call_count == 2

    await delete_program()


@pytest.mark.asyncio
async def test_list_programs():
    """list_programs() fetches messages and prices each of the four mock programs."""
    mock_load_account = create_mock_load_account()
    mock_account = mock_load_account.return_value
    mock_auth_client_class, mock_auth_client = create_mock_auth_client(mock_account)
    mock_program_messages = create_mock_program_messages(mock_account)

    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.commands.program.AlephHttpClient", mock_auth_client_class)
    @patch("aleph_client.commands.program.filter_only_valid_messages", mock_program_messages)
    async def list_program():
        print()  # For better display when pytest -v -s
        await list_programs(address=mock_account.get_address())
        mock_program_messages.assert_called_once()
        mock_auth_client.get_messages.assert_called_once()
        # One price lookup per mocked program message.
        assert mock_auth_client.get_program_price.call_count == 4

    await list_program()


@pytest.mark.asyncio
async def test_persist_program():
    """persist() re-submits the program as persistent and forgets the old message."""
    mock_load_account = create_mock_load_account()
    mock_account = mock_load_account.return_value
    mock_auth_client_class, mock_auth_client = create_mock_auth_client(mock_account)

    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.commands.program.AuthenticatedAlephHttpClient", mock_auth_client_class)
    async def persist_program():
        print()  # For better display when pytest -v -s
        returned = await persist(item_hash=FAKE_PROGRAM_HASH)
        mock_load_account.assert_called_once()
        mock_auth_client.get_message.assert_called_once()
        mock_auth_client.submit.assert_called_once()
        mock_auth_client.forget.assert_called_once()
        # The amended message carries the new hash.
        assert returned == FAKE_PROGRAM_HASH_2

    await persist_program()


@pytest.mark.asyncio
async def test_unpersist_program():
    """unpersist() is the mirror of persist(); swap_persistent=True flips the mock's flags."""
    mock_load_account = create_mock_load_account()
    mock_account = mock_load_account.return_value
    mock_auth_client_class, mock_auth_client = create_mock_auth_client(mock_account, swap_persistent=True)

    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.commands.program.AuthenticatedAlephHttpClient", mock_auth_client_class)
    async def unpersist_program():
        print()  # For better display when pytest -v -s
        returned = await unpersist(item_hash=FAKE_PROGRAM_HASH)
        mock_load_account.assert_called_once()
        mock_auth_client.get_message.assert_called_once()
        mock_auth_client.submit.assert_called_once()
        mock_auth_client.forget.assert_called_once()
        assert returned == FAKE_PROGRAM_HASH_2

    await unpersist_program()


@pytest.mark.asyncio
async def test_logs_program(capsys):
    """logs() streams the CRN log entries yielded by the mocked VmClient."""
    mock_load_account = create_mock_load_account()
    mock_vm_client_class, _ = create_mock_vm_client()

    @patch("aleph_client.commands.program._load_account", mock_load_account)
    @patch("aleph_client.commands.program.VmClient", mock_vm_client_class)
    async def logs_program():
        print()  # For better display when pytest -v -s
        await logs(
            FAKE_VM_HASH,
            domain="https://crn.example.com",
            chain=Chain.ETH,
        )

    await logs_program()
    captured = capsys.readouterr()
    # Single log line from vm_client_operate, rendered with its timestamp.
    assert captured.out == "\nReceived logs\n2024-02-02 23:34:21> hello world\n"


@pytest.mark.asyncio
async def test_runtime_checker_program():
    """runtime_checker() uploads a probe program, queries it over HTTP, then deletes it."""
    mock_upload = AsyncMock(return_value=FAKE_PROGRAM_HASH)
    mock_delete = AsyncMock()

    @patch("aleph_client.commands.program.upload", mock_upload)
    @patch.object(aiohttp.ClientSession, "get", mock_client_session_get)
    @patch("aleph_client.commands.program.delete", mock_delete)
    async def runtime_checker_program():
        print()  # For better display when pytest -v -s
        await runtime_checker(item_hash=FAKE_STORE_HASH)
        mock_upload.assert_called_once()
        mock_delete.assert_called_once()

    await runtime_checker_program()


# ---------------------------------------------------------------------------
# tests/unit/test_utils.py
# ---------------------------------------------------------------------------
from aleph_message.models import (
    AggregateMessage,
    ForgetMessage,
    PostMessage,
    ProgramMessage,
    StoreMessage,
)
from aleph_message.models.base import MessageType

from aleph_client.utils import get_message_type_value


def test_get_message_type_value():
    """Each message model maps to its corresponding MessageType enum member."""
    assert get_message_type_value(PostMessage) == MessageType.post
    assert get_message_type_value(AggregateMessage) == MessageType.aggregate
    assert get_message_type_value(StoreMessage) == MessageType.store
    assert get_message_type_value(ProgramMessage) == MessageType.program
    assert get_message_type_value(ForgetMessage) == MessageType.forget