├── .dockerignore ├── .editorconfig ├── .github ├── .stale.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ ├── feature_request.md │ └── question.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml ├── release-drafter.yml └── workflows │ ├── build.yml │ ├── greetings.yml │ ├── publish-to-pypi.yml │ └── release-drafter.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── SECURITY.md ├── algobase ├── __init__.py ├── algorand │ ├── __init__.py │ ├── account.py │ ├── client.py │ ├── dispenser.py │ └── simple_mint.py ├── choices.py ├── data │ ├── __init__.py │ └── ipfs.toml ├── functional.py ├── ipfs │ ├── __init__.py │ ├── client_base.py │ └── nft_storage.py ├── models │ ├── __init__.py │ ├── algod.py │ ├── arc19.py │ ├── arc3.py │ ├── asa.py │ ├── asset_params.py │ ├── dispenser.py │ └── kmd.py ├── settings.py ├── types │ ├── __init__.py │ └── annotated.py └── utils │ ├── __init__.py │ ├── cid.py │ ├── hash.py │ ├── read.py │ ├── url.py │ └── validate.py ├── assets └── images │ └── coverage.svg ├── cookiecutter-config-file.yml ├── docker ├── Dockerfile └── README.md ├── docs ├── explanation.md ├── how-to-guides.md ├── how_to │ ├── how_to_mint_nft_localnet.md │ ├── how_to_store_json_ipfs.md │ └── how_to_validate_arc3.md ├── index.md ├── reference.md ├── stylesheets │ └── extra.css └── tutorials.md ├── examples ├── simple_mint.py ├── store_json_ipfs.py └── validate_arc3.py ├── mkdocs.yml ├── poetry.lock ├── pyproject.toml ├── requirements.txt └── tests ├── __init__.py ├── test_algorand ├── __init__.py ├── test_account.py ├── test_client.py ├── test_dispenser.py └── test_simple_mint.py ├── test_functional.py ├── test_ipfs ├── __init__.py ├── conftest.py ├── test_client_base.py └── test_nft_storage.py ├── test_models ├── __init__.py ├── conftest.py ├── test_arc19.py ├── test_arc3.py ├── test_asa.py └── test_asset_params.py ├── test_settings.py ├── test_types ├── __init__.py └── 
test_annotated.py ├── test_utils ├── __init__.py ├── test_cid.py ├── test_hash.py ├── test_read.py ├── test_url.py └── test_validate.py └── types.py /.dockerignore: -------------------------------------------------------------------------------- 1 | # Git 2 | .git 3 | .gitignore 4 | .github 5 | 6 | # Docker 7 | .dockerignore 8 | 9 | # IDE 10 | .idea 11 | .vscode 12 | 13 | # Byte-compiled / optimized / DLL files 14 | __pycache__/ 15 | **/__pycache__/ 16 | *.pyc 17 | *.pyo 18 | *.pyd 19 | .Python 20 | *.py[cod] 21 | *$py.class 22 | .pytest_cache/ 23 | .mypy_cache/ 24 | 25 | # poetry 26 | .venv 27 | 28 | # C extensions 29 | *.so 30 | 31 | # Virtual environment 32 | .venv 33 | venv 34 | 35 | .DS_Store 36 | .AppleDouble 37 | .LSOverride 38 | ._* 39 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # Check http://editorconfig.org for more information 2 | # This is the main config file for this project: 3 | root = true 4 | 5 | [*] 6 | charset = utf-8 7 | end_of_line = lf 8 | insert_final_newline = true 9 | indent_style = space 10 | indent_size = 2 11 | trim_trailing_whitespace = true 12 | 13 | [*.{py,pyi}] 14 | indent_style = space 15 | indent_size = 4 16 | 17 | [Makefile] 18 | indent_style = tab 19 | 20 | [*.md] 21 | trim_trailing_whitespace = false 22 | 23 | [*.{diff,patch}] 24 | trim_trailing_whitespace = false 25 | -------------------------------------------------------------------------------- /.github/.stale.yml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 60 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - pinned 8 | - security 9 | # Label to use when marking an issue as stale 10 |
staleLabel: wontfix 11 | # Comment to post when marking an issue as stale. Set to `false` to disable 12 | markComment: > 13 | This issue has been automatically marked as stale because it has not had 14 | recent activity. It will be closed if no further activity occurs. Thank you 15 | for your contributions. 16 | # Comment to post when closing a stale issue. Set to `false` to disable 17 | closeComment: false 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🐛 Bug report 3 | about: If something isn't working 🔧 4 | title: '' 5 | labels: bug 6 | assignees: 7 | --- 8 | 9 | ## 🐛 Bug Report 10 | 11 | 12 | 13 | ## 🔬 How To Reproduce 14 | 15 | Steps to reproduce the behavior: 16 | 17 | 1. ... 18 | 19 | ### Code sample 20 | 21 | 22 | 23 | ### Environment 24 | 25 | * OS: [e.g. Linux / Windows / macOS] 26 | * Python version, get it with: 27 | 28 | ```bash 29 | python --version 30 | ``` 31 | 32 | ### Screenshots 33 | 34 | 35 | 36 | ## 📈 Expected behavior 37 | 38 | 39 | 40 | ## 📎 Additional context 41 | 42 | 43 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | # Configuration: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository 2 | 3 | blank_issues_enabled: false 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🚀 Feature request 3 | about: Suggest an idea for this project 🏖 4 | title: '' 5 | labels: enhancement 6 | assignees: 7 | --- 8 | 9 | ## 🚀 Feature Request 10 | 11 | 12 | 13 | ## 🔈 Motivation 14 | 15 | 16 | 17 | ## 🛰 
Alternatives 18 | 19 | 20 | 21 | ## 📎 Additional context 22 | 23 | 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: ❓ Question 3 | about: Ask a question about this project 🎓 4 | title: "" 5 | labels: question 6 | assignees: 7 | --- 8 | 9 | ## Checklist 10 | 11 | 12 | 13 | - [ ] I've searched the project's [`issues`](https://github.com/code-alexander/algobase/issues?q=is%3Aissue). 14 | 15 | ## ❓ Question 16 | 17 | 18 | 19 | How can I [...]? 20 | 21 | Is it possible to [...]? 22 | 23 | ## 📎 Additional context 24 | 25 | 26 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | 3 | 4 | 5 | ## Related Issue 6 | 7 | 8 | 9 | ## Type of Change 10 | 11 | 12 | 13 | - [ ] 📚 Examples / docs / tutorials / dependencies update 14 | - [ ] 🔧 Bug fix (non-breaking change which fixes an issue) 15 | - [ ] 🥂 Improvement (non-breaking change which improves an existing feature) 16 | - [ ] 🚀 New feature (non-breaking change which adds functionality) 17 | - [ ] 💥 Breaking change (fix or feature that would cause existing functionality to change) 18 | - [ ] 🔐 Security fix 19 | 20 | ## Checklist 21 | 22 | 23 | 24 | - [ ] I've read the [`CODE_OF_CONDUCT.md`](https://github.com/code-alexander/algobase/blob/main/CODE_OF_CONDUCT.md) document. 25 | - [ ] I've read the [`CONTRIBUTING.md`](https://github.com/code-alexander/algobase/blob/main/CONTRIBUTING.md) guide. 26 | - [ ] I've updated the code style using `make codestyle`. 27 | - [ ] I've written tests for all new methods and classes that I created. 28 | - [ ] I've written the docstring in Google format for all the methods and classes that I used. 
29 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Configuration: https://dependabot.com/docs/config-file/ 2 | # Docs: https://docs.github.com/en/github/administering-a-repository/keeping-your-dependencies-updated-automatically 3 | 4 | version: 2 5 | 6 | updates: 7 | - package-ecosystem: "pip" 8 | directory: "/" 9 | schedule: 10 | interval: "daily" 11 | allow: 12 | - dependency-type: "all" 13 | commit-message: 14 | prefix: ":arrow_up:" 15 | open-pull-requests-limit: 50 16 | 17 | - package-ecosystem: "github-actions" 18 | directory: "/" 19 | schedule: 20 | interval: "daily" 21 | allow: 22 | - dependency-type: "all" 23 | commit-message: 24 | prefix: ":arrow_up:" 25 | open-pull-requests-limit: 50 26 | 27 | - package-ecosystem: "docker" 28 | directory: "/docker" 29 | schedule: 30 | interval: "weekly" 31 | allow: 32 | - dependency-type: "all" 33 | commit-message: 34 | prefix: ":arrow_up:" 35 | open-pull-requests-limit: 50 36 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | # Release drafter configuration https://github.com/release-drafter/release-drafter#configuration 2 | # Emojis were chosen to match the https://gitmoji.carloscuesta.me/ 3 | 4 | name-template: "v$NEXT_PATCH_VERSION" 5 | tag-template: "v$NEXT_PATCH_VERSION" 6 | 7 | categories: 8 | - title: ":rocket: Features" 9 | labels: [enhancement, feature] 10 | - title: ":wrench: Fixes & Refactoring" 11 | labels: [bug, refactoring, bugfix, fix] 12 | - title: ":package: Build System & CI/CD" 13 | labels: [build, ci, testing] 14 | - title: ":boom: Breaking Changes" 15 | labels: [breaking] 16 | - title: ":pencil: Documentation" 17 | labels: [documentation] 18 | - title: ":arrow_up: Dependencies updates" 19 | labels: 
[dependencies] 20 | 21 | template: | 22 | ## What’s Changed 23 | 24 | $CHANGES 25 | 26 | ## :busts_in_silhouette: List of contributors 27 | 28 | $CONTRIBUTORS 29 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ["3.11"] 11 | 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Set up Python ${{ matrix.python-version }} 15 | uses: actions/setup-python@v5 16 | with: 17 | python-version: ${{ matrix.python-version }} 18 | 19 | - name: Install poetry 20 | run: make poetry-download 21 | 22 | - name: Set up cache 23 | uses: actions/cache@v4 24 | with: 25 | path: .venv 26 | key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }} 27 | - name: Install dependencies 28 | run: | 29 | poetry config virtualenvs.in-project true 30 | poetry install 31 | 32 | - name: Run style checks 33 | run: | 34 | make check-codestyle 35 | 36 | - name: Run tests 37 | run: | 38 | make test 39 | 40 | - name: Run safety checks 41 | run: | 42 | make check-safety 43 | -------------------------------------------------------------------------------- /.github/workflows/greetings.yml: -------------------------------------------------------------------------------- 1 | name: Greetings 2 | 3 | on: [pull_request, issues] 4 | 5 | jobs: 6 | greeting: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/first-interaction@v1 10 | with: 11 | repo-token: ${{ secrets.GITHUB_TOKEN }} 12 | pr-message: 'Hello @${{ github.actor }}, thank you for submitting a PR! We will respond as soon as possible.' 13 | issue-message: | 14 | Hello @${{ github.actor }}, thank you for your interest in our work! 
15 | 16 | If this is a bug report, please provide screenshots and **minimum viable code to reproduce your issue**, otherwise we can not help you. 17 | -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python 🐍 distribution 📦 to PyPI 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*.*.*" 7 | 8 | jobs: 9 | on-main-branch-check: 10 | runs-on: ubuntu-latest 11 | outputs: 12 | on_main: ${{ steps.contains_tag.outputs.retval }} 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | fetch-depth: 0 17 | - uses: rickstaa/action-contains-tag@v1 18 | id: contains_tag 19 | with: 20 | reference: "main" 21 | tag: "${{ github.ref }}" 22 | 23 | tag-not-on-main-job: 24 | runs-on: ubuntu-latest 25 | needs: on-main-branch-check 26 | if: ${{ needs.on-main-branch-check.outputs.on_main != 'true' }} 27 | steps: 28 | - run: echo "Tag was not pushed to main." 
29 | - run: echo ${{needs.on-main-branch-check.outputs.on_main}} 30 | 31 | build: 32 | runs-on: ubuntu-latest 33 | needs: on-main-branch-check 34 | if: ${{ needs.on-main-branch-check.outputs.on_main == 'true' }} 35 | outputs: 36 | release_tag: ${{ steps.set_release_tag.outputs.release_tag }} 37 | strategy: 38 | matrix: 39 | python-version: ["3.11"] 40 | 41 | steps: 42 | - uses: actions/checkout@v4 43 | - name: Set up Python ${{ matrix.python-version }} 44 | uses: actions/setup-python@v5 45 | with: 46 | python-version: ${{ matrix.python-version }} 47 | 48 | - name: Install poetry 49 | run: make poetry-download 50 | 51 | - name: Set up cache 52 | uses: actions/cache@v4 53 | with: 54 | path: .venv 55 | key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }} 56 | - name: Install dependencies 57 | run: | 58 | poetry config virtualenvs.in-project true 59 | poetry install 60 | 61 | - name: Run style checks 62 | run: | 63 | make check-codestyle 64 | 65 | - name: Run tests 66 | run: | 67 | make test 68 | 69 | - name: Run safety checks 70 | run: | 71 | make check-safety 72 | 73 | - name: Build package 74 | run: | 75 | poetry build 76 | 77 | - name: Set Release Tag 78 | id: set_release_tag 79 | run: | 80 | echo "release_tag=v$(poetry version --short)_$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT 81 | 82 | - name: Store the distribution packages 83 | uses: actions/upload-artifact@v4 84 | with: 85 | name: python-package-distributions 86 | path: dist/ 87 | 88 | update-docs: 89 | name: >- 90 | Update documentation 📚 on GitHub Pages 91 | needs: 92 | - build 93 | permissions: 94 | contents: write 95 | runs-on: ubuntu-latest 96 | strategy: 97 | matrix: 98 | python-version: ["3.11"] 99 | steps: 100 | - uses: actions/checkout@v4 101 | - name: Configure Git Credentials 102 | run: | 103 | git config user.name github-actions[bot] 104 | git config user.email 41898282+github-actions[bot]@users.noreply.github.com 105 | - uses: 
actions/setup-python@v5 106 | with: 107 | python-version: ${{ matrix.python-version }} 108 | - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV 109 | - uses: actions/cache@v3 110 | with: 111 | key: mkdocs-material-${{ env.cache_id }} 112 | path: .cache 113 | restore-keys: | 114 | mkdocs-material- 115 | - name: Install poetry 116 | run: make poetry-download 117 | - name: Install dependencies 118 | run: | 119 | poetry config virtualenvs.in-project true 120 | poetry install 121 | - run: poetry run mkdocs gh-deploy --force 122 | 123 | publish-to-pypi: 124 | name: >- 125 | Publish Python 🐍 distribution 📦 to PyPI 126 | needs: 127 | - build 128 | runs-on: ubuntu-latest 129 | environment: 130 | name: pypi 131 | url: https://pypi.org/p/algobase 132 | permissions: 133 | id-token: write # IMPORTANT: mandatory for trusted publishing 134 | 135 | steps: 136 | - name: Download all the dists 137 | uses: actions/download-artifact@v4 138 | with: 139 | name: python-package-distributions 140 | path: dist/ 141 | - name: Publish distribution 📦 to PyPI 142 | uses: pypa/gh-action-pypi-publish@release/v1 143 | 144 | github-release: 145 | name: >- 146 | Sign the Python 🐍 distribution 📦 with Sigstore 147 | and upload them to GitHub Release 148 | needs: 149 | - build 150 | - publish-to-pypi 151 | runs-on: ubuntu-latest 152 | 153 | permissions: 154 | contents: write # IMPORTANT: mandatory for making GitHub Releases 155 | id-token: write # IMPORTANT: mandatory for sigstore 156 | 157 | steps: 158 | - name: Download all the dists 159 | uses: actions/download-artifact@v4 160 | with: 161 | name: python-package-distributions 162 | path: dist/ 163 | - name: Sign the dists with Sigstore 164 | uses: sigstore/gh-action-sigstore-python@v2.1.1 165 | with: 166 | inputs: >- 167 | ./dist/*.tar.gz 168 | ./dist/*.whl 169 | - name: Create GitHub Release 170 | env: 171 | GITHUB_TOKEN: ${{ github.token }} 172 | run: >- 173 | gh release create 174 | '${{needs.build.outputs.release_tag}}' 175 | --repo '${{ 
github.repository }}' 176 | --notes "" 177 | - name: Upload artifact signatures to GitHub Release 178 | env: 179 | GITHUB_TOKEN: ${{ github.token }} 180 | # Upload to GitHub Release using the `gh` CLI. 181 | # `dist/` contains the built packages, and the 182 | # sigstore-produced signatures and certificates. 183 | run: >- 184 | gh release upload 185 | '${{needs.build.outputs.release_tag}}' dist/** 186 | --repo '${{ github.repository }}' 187 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | push: 5 | # branches to consider in the event; optional, defaults to all 6 | branches: 7 | - main 8 | 9 | jobs: 10 | update_release_draft: 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Drafts your next Release notes as Pull Requests are merged into "main" 14 | - uses: release-drafter/release-drafter@v6.0.0 15 | env: 16 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 17 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3.11 3 | 4 | default_stages: [commit, push] 5 | 6 | repos: 7 | - repo: https://github.com/pre-commit/pre-commit-hooks 8 | rev: v4.5.0 9 | hooks: 10 | - id: check-yaml 11 | - id: end-of-file-fixer 12 | exclude: LICENSE 13 | 14 | - repo: local 15 | hooks: 16 | - id: ruff-check 17 | name: ruff-check 18 | entry: poetry run ruff check --fix --config pyproject.toml 19 | types: [python] 20 | language: system 21 | 22 | - repo: local 23 | hooks: 24 | - id: ruff-format 25 | name: ruff-format 26 | entry: poetry run ruff format --config pyproject.toml 27 | types: [python] 28 | language: system 29 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: 
-------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 
39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at alexandercodes@proton.me. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to contribute 2 | 3 | ## Dependencies 4 | 5 | We use `poetry` to manage the [dependencies](https://github.com/python-poetry/poetry). 6 | If you don't have `poetry`, you should install it with `make poetry-download`. 7 | 8 | To install dependencies and prepare [`pre-commit`](https://pre-commit.com/) hooks you need to run the `install` command: 9 | 10 | ```bash 11 | make install 12 | make pre-commit-install 13 | ``` 14 | 15 | To activate your `virtualenv` run `poetry shell`. 16 | 17 | ## Codestyle 18 | 19 | After installation you may execute code formatting. 20 | 21 | ```bash 22 | make codestyle 23 | ``` 24 | 25 | ### Checks 26 | 27 | Many checks are configured for this project. Command `make check-codestyle` will check ruff. 28 | The `make check-safety` command will look at the security of your code. 29 | 30 | Command `make lint` applies all checks. 31 | 32 | ### Before submitting 33 | 34 | Before submitting your code please do the following steps: 35 | 36 | 1. Add any changes you want. 37 | 1. Add tests for the new changes. 38 | 1. Edit documentation if you have changed something significant. 39 | 1. Run `make codestyle` to format your changes. 40 | 1. Run `make lint` to ensure that types, security and docstrings are okay. 41 | 42 | ## Naming Conventions 43 | 44 | ### Git 45 | 46 | - We follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) for commit messages.
47 | - We follow the branch naming convention outlined [here](https://dev.to/varbsan/a-simplified-convention-for-naming-branches-and-commits-in-git-il4). 48 | 49 | ## Other help 50 | 51 | You can contribute by spreading word about this library. 52 | It would also be a huge contribution to write 53 | a short article on how you are using this project. 54 | You can also share your best practices with us. 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and 10 | distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by the 13 | copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all other 16 | entities that control, are controlled by, or are under common control with 17 | that entity. For the purposes of this definition, "control" means (i) the 18 | power, direct or indirect, to cause the direction or management of such 19 | entity, whether by contract or otherwise, or (ii) ownership of 20 | fifty percent (50%) or more of the outstanding shares, or (iii) beneficial 21 | ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity exercising 24 | permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation source, 28 | and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical transformation 31 | or translation of a Source form, including but not limited to compiled 32 | object code, generated documentation, and conversions to 33 | other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or Object 36 | form, made available under the License, as indicated by a copyright notice 37 | that is included in or attached to the work (an example is provided in the 38 | Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object form, 41 | that is based on (or derived from) the Work and for which the editorial 42 | revisions, annotations, elaborations, or other modifications represent, 43 | as a whole, an original work of authorship. For the purposes of this 44 | License, Derivative Works shall not include works that remain separable 45 | from, or merely link (or bind by name) to the interfaces of, the Work and 46 | Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including the original 49 | version of the Work and any modifications or additions to that Work or 50 | Derivative Works thereof, that is intentionally submitted to Licensor for 51 | inclusion in the Work by the copyright owner or by an individual or 52 | Legal Entity authorized to submit on behalf of the copyright owner. 53 | For the purposes of this definition, "submitted" means any form of 54 | electronic, verbal, or written communication sent to the Licensor or its 55 | representatives, including but not limited to communication on electronic 56 | mailing lists, source code control systems, and issue tracking systems 57 | that are managed by, or on behalf of, the Licensor for the purpose of 58 | discussing and improving the Work, but excluding communication that is 59 | conspicuously marked or otherwise designated in writing by the copyright 60 | owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity on 63 | behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 67 | 68 | Subject to the terms and conditions of this License, each Contributor 69 | hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, 70 | royalty-free, irrevocable copyright license to reproduce, prepare 71 | Derivative Works of, publicly display, publicly perform, sublicense, 72 | and distribute the Work and such Derivative Works in 73 | Source or Object form. 74 | 75 | 3. Grant of Patent License. 76 | 77 | Subject to the terms and conditions of this License, each Contributor 78 | hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, 79 | royalty-free, irrevocable (except as stated in this section) patent 80 | license to make, have made, use, offer to sell, sell, import, and 81 | otherwise transfer the Work, where such license applies only to those 82 | patent claims licensable by such Contributor that are necessarily 83 | infringed by their Contribution(s) alone or by combination of their 84 | Contribution(s) with the Work to which such Contribution(s) was submitted. 85 | If You institute patent litigation against any entity (including a 86 | cross-claim or counterclaim in a lawsuit) alleging that the Work or a 87 | Contribution incorporated within the Work constitutes direct or 88 | contributory patent infringement, then any patent licenses granted to 89 | You under this License for that Work shall terminate as of the date such 90 | litigation is filed. 91 | 92 | 4. Redistribution. 93 | 94 | You may reproduce and distribute copies of the Work or Derivative Works 95 | thereof in any medium, with or without modifications, and in Source or 96 | Object form, provided that You meet the following conditions: 97 | 98 | 1. 
You must give any other recipients of the Work or Derivative Works a 99 | copy of this License; and 100 | 101 | 2. You must cause any modified files to carry prominent notices stating 102 | that You changed the files; and 103 | 104 | 3. You must retain, in the Source form of any Derivative Works that You 105 | distribute, all copyright, patent, trademark, and attribution notices from 106 | the Source form of the Work, excluding those notices that do not pertain 107 | to any part of the Derivative Works; and 108 | 109 | 4. If the Work includes a "NOTICE" text file as part of its distribution, 110 | then any Derivative Works that You distribute must include a readable copy 111 | of the attribution notices contained within such NOTICE file, excluding 112 | those notices that do not pertain to any part of the Derivative Works, 113 | in at least one of the following places: within a NOTICE text file 114 | distributed as part of the Derivative Works; within the Source form or 115 | documentation, if provided along with the Derivative Works; or, within a 116 | display generated by the Derivative Works, if and wherever such 117 | third-party notices normally appear. The contents of the NOTICE file are 118 | for informational purposes only and do not modify the License. 119 | You may add Your own attribution notices within Derivative Works that You 120 | distribute, alongside or as an addendum to the NOTICE text from the Work, 121 | provided that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and may 125 | provide additional or different license terms and conditions for use, 126 | reproduction, or distribution of Your modifications, or for any such 127 | Derivative Works as a whole, provided Your use, reproduction, and 128 | distribution of the Work otherwise complies with the conditions 129 | stated in this License. 130 | 131 | 5. Submission of Contributions. 
132 | 133 | Unless You explicitly state otherwise, any Contribution intentionally 134 | submitted for inclusion in the Work by You to the Licensor shall be under 135 | the terms and conditions of this License, without any additional 136 | terms or conditions. Notwithstanding the above, nothing herein shall 137 | supersede or modify the terms of any separate license agreement you may 138 | have executed with Licensor regarding such Contributions. 139 | 140 | 6. Trademarks. 141 | 142 | This License does not grant permission to use the trade names, trademarks, 143 | service marks, or product names of the Licensor, except as required for 144 | reasonable and customary use in describing the origin of the Work and 145 | reproducing the content of the NOTICE file. 146 | 147 | 7. Disclaimer of Warranty. 148 | 149 | Unless required by applicable law or agreed to in writing, Licensor 150 | provides the Work (and each Contributor provides its Contributions) 151 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 152 | either express or implied, including, without limitation, any warranties 153 | or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS 154 | FOR A PARTICULAR PURPOSE. You are solely responsible for determining the 155 | appropriateness of using or redistributing the Work and assume any risks 156 | associated with Your exercise of permissions under this License. 157 | 158 | 8. Limitation of Liability. 
159 | 160 | In no event and under no legal theory, whether in tort 161 | (including negligence), contract, or otherwise, unless required by 162 | applicable law (such as deliberate and grossly negligent acts) or agreed 163 | to in writing, shall any Contributor be liable to You for damages, 164 | including any direct, indirect, special, incidental, or consequential 165 | damages of any character arising as a result of this License or out of 166 | the use or inability to use the Work (including but not limited to damages 167 | for loss of goodwill, work stoppage, computer failure or malfunction, 168 | or any and all other commercial damages or losses), even if such 169 | Contributor has been advised of the possibility of such damages. 170 | 171 | 9. Accepting Warranty or Additional Liability. 172 | 173 | While redistributing the Work or Derivative Works thereof, You may choose 174 | to offer, and charge a fee for, acceptance of support, warranty, 175 | indemnity, or other liability obligations and/or rights consistent with 176 | this License. However, in accepting such obligations, You may act only 177 | on Your own behalf and on Your sole responsibility, not on behalf of any 178 | other Contributor, and only if You agree to indemnify, defend, and hold 179 | each Contributor harmless for any liability incurred by, or claims 180 | asserted against, such Contributor by reason of your accepting any such 181 | warranty or additional liability. 182 | 183 | END OF TERMS AND CONDITIONS 184 | 185 | APPENDIX: How to apply the Apache License to your work 186 | 187 | To apply the Apache License to your work, attach the following boilerplate 188 | notice, with the fields enclosed by brackets "[]" replaced with your own 189 | identifying information. (Don't include the brackets!) The text should be 190 | enclosed in the appropriate comment syntax for the file format. 
We also 191 | recommend that a file or class name and description of purpose be included 192 | on the same "printed page" as the copyright notice for easier 193 | identification within third-party archives. 194 | 195 | Copyright 2023 algobase 196 | 197 | Licensed under the Apache License, Version 2.0 (the "License"); 198 | you may not use this file except in compliance with the License. 199 | You may obtain a copy of the License at 200 | 201 | http://www.apache.org/licenses/LICENSE-2.0 202 | 203 | Unless required by applicable law or agreed to in writing, software 204 | distributed under the License is distributed on an "AS IS" BASIS, 205 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 206 | or implied. See the License for the specific language governing 207 | permissions and limitations under the License. 208 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | #* Variables 2 | SHELL := /usr/bin/env bash 3 | PYTHON := python 4 | PYTHONPATH := `pwd` 5 | 6 | #* Docker variables 7 | IMAGE := algobase 8 | VERSION := latest 9 | 10 | #* Poetry 11 | .PHONY: poetry-download 12 | poetry-download: 13 | curl -sSL https://install.python-poetry.org | $(PYTHON) - 14 | 15 | .PHONY: poetry-remove 16 | poetry-remove: 17 | curl -sSL https://install.python-poetry.org | $(PYTHON) - --uninstall 18 | 19 | #* Installation 20 | .PHONY: install 21 | install: 22 | poetry lock -n && poetry export --without-hashes > requirements.txt 23 | poetry install -n 24 | -poetry run mypy --install-types --non-interactive ./ 25 | 26 | .PHONY: pre-commit-install 27 | pre-commit-install: 28 | poetry run pre-commit install 29 | 30 | #* Formatters 31 | .PHONY: codestyle 32 | codestyle: 33 | poetry run ruff check --fix --config pyproject.toml ./ 34 | poetry run ruff format --config pyproject.toml ./ 35 | 36 | .PHONY: formatting 37 | formatting: codestyle 38 | 39 | #* 
Linting 40 | .PHONY: test 41 | test: 42 | PYTHONPATH=$(PYTHONPATH) poetry run pytest -c pyproject.toml --cov-report=html --cov=algobase tests/ 43 | poetry run coverage-badge -o assets/images/coverage.svg -f 44 | 45 | .PHONY: check-codestyle 46 | check-codestyle: 47 | poetry run ruff check --config pyproject.toml ./ 48 | poetry run ruff format --check --config pyproject.toml ./ 49 | 50 | .PHONY: mypy 51 | mypy: 52 | poetry run mypy --config-file pyproject.toml ./ 53 | 54 | .PHONY: check-safety 55 | check-safety: 56 | poetry check 57 | poetry run safety check -i 70612 --full-report 58 | poetry run bandit -ll --recursive algobase tests 59 | 60 | .PHONY: lint 61 | lint: test check-codestyle mypy check-safety 62 | 63 | .PHONY: update-dev-deps 64 | update-dev-deps: 65 | poetry add -D bandit@latest ruff@latest mypy@latest pre-commit@latest pydocstyle@latest pylint@latest pytest@latest pyupgrade@latest safety@latest coverage@latest coverage-badge@latest pytest-html@latest pytest-cov@latest 66 | poetry add -D --allow-prereleases black@latest 67 | 68 | #* Docker 69 | # Example: make docker-build VERSION=latest 70 | # Example: make docker-build IMAGE=some_name VERSION=0.1.0 71 | .PHONY: docker-build 72 | docker-build: 73 | @echo Building docker $(IMAGE):$(VERSION) ... 74 | docker build \ 75 | -t $(IMAGE):$(VERSION) . \ 76 | -f ./docker/Dockerfile --no-cache 77 | 78 | # Example: make docker-remove VERSION=latest 79 | # Example: make docker-remove IMAGE=some_name VERSION=0.1.0 80 | .PHONY: docker-remove 81 | docker-remove: 82 | @echo Removing docker $(IMAGE):$(VERSION) ... 83 | docker rmi -f $(IMAGE):$(VERSION) 84 | 85 | #* Cleaning 86 | .PHONY: pycache-remove 87 | pycache-remove: 88 | find . | grep -E "(__pycache__|\.pyc|\.pyo$$)" | xargs rm -rf 89 | 90 | .PHONY: dsstore-remove 91 | dsstore-remove: 92 | find . | grep -E ".DS_Store" | xargs rm -rf 93 | 94 | .PHONY: mypycache-remove 95 | mypycache-remove: 96 | find . 
| grep -E ".mypy_cache" | xargs rm -rf 97 | 98 | .PHONY: ipynbcheckpoints-remove 99 | ipynbcheckpoints-remove: 100 | find . | grep -E ".ipynb_checkpoints" | xargs rm -rf 101 | 102 | .PHONY: pytestcache-remove 103 | pytestcache-remove: 104 | find . | grep -E ".pytest_cache" | xargs rm -rf 105 | 106 | .PHONY: ruffcache-remove 107 | ruffcache-remove: 108 | find . | grep -E ".ruff_cache" | xargs rm -rf 109 | 110 | .PHONY: build-remove 111 | build-remove: 112 | rm -rf build/ 113 | 114 | .PHONY: cleanup 115 | cleanup: pycache-remove dsstore-remove mypycache-remove ipynbcheckpoints-remove pytestcache-remove ruffcache-remove 116 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security 2 | 3 | ## 🔐 Reporting Security Issues 4 | 5 | > Do not open issues that might have security implications! 6 | > It is critical that security related issues are reported privately so we have time to address them before they become public knowledge. 7 | 8 | Vulnerabilities can be reported by emailing core members: 9 | 10 | - algobase [alexandercodes@proton.me](mailto:alexandercodes@proton.me) 11 | 12 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 13 | 14 | - Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 15 | - Full paths of source file(s) related to the manifestation of the issue 16 | - The location of the affected source code (tag/branch/commit or direct URL) 17 | - Any special configuration required to reproduce the issue 18 | - Environment (e.g. 
Linux / Windows / macOS) 19 | - Step-by-step instructions to reproduce the issue 20 | - Proof-of-concept or exploit code (if possible) 21 | - Impact of the issue, including how an attacker might exploit the issue 22 | 23 | This information will help us triage your report more quickly. 24 | 25 | ## Preferred Languages 26 | 27 | We prefer all communications to be in English. 28 | -------------------------------------------------------------------------------- /algobase/__init__.py: -------------------------------------------------------------------------------- 1 | """A type-safe Python library for interacting with assets on Algorand.""" 2 | -------------------------------------------------------------------------------- /algobase/algorand/__init__.py: -------------------------------------------------------------------------------- 1 | """Algorand network clients and utilities.""" 2 | -------------------------------------------------------------------------------- /algobase/algorand/account.py: -------------------------------------------------------------------------------- 1 | """Algorand account class.""" 2 | 3 | from dataclasses import dataclass 4 | from typing import Self 5 | 6 | from algosdk.account import address_from_private_key, generate_account 7 | 8 | 9 | @dataclass(frozen=True, slots=True) 10 | class Account: 11 | """Represents an Algorand account.""" 12 | 13 | private_key: str 14 | address: str 15 | 16 | @classmethod 17 | def from_private_key(cls, private_key: str) -> Self: 18 | """Create an account from the given private key.""" 19 | return cls( 20 | private_key=private_key, address=address_from_private_key(private_key) 21 | ) 22 | 23 | 24 | def create_account() -> Account: 25 | """Create a new Algorand account. 26 | 27 | Returns: 28 | Account: The account. 
29 | """ 30 | private_key, address = generate_account() 31 | return Account(private_key=private_key, address=address) 32 | -------------------------------------------------------------------------------- /algobase/algorand/client.py: -------------------------------------------------------------------------------- 1 | """Classes and functions to configure and create Algorand API clients.""" 2 | 3 | from collections.abc import Callable 4 | from dataclasses import dataclass 5 | from typing import Literal 6 | 7 | from algosdk.kmd import KMDClient 8 | from algosdk.v2client.algod import AlgodClient 9 | from algosdk.v2client.indexer import IndexerClient 10 | from returns.curry import partial 11 | from returns.pipeline import flow 12 | 13 | from algobase.algorand.account import Account 14 | from algobase.choices import ( 15 | AlgorandApi, 16 | AlgorandApiChoice, 17 | AlgorandNetwork, 18 | ) 19 | from algobase.functional import first_true 20 | from algobase.models import algod, kmd 21 | 22 | 23 | @dataclass(frozen=True, slots=True) 24 | class ClientConfig: 25 | """Configuration for an Algorand API client.""" 26 | 27 | url: str 28 | credential: str 29 | headers: dict[str, str] | None = None 30 | 31 | 32 | def create_algod_client(config: ClientConfig) -> AlgodClient: 33 | """Create an AlgodClient instance from the given configuration. 34 | 35 | Args: 36 | config (ClientConfig): The configuration to use. 37 | 38 | Returns: 39 | AlgodClient: The AlgodClient instance. 40 | """ 41 | return AlgodClient( 42 | algod_token=config.credential, algod_address=config.url, headers=config.headers 43 | ) 44 | 45 | 46 | def create_indexer_client(config: ClientConfig) -> IndexerClient: 47 | """Create an IndexerClient instance from the given configuration. 48 | 49 | Args: 50 | config (ClientConfig): The configuration to use. 51 | 52 | Returns: 53 | IndexerClient: The IndexerClient instance. 
    """
    return IndexerClient(
        indexer_token=config.credential,
        indexer_address=config.url,
        headers=config.headers,
    )


def create_kmd_client(config: ClientConfig) -> KMDClient:
    """Create a KMDClient instance from the given configuration.

    Args:
        config (ClientConfig): The configuration to use.

    Returns:
        KMDClient: The KMDClient instance.
    """
    return KMDClient(kmd_token=config.credential, kmd_address=config.url)


def create_localnet_default_config(api: AlgorandApiChoice) -> ClientConfig:
    """Create a default configuration for the localnet.

    Args:
        api (AlgorandApiChoice): The API to configure.

    Returns:
        ClientConfig: The default configuration.
    """
    # Standard localnet ports: algod 4001, indexer 8980, kmd 4002.
    port = {AlgorandApi.ALGOD: 4001, AlgorandApi.INDEXER: 8980, AlgorandApi.KMD: 4002}[
        api
    ]
    # A 64-character token of "a"s is the conventional localnet credential.
    return ClientConfig(url=f"http://localhost:{port}", credential="a" * 64)


def create_localnet_algod_client() -> AlgodClient:
    """Create an AlgodClient instance for the localnet.

    Returns:
        AlgodClient: The AlgodClient instance.
    """
    return flow(AlgorandApi.ALGOD, create_localnet_default_config, create_algod_client)


def create_localnet_indexer_client() -> IndexerClient:
    """Create an IndexerClient instance for the localnet.

    Returns:
        IndexerClient: The IndexerClient instance.
    """
    return flow(
        AlgorandApi.INDEXER, create_localnet_default_config, create_indexer_client
    )


def create_localnet_kmd_client() -> KMDClient:
    """Create a KMDClient instance for the localnet.

    Returns:
        KMDClient: The KMDClient instance.
    """
    return flow(AlgorandApi.KMD, create_localnet_default_config, create_kmd_client)


def find_wallet_id(kmd_client: KMDClient, wallet_name: str) -> str | None:
    """Get the ID of a wallet from the KMD client.
120 | 121 | Args: 122 | kmd_client (KMDClient): The KMD client. 123 | wallet_name (str): The name of the wallet. 124 | 125 | Returns: 126 | str | None: The ID of the wallet if found, else None. 127 | """ 128 | return next( 129 | x.id 130 | for x in map(kmd.APIV1Wallet.model_validate, kmd_client.list_wallets()) 131 | if x.name == wallet_name 132 | ) 133 | 134 | 135 | def is_default_account(account: algod.Account) -> bool: 136 | """Check if an account is the default account. 137 | 138 | Args: 139 | account (AlgodResponseType): The account info. 140 | 141 | Returns: 142 | bool: True if the account is the default account, else False. 143 | """ 144 | return account.status != "Offline" and account.amount > 1_000_000_000 145 | 146 | 147 | def match_account( 148 | algod_client: AlgodClient, 149 | addresses: list[str], 150 | predicate: Callable[[algod.Account], bool], 151 | ) -> str: 152 | """Find the first account that matches the predicate, given a list of addresses to lookup. 153 | 154 | Args: 155 | algod_client (AlgodClient): The Algod client. 156 | addresses (list[str]): The addresses to check. 157 | predicate (Callable[[algod.Account], bool]): The predicate function. 158 | 159 | Raises: 160 | ValueError: If no account is found where predicate(account) is True. 161 | 162 | Returns: 163 | str: The address of the matching account if found, else None. 164 | """ 165 | matched = first_true( 166 | addresses, 167 | predicate=lambda x: flow( 168 | x, algod_client.account_info, algod.Account.model_validate, predicate 169 | ), 170 | ) 171 | if matched is None: 172 | raise ValueError("No account found.") 173 | return matched 174 | 175 | 176 | def get_default_account( 177 | algod_client: AlgodClient, kmd_client: KMDClient | None = None 178 | ) -> Account: 179 | """Return an Account instance for the default account. 180 | 181 | Args: 182 | algod_client (AlgodClient): The Algod client. 183 | kmd_client (KMDClient | None, optional): The KMD client. 
If None, a default instance will be created. Defaults to None. 184 | 185 | Raises: 186 | ValueError: If the Algod client instance isn't connected to a localnet network. 187 | 188 | Returns: 189 | Account: The default account. 190 | """ 191 | if not is_localnet(algod_client): 192 | raise ValueError("Algod client must be connected to a localnet network.") 193 | 194 | kmd_client = kmd_client or create_localnet_kmd_client() 195 | 196 | wallet_handle = kmd_client.init_wallet_handle( 197 | find_wallet_id(kmd_client, "unencrypted-default-wallet"), "" 198 | ) 199 | 200 | return flow( 201 | wallet_handle, 202 | kmd_client.list_keys, 203 | lambda keys: match_account(algod_client, keys, is_default_account), 204 | partial(kmd_client.export_key, wallet_handle, ""), 205 | Account.from_private_key, 206 | ) 207 | 208 | 209 | def is_localnet(algod_client: AlgodClient) -> bool: 210 | """Check if the AlgodClient is connected to a localnet. 211 | 212 | Args: 213 | algod_client (AlgodClient): The AlgodClient instance. 214 | 215 | Returns: 216 | bool: True if the client is connected to a localnet, else False. 217 | """ 218 | return algod_client.suggested_params().gen in { 219 | "devnet-v1", 220 | "sandnet-v1", 221 | "dockernet-v1", 222 | } 223 | 224 | 225 | def get_algonode_config( 226 | network: Literal[ 227 | AlgorandNetwork.BETANET, AlgorandNetwork.TESTNET, AlgorandNetwork.MAINNET 228 | ], 229 | api: Literal[AlgorandApi.ALGOD, AlgorandApi.INDEXER], 230 | ) -> ClientConfig: 231 | """Get the client config for Algonode API. 232 | 233 | Args: 234 | network (AlgorandNetworkChoice): The Algorand network. 235 | api (Literal[AlgorandApi.ALGOD, AlgorandApi.INDEXER]): The Algorand API. 236 | 237 | Returns: 238 | ClientConfig: The client config object. 
239 | """ 240 | return ClientConfig( 241 | url=f"https://{network}-{('idx', 'api')[api == AlgorandApi.ALGOD]}.algonode.cloud", 242 | credential="", 243 | ) 244 | -------------------------------------------------------------------------------- /algobase/algorand/dispenser.py: -------------------------------------------------------------------------------- 1 | """Algorand TestNet dispenser client.""" 2 | 3 | from dataclasses import dataclass, field 4 | from typing import Literal, Self 5 | 6 | import httpx 7 | 8 | from algobase.choices import AlgorandAsset 9 | from algobase.models.dispenser import DispenserFundResponse 10 | from algobase.settings import Settings 11 | 12 | 13 | @dataclass(frozen=True, slots=True) 14 | class Dispenser: 15 | """Algorand TestNet dispenser client.""" 16 | 17 | _access_token: str = field(repr=False) 18 | 19 | def __post_init__(self): 20 | """Check that the access token is not None or an empty string. 21 | 22 | Raises: 23 | ValueError: If the access token is None or an empty string. 24 | """ 25 | if not self._access_token: 26 | raise ValueError("Access token is required.") 27 | 28 | @classmethod 29 | def from_settings(cls, settings: Settings) -> Self: 30 | """Create an instance of the IPFS client from the settings object. 31 | 32 | Args: 33 | settings (Settings): The settings object. 34 | 35 | Raises: 36 | ValueError: If the dispenser access token is None. 37 | 38 | Returns: 39 | Self: An instance of the Dispenser client. 
    """
        if settings.testnet_dispenser_access_token is None:
            raise ValueError("Dispenser access token must not be None.")
        return cls(_access_token=settings.testnet_dispenser_access_token)

    @property
    def base_url(self) -> httpx.URL:
        """The base URL of the dispenser API."""
        return httpx.URL("https://api.dispenser.algorandfoundation.tools")

    @property
    def access_token(self) -> str:
        """The OAuth access token used to authenticate with the dispenser API."""
        return self._access_token

    @property
    def headers(self) -> dict[str, str]:
        """The headers to use for the HTTP requests (bearer-token authorization)."""
        return {"Authorization": f"Bearer {self.access_token}"}

    def fund(
        self, address: str, amount: int, asset_id: Literal[AlgorandAsset.ALGO]
    ) -> DispenserFundResponse:
        """Funds an account from the TestNet dispenser.

        Args:
            address (str): The address of the account to fund.
            amount (int): The amount to fund the account with.
            asset_id (Literal[AlgorandAsset.ALGO]): The asset ID.

        Raises:
            httpx.HTTPError: If the request was unsuccessful.

        Returns:
            DispenserFundResponse: The transaction ID and amount funded.
75 | """ 76 | with httpx.Client() as client: 77 | response = client.post( 78 | url=self.base_url.join(f"fund/{asset_id}"), 79 | json={"receiver": address, "amount": amount, "assetID": asset_id}, 80 | headers=self.headers, 81 | timeout=15, 82 | ) 83 | data = response.json() 84 | if response.status_code == httpx.codes.OK: 85 | return DispenserFundResponse.model_validate(data) 86 | else: 87 | raise httpx.HTTPError( 88 | f"HTTP {response.status_code} error: Failed to fund account {address} with amount {amount} of asset {asset_id}.", 89 | ) 90 | -------------------------------------------------------------------------------- /algobase/algorand/simple_mint.py: -------------------------------------------------------------------------------- 1 | """Utilities for minting assets on Algorand with sensible defaults.""" 2 | 3 | from typing import TypeAlias 4 | 5 | from algosdk.transaction import AssetConfigTxn, wait_for_confirmation 6 | from algosdk.v2client.algod import AlgodClient 7 | from returns.pipeline import flow 8 | 9 | from algobase.algorand.account import Account 10 | from algobase.choices import Arc 11 | from algobase.functional import maybe_apply 12 | from algobase.models.algod import PendingTransactionResponse 13 | from algobase.models.arc3 import Arc3Metadata, Arc3Properties 14 | from algobase.models.arc19 import Arc19Metadata 15 | from algobase.models.asa import Asa 16 | from algobase.models.asset_params import AssetParams 17 | 18 | Arc3NonTraitProperties: TypeAlias = dict[ 19 | str, 20 | str 21 | | int 22 | | float 23 | | dict[str, "Arc3NonTraitProperties"] 24 | | list["Arc3NonTraitProperties"], 25 | ] 26 | 27 | 28 | def create_metadata( 29 | description: str | None = None, properties: Arc3NonTraitProperties | None = None 30 | ) -> Arc3Metadata: 31 | """Create ARC-3 metadata for an NFT. 32 | 33 | Args: 34 | description (str | None, optional): Description of the NFT. Defaults to None. 
        properties (Arc3NonTraitProperties | None, optional): Additional non-trait properties. Defaults to None.

    Returns:
        Arc3Metadata: The ARC-3 metadata.
    """
    # Name and decimals are fixed defaults for a simple pure NFT mint.
    return Arc3Metadata(
        arc=Arc.ARC3,
        name="NFT",
        decimals=0,
        description=description,
        properties=maybe_apply(properties, Arc3Properties.model_validate),
    )


def create_metadata_arc19(
    description: str | None = None, properties: Arc3NonTraitProperties | None = None
) -> Arc19Metadata:
    """Create ARC-19 metadata for an NFT.

    Args:
        description (str | None, optional): Description of the NFT. Defaults to None.
        properties (Arc3NonTraitProperties | None, optional): Additional non-trait properties. Defaults to None.

    Returns:
        Arc19Metadata: The ARC-19 metadata (wrapping ARC-3 metadata).
    """
    # ARC-19 metadata embeds an ARC-3 payload; same fixed defaults as
    # `create_metadata` above.
    return Arc19Metadata(
        arc=Arc.ARC19,
        arc3_metadata=Arc3Metadata(
            arc=Arc.ARC3,
            name="NFT",
            decimals=0,
            description=description,
            properties=maybe_apply(properties, Arc3Properties.model_validate),
        ),
    )


def create_asa(metadata: Arc3Metadata | Arc19Metadata, cid: str) -> Asa:
    """Creates an instance of the `Asa` model.

    Args:
        metadata (Arc3Metadata | Arc19Metadata): The ARC-3 or ARC-19 metadata.
        cid (str): The IPFS CID for the metadata.

    Returns:
        Asa: The `Asa` instance.
    """
    # total=1, decimals=0 makes this a pure (non-fractional) NFT; the URL's
    # "#arc3" fragment marks the metadata as ARC-3 conformant.
    return Asa(
        asset_params=AssetParams(
            total=1,
            decimals=0,
            unit_name="NFT",
            asset_name="NFT",
            url=f"ipfs://{cid}/#arc3",
        ),
        metadata=metadata,
    )


def create_asset_config_txn(
    algod_client: AlgodClient, account: Account, asa: Asa
) -> AssetConfigTxn:
    """Create an AssetConfigTxn for the given account and ASA.

    Args:
        algod_client (AlgodClient): The AlgodClient instance.
        account (Account): The account to use.
        asa (Asa): The ASA to mint.
104 | 105 | Returns: 106 | AssetConfigTxn: The AssetConfigTxn instance. 107 | """ 108 | return AssetConfigTxn( 109 | sender=account.address, 110 | sp=algod_client.suggested_params(), 111 | index=None, 112 | total=asa.asset_params.total, 113 | default_frozen=False, 114 | unit_name=asa.asset_params.unit_name, 115 | asset_name=asa.asset_params.asset_name, 116 | manager=account.address, 117 | reserve=account.address, 118 | freeze=None, 119 | clawback=None, 120 | url=asa.asset_params.url, 121 | metadata_hash=asa.metadata_hash, 122 | note=None, 123 | lease=None, 124 | strict_empty_address_check=False, 125 | decimals=asa.asset_params.decimals, 126 | rekey_to=None, 127 | ) 128 | 129 | 130 | def mint( 131 | algod_client: AlgodClient, account: Account, metadata: Arc3Metadata, cid: str 132 | ) -> int | None: 133 | """Mint an NFT on Algorand. 134 | 135 | Args: 136 | algod_client (AlgodClient): The Algod client. 137 | account (Account): The account to use. 138 | metadata (Arc3Metadata): The ARC-3 metadata. 139 | cid (str): The IPFS CID for the metadata. 140 | 141 | Returns: 142 | int | None: The asset ID if minted, else None. 
143 | """ 144 | return flow( 145 | create_asa(metadata, cid), 146 | lambda asa: create_asset_config_txn(algod_client, account, asa).sign( 147 | account.private_key 148 | ), 149 | algod_client.send_transaction, 150 | lambda txid: wait_for_confirmation(algod_client, txid, 4), 151 | PendingTransactionResponse.model_validate, 152 | lambda response: response.asset_index, 153 | ) 154 | -------------------------------------------------------------------------------- /algobase/choices.py: -------------------------------------------------------------------------------- 1 | """Enums and enum type aliases for algobase.""" 2 | 3 | from enum import IntEnum, StrEnum, auto 4 | from typing import Literal, TypeAlias 5 | 6 | 7 | class Arc(StrEnum): 8 | """An enumeration of Algorand ARC standards that are supported in algobase.""" 9 | 10 | ARC3 = auto() 11 | ARC19 = auto() 12 | 13 | 14 | ArcChoice: TypeAlias = Literal[Arc.ARC3, Arc.ARC19] 15 | 16 | 17 | class AsaType(StrEnum): 18 | """An enumeration of Algorand Standard Asset (ASA) types.""" 19 | 20 | FUNGIBLE = auto() 21 | NON_FUNGIBLE_PURE = auto() 22 | NON_FUNGIBLE_FRACTIONAL = auto() 23 | 24 | 25 | AsaTypeChoice: TypeAlias = Literal[ 26 | AsaType.FUNGIBLE, AsaType.NON_FUNGIBLE_PURE, AsaType.NON_FUNGIBLE_FRACTIONAL 27 | ] 28 | 29 | 30 | class IpfsProvider(StrEnum): 31 | """An enumeration of IPFS providers.""" 32 | 33 | NFT_STORAGE = auto() 34 | 35 | 36 | IpfsProviderChoice: TypeAlias = Literal[IpfsProvider.NFT_STORAGE] 37 | 38 | 39 | class IpfsPinStatus(StrEnum): 40 | """An enumeration of IPFS pin statuses.""" 41 | 42 | QUEUED = auto() 43 | PINNING = auto() 44 | PINNED = auto() 45 | FAILED = auto() 46 | 47 | 48 | IpfsPinStatusChoice: TypeAlias = Literal[ 49 | IpfsPinStatus.QUEUED, 50 | IpfsPinStatus.PINNING, 51 | IpfsPinStatus.PINNED, 52 | IpfsPinStatus.FAILED, 53 | ] 54 | 55 | 56 | class AlgorandNetwork(StrEnum): 57 | """An enumeration of Algorand networks.""" 58 | 59 | LOCALNET = auto() 60 | BETANET = auto() 61 | TESTNET = 
auto() 62 | MAINNET = auto() 63 | 64 | 65 | AlgorandNetworkChoice: TypeAlias = Literal[ 66 | AlgorandNetwork.LOCALNET, 67 | AlgorandNetwork.BETANET, 68 | AlgorandNetwork.TESTNET, 69 | AlgorandNetwork.MAINNET, 70 | ] 71 | 72 | 73 | class AlgorandApi(StrEnum): 74 | """An enumeration of Algorand APIs.""" 75 | 76 | ALGOD = auto() 77 | INDEXER = auto() 78 | KMD = auto() 79 | 80 | 81 | AlgorandApiChoice: TypeAlias = Literal[ 82 | AlgorandApi.ALGOD, AlgorandApi.INDEXER, AlgorandApi.KMD 83 | ] 84 | 85 | 86 | class AlgorandApiProvider(StrEnum): 87 | """An enumeration of Algorand API providers.""" 88 | 89 | LOCALHOST = auto() 90 | CUSTOM = auto() 91 | ALGONODE = auto() 92 | 93 | 94 | AlgorandApiProviderChoice: TypeAlias = Literal[ 95 | AlgorandApiProvider.LOCALHOST, 96 | AlgorandApiProvider.CUSTOM, 97 | AlgorandApiProvider.ALGONODE, 98 | ] 99 | 100 | 101 | class AlgorandAsset(IntEnum): 102 | """An enumeration of Algorand asset names and IDs.""" 103 | 104 | ALGO = 0 105 | -------------------------------------------------------------------------------- /algobase/data/__init__.py: -------------------------------------------------------------------------------- 1 | """Reference data files.""" 2 | -------------------------------------------------------------------------------- /algobase/data/ipfs.toml: -------------------------------------------------------------------------------- 1 | # Public IPFS Gateways. 
Source: https://github.com/ipfs/public-gateway-checker/blob/master/gateways.txt 2 | ipfs_gateways = [ 3 | "https://ipfs.io", 4 | "https://dweb.link", 5 | "https://gateway.ipfs.io", 6 | "https://ninetailed.ninja", 7 | "https://via0.com", 8 | "https://ipfs.eternum.io", 9 | "https://hardbin.com", 10 | "https://cloudflare-ipfs.com", 11 | "https://astyanax.io", 12 | "https://cf-ipfs.com", 13 | "https://gateway.originprotocol.com", 14 | "https://gateway.pinata.cloud", 15 | "https://ipfs.sloppyta.co", 16 | "https://ipfs.busy.org", 17 | "https://ipfs.greyh.at", 18 | "https://gateway.serph.network", 19 | "https://gw3.io", 20 | "https://jorropo.net", 21 | "https://ipfs.fooock.com", 22 | "https://cdn.cwinfo.net", 23 | "https://aragon.ventures", 24 | "https://permaweb.io", 25 | "https://ipfs.best-practice.se", 26 | "https://storjipfs-gateway.com", 27 | "https://ipfs.runfission.com", 28 | "https://ipfs.trusti.id", 29 | "https://ipfs.overpi.com", 30 | "https://ipfs.ink", 31 | "https://ipfsgateway.makersplace.com", 32 | "https://ipfs.funnychain.co", 33 | "https://ipfs.telos.miami", 34 | "https://ipfs.mttk.net", 35 | "https://ipfs.fleek.co", 36 | "https://ipfs.jbb.one", 37 | "https://ipfs.yt", 38 | "https://hashnews.k1ic.com", 39 | "https://ipfs.drink.cafe", 40 | "https://ipfs.kavin.rocks", 41 | "https://ipfs.denarius.io", 42 | "https://crustwebsites.net", 43 | "https://ipfs0.sjc.cloudsigma.com", 44 | "http://ipfs.genenetwork.org", 45 | "https://ipfs.eth.aragon.network", 46 | "https://ipfs.smartholdem.io", 47 | "https://ipfs.xoqq.ch", 48 | "http://natoboram.mynetgear.com:8080", 49 | "https://video.oneloveipfs.com", 50 | "https://ipfs.scalaproject.io", 51 | "https://search.ipfsgate.com", 52 | "https://ipfs.decoo.io", 53 | "https://alexdav.id", 54 | "https://ipfs.uploads.nu", 55 | "https://hub.textile.io", 56 | "https://ipfs1.pixura.io", 57 | "https://ravencoinipfs-gateway.com", 58 | "https://ipfs.tubby.cloud", 59 | "https://ipfs.lain.la", 60 | "https://ipfs.kaleido.art", 61 | 
"https://ipfs.slang.cx", 62 | "https://ipfs.arching-kaos.com", 63 | "https://storry.tv", 64 | "https://ipfs.1-2.dev", 65 | "https://dweb.eu.org", 66 | "https://permaweb.eu.org", 67 | "https://ipfs.namebase.io", 68 | "https://ipfs.tribecap.co", 69 | "https://ipfs.kinematiks.com", 70 | "https://nftstorage.link", 71 | "https://gravity.jup.io", 72 | "http://fzdqwfb5ml56oadins5jpuhe6ki6bk33umri35p5kt2tue4fpws5efid.onion", 73 | "https://tth-ipfs.com", 74 | "https://ipfs.chisdealhd.co.uk", 75 | "https://ipfs.alloyxuast.tk", 76 | "https://4everland.io", 77 | "https://ipfs-gateway.cloud", 78 | "https://w3s.link", 79 | "https://cthd.icu", 80 | "https://ipfs.tayfundogdas.me", 81 | "https://ipfs.jpu.jp", 82 | "https://ipfs.soul-network.com", 83 | ] 84 | -------------------------------------------------------------------------------- /algobase/functional.py: -------------------------------------------------------------------------------- 1 | """Functions for type casting.""" 2 | 3 | from collections.abc import Callable, Iterable 4 | from typing import Any, ParamSpec, TypeVar 5 | 6 | A = TypeVar("A") 7 | B = TypeVar("B") 8 | C = TypeVar("C") 9 | 10 | 11 | def maybe_apply(x: A | None, f: Callable[[A], B]) -> B | None: 12 | """Return the result of applying the function to the value if the value is not None, otherwise return None. 13 | 14 | Args: 15 | x (A | None): The value to apply the function to. 16 | f (Callable[[A], B | None]): The function to apply to the value. 17 | 18 | Returns: 19 | B | None: The result of applying the function to the value, or None if the value is None. 20 | """ 21 | return f(x) if x is not None else None 22 | 23 | 24 | IT = TypeVar("IT") 25 | 26 | 27 | def first_true( 28 | iterable: Iterable[IT], 29 | default: IT | None = None, 30 | predicate: Callable[[IT], bool] | None = None, 31 | ) -> IT | None: 32 | """Returns the first true value in the iterable. 33 | 34 | If no true value is found, it returns `default`. 
35 | If `predicate` is not None, it returns the first item for which predicate(item) is true. 36 | 37 | Args: 38 | iterable (Iterable[IT]): The iterable. 39 | default (IT | None, optional): The default value to return if no true value is found. Defaults to None. 40 | predicate (Callable[[IT], bool] | None, optional): The predicate function. Defaults to None. 41 | 42 | Returns: 43 | IT | None: The item in the iterable that is true, or `default` if no true value is found. 44 | """ 45 | return next(filter(predicate, iterable), default) 46 | 47 | 48 | T = TypeVar("T") 49 | P = ParamSpec("P") 50 | 51 | 52 | def provide_context(**kwargs: Any) -> Callable[..., T]: 53 | """A closure that provides context arguments to a function. 54 | 55 | Args: 56 | **kwargs: Arbitrary keyword arguments. 57 | 58 | Returns: 59 | Callable[..., T]: The wrapped function. 60 | """ 61 | 62 | def wrapped(fn: Callable[P, T], *fn_args: P.args, **fn_kwargs: P.kwargs) -> T: 63 | """Calls the function with the context arguments and any additional arguments passed in. 64 | 65 | Args: 66 | fn (Callable[P, T]): The function to call. 67 | *fn_args: Variable length argument list. 68 | **fn_kwargs: Arbitrary keyword arguments. 69 | 70 | Returns: 71 | T: The result of calling the function. 
72 | """ 73 | inject = { 74 | k: v 75 | for k, v in kwargs.items() 76 | if k in fn.__code__.co_varnames[len(fn_args) :] 77 | } 78 | return fn(*fn_args, **{**inject, **fn_kwargs}) 79 | 80 | return wrapped 81 | -------------------------------------------------------------------------------- /algobase/ipfs/__init__.py: -------------------------------------------------------------------------------- 1 | """IPFS client.""" 2 | -------------------------------------------------------------------------------- /algobase/ipfs/client_base.py: -------------------------------------------------------------------------------- 1 | """Abstract base class for IPFS clients.""" 2 | 3 | from abc import ABC, abstractmethod 4 | from typing import Self 5 | 6 | import httpx 7 | 8 | from algobase.choices import IpfsPinStatusChoice, IpfsProviderChoice 9 | from algobase.settings import Settings 10 | 11 | 12 | class IpfsClient(ABC): 13 | """Abstract base class for IPFS clients.""" 14 | 15 | def __post_init__(self) -> None: 16 | """If an API key is required, check that it is present.""" 17 | if self.is_api_key_required: 18 | self.check_api_key_is_present() 19 | 20 | @classmethod 21 | @abstractmethod 22 | def from_settings(cls, settings: Settings) -> Self: 23 | """Create an instance of the IPFS client from a settings object.""" 24 | ... # pragma: no cover 25 | 26 | @property 27 | @abstractmethod 28 | def ipfs_provider_name(self) -> IpfsProviderChoice: 29 | """The name of the IPFS provider.""" 30 | ... # pragma: no cover 31 | 32 | @property 33 | @abstractmethod 34 | def api_version(self) -> str: 35 | """The version of the IPFS provider's API.""" 36 | ... # pragma: no cover 37 | 38 | @property 39 | @abstractmethod 40 | def base_url(self) -> httpx.URL: 41 | """The base URL of the IPFS provider's API.""" 42 | ... # pragma: no cover 43 | 44 | @property 45 | @abstractmethod 46 | def is_api_key_required(self) -> bool: 47 | """Whether the IPFS provider requires an API key.""" 48 | ... 
"""IPFS client for nft.storage."""

from dataclasses import dataclass
from typing import Any, Self

import httpx

from algobase.choices import (
    IpfsPinStatus,
    IpfsPinStatusChoice,
    IpfsProvider,
    IpfsProviderChoice,
)
from algobase.ipfs.client_base import IpfsClient
from algobase.settings import Settings


@dataclass
class NftStorage(IpfsClient):
    """IPFS client for nft.storage.

    Requires the `NFT_STORAGE_API_KEY` environment variable to be set.
    """

    # Private storage for the key; exposed read-only via the `api_key` property.
    _api_key: str | None

    @classmethod
    def from_settings(cls, settings: Settings) -> Self:
        """Create an instance of the IPFS client from the settings object."""
        return cls(_api_key=settings.nft_storage_api_key)

    @property
    def ipfs_provider_name(self) -> IpfsProviderChoice:
        """The name of the IPFS provider."""
        return IpfsProvider.NFT_STORAGE

    @property
    def api_version(self) -> str:
        """The version of the IPFS provider's API."""
        return "1.0"

    @property
    def base_url(self) -> httpx.URL:
        """The base URL of the IPFS provider's API."""
        return httpx.URL("https://api.nft.storage")

    @property
    def is_api_key_required(self) -> bool:
        """Whether the IPFS provider requires an API key."""
        return True

    @property
    def api_key(self) -> str | None:
        """The API key."""
        return self._api_key

    @property
    def headers(self) -> dict[str, str]:
        """The headers to use for the HTTP requests."""
        return {"Authorization": f"Bearer {self.api_key}"}

    @staticmethod
    def _error_message(data: Any) -> str | None:
        """Safely extract `error.message` from an API error payload.

        Fix: the previous inline `data.get('error').get('message')` raised
        AttributeError (masking the real HTTP error) whenever the body had no
        'error' object.
        """
        if isinstance(data, dict):
            return (data.get("error") or {}).get("message")
        return None

    def store_json(self, json: str | bytes) -> str:
        """Stores JSON data in IPFS.

        Args:
            json (str | bytes): The JSON to store.

        Returns:
            str: The IPFS CID of the stored data.

        Raises:
            httpx.HTTPError: If the upload fails or the response body is malformed.
        """
        with httpx.Client() as client:
            response = client.post(
                url=self.base_url.join("upload"),
                content=json,
                headers=self.headers,
                timeout=10.0,
            )
            data = response.json()
            if response.status_code == httpx.codes.OK:
                # Guard the nested lookup: a 200 body without a "value" object
                # previously raised AttributeError instead of HTTPError.
                cid = (data.get("value") or {}).get("cid")
                if data.get("ok") is True and cid is not None:
                    return str(cid)
                raise httpx.HTTPError(
                    f"HTTP Exception for {response.request.url}: Failed to store JSON in IPFS using {self.ipfs_provider_name}."
                )
            raise httpx.HTTPError(
                f"HTTP Exception for {response.request.url}: {response.status_code} {self._error_message(data)}"
            )

    def fetch_pin_status(self, cid: str) -> IpfsPinStatusChoice:
        """Returns the pinning status of a file, by CID.

        Args:
            cid (str): The CID of the file to check.

        Returns:
            IpfsPinStatusChoice: The pin status of the CID.

        Raises:
            httpx.HTTPError: If the request fails or the status is not a valid pin status.
        """
        with httpx.Client() as client:
            response = client.get(
                url=self.base_url.join(f"check/{cid}"),
                headers=self.headers,
                timeout=10.0,
            )
            data = response.json()
            if response.status_code == httpx.codes.OK:
                # Fix: the nested .get() chain previously ran unguarded, so a
                # 200 body missing "value" or "pin" raised AttributeError
                # before the "ok" flag was ever checked.
                pin_status = ((data.get("value") or {}).get("pin") or {}).get("status")
                if (
                    data.get("ok") is True
                    and pin_status is not None
                    and hasattr(IpfsPinStatus, str(pin_status).upper())
                ):
                    return IpfsPinStatus(pin_status)
                raise httpx.HTTPError(
                    f"HTTP Exception for {response.request.url}: {pin_status} is not a valid pin status."
                )
            raise httpx.HTTPError(
                f"HTTP Exception for {response.request.url}: {response.status_code} {self._error_message(data)}"
            )
"""Pydantic models for Algorand ARC-3 metadata.

Reference: https://github.com/algorandfoundation/ARCs/blob/main/ARCs/arc-0003.md
"""

from typing import Literal

from pydantic import BaseModel, ConfigDict, Field

from algobase.choices import Arc
from algobase.types.annotated import (
    Arc3Color,
    Arc3LocalizedUrl,
    Arc3Sri,
    Arc3Url,
    Arc16Traits,
    AsaDecimals,
    Base64Str,
    ImageMimeType,
    MimeType,
    UnicodeLocale,
)


class Arc3Localization(BaseModel):
    """A Pydantic model for Algorand ARC-3 localization."""

    model_config = ConfigDict(frozen=True)

    uri: Arc3LocalizedUrl = Field(
        description="The URI pattern to fetch localized data from. This URI should contain the substring `{locale}` which will be replaced with the appropriate locale value before sending the request."
    )
    default: UnicodeLocale = Field(
        description="The locale of the default data within the base JSON."
    )
    locales: list[UnicodeLocale] = Field(
        description="The list of locales for which data is available. These locales should conform to those defined in the Unicode Common UnicodeLocale Data Repository (http://cldr.unicode.org/)."
    )
    integrity: dict[UnicodeLocale, Arc3Sri] | None = Field(
        default=None,
        description="The SHA-256 digests of the localized JSON files (except the default one). The field name is the locale. The field value is a single SHA-256 integrity metadata as defined in the W3C subresource integrity specification (https://w3c.github.io/webappsec-subresource-integrity).",
    )


class Arc3Properties(BaseModel):
    """A Pydantic model for Algorand ARC-3 properties.

    If the `traits` property is present, it must comply with ARC-16: https://github.com/algorandfoundation/ARCs/blob/main/ARCs/arc-0016.md
    """

    model_config = ConfigDict(frozen=True, extra="allow")

    # Struggling to get recursive type definition working here.
    # Have defined `Arc3NonTraitProperties` in algobase/types/annotated.py
    # but it doesn't work as an annotation for __pydantic_extra__.
    __pydantic_extra__: dict[str, str | int | float | dict | list]  # type: ignore

    traits: Arc16Traits | None = Field(
        default=None,
        description="Traits (attributes) that can be used to calculate things like rarity. Values may be strings or numbers.",
    )


class Arc3Metadata(BaseModel):
    """A Pydantic model for Algorand ARC-3 metadata."""

    model_config = ConfigDict(frozen=True)

    arc: Literal[Arc.ARC3] = Field(
        default=Arc.ARC3,
        description="Name of the Algorand ARC standard that the NFT metadata adheres to.",
        exclude=True,
    )

    @property
    def json_str(self) -> str:
        """Returns the model JSON as a string, omitting fields that are None."""
        return self.model_dump_json(exclude_none=True, indent=4)

    @property
    def json_bytes(self) -> bytes:
        """Returns the model JSON encoded as UTF-8 bytes.

        Fix: this getter previously declared an `encoding` parameter, but a
        `@property` getter can never be called with arguments, so the
        parameter was unusable dead code. UTF-8 — the only encoding that was
        officially supported — is now applied directly.
        """
        return self.json_str.encode("utf-8")

    name: str | None = Field(
        default=None, description="Identifies the asset to which this token represents."
    )
    decimals: AsaDecimals | None = Field(
        default=None,
        description="The number of decimal places that the token amount should display - e.g. 18, means to divide the token amount by 1000000000000000000 to get its user representation.",
    )
    description: str | None = Field(
        default=None, description="Describes the asset to which this token represents."
    )
    image: Arc3Url | None = Field(
        default=None,
        description="A URI pointing to a file with MIME type image/* representing the asset to which this token represents. Consider making any images at a width between 320 and 1080 pixels and aspect ratio between 1.91:1 and 4:5 inclusive.",
    )
    image_integrity: Arc3Sri | None = Field(
        default=None,
        description="The SHA-256 digest of the file pointed by the URI image. The field value is a single SHA-256 integrity metadata as defined in the W3C subresource integrity specification (https://w3c.github.io/webappsec-subresource-integrity).",
    )
    image_mimetype: ImageMimeType | None = Field(
        default=None,
        description="The MIME type of the file pointed by the URI image. MUST be of the form 'image/*'.",
    )
    background_color: Arc3Color | None = Field(
        default=None,
        description="Background color do display the asset. MUST be a six-character hexadecimal without a pre-pended #.",
    )
    external_url: Arc3Url | None = Field(
        default=None,
        description="A URI pointing to an external website presenting the asset.",
    )
    external_url_integrity: Arc3Sri | None = Field(
        default=None,
        description="The SHA-256 digest of the file pointed by the URI external_url. The field value is a single SHA-256 integrity metadata as defined in the W3C subresource integrity specification (https://w3c.github.io/webappsec-subresource-integrity).",
    )
    external_url_mimetype: Literal["text/html"] | None = Field(
        default=None,
        description="The MIME type of the file pointed by the URI external_url. It is expected to be 'text/html' in almost all cases.",
    )
    animation_url: Arc3Url | None = Field(
        default=None,
        description="A URI pointing to a multi-media file representing the asset.",
    )
    animation_url_integrity: Arc3Sri | None = Field(
        default=None,
        description="The SHA-256 digest of the file pointed by the URI external_url. The field value is a single SHA-256 integrity metadata as defined in the W3C subresource integrity specification (https://w3c.github.io/webappsec-subresource-integrity).",
    )
    animation_url_mimetype: MimeType | None = Field(
        default=None,
        description="The MIME type of the file pointed by the URI animation_url. If the MIME type is not specified, clients MAY guess the MIME type from the file extension or MAY decide not to display the asset at all. It is STRONGLY RECOMMENDED to include the MIME type.",
    )
    properties: Arc3Properties | None = Field(
        default=None,
        description="Arbitrary properties (also called attributes). Values may be strings, numbers, object or arrays.",
    )
    extra_metadata: Base64Str | None = Field(
        default=None,
        description="Extra metadata in base64. If the field is specified (even if it is an empty string) the asset metadata (am) of the ASA is computed differently than if it is not specified.",
    )
    localization: Arc3Localization | None = Field(
        default=None,
        description="A sub-object that may be used to provide localized values for fields that need it.",
    )
AsaAssetName
from algobase.utils.hash import sha256, sha512_256
from algobase.utils.validate import (
    is_valid,
    validate_arc19_asset_url,
    validate_type_compatibility,
)


class Asa(BaseModel):
    """A Pydantic model for Algorand Standard Assets (ASAs).

    Combines the on-chain asset parameters with optional ARC-3/ARC-19
    metadata and cross-validates them via the model validators below.
    """

    model_config = ConfigDict(frozen=True)

    # Optional declared type; checked against `derived_asa_type` in
    # `check_asa_type_constraints`.
    asa_type: AsaTypeChoice | None = Field(
        default=None, description="The type of the ASA."
    )
    asset_params: AssetParams = Field(description="AssetParams Pydantic model.")
    # Discriminated union on each metadata model's `arc` literal field.
    metadata: Arc3Metadata | Arc19Metadata | None = Field(
        default=None, description="Metadata Pydantic model.", discriminator="arc"
    )

    @property
    def derived_asa_type(self) -> AsaTypeChoice:
        """The derived type of the ASA, inferred from total supply and decimals."""
        match self.asset_params:
            case asa if asa.total == 1 and asa.decimals == 0:
                return AsaType.NON_FUNGIBLE_PURE
            # Means the total supply is 1
            case asa if asa.decimals == math.log10(asa.total):
                return AsaType.NON_FUNGIBLE_FRACTIONAL
            case _:
                return AsaType.FUNGIBLE

    @property
    def derived_arc3_metadata(self) -> Arc3Metadata | None:
        """The derived ARC-3 metadata.

        ARC-19 metadata may embed an ARC-3 model; unwrap it when present.
        """
        match self.metadata:
            case Arc3Metadata():
                return self.metadata
            case Arc19Metadata() if isinstance(
                self.metadata.arc3_metadata, Arc3Metadata
            ):
                return self.metadata.arc3_metadata
            case _:
                return None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def metadata_hash(self) -> AlgorandHash | None:
        """The hash of the JSON metadata.

        Plain SHA-256 when there is no extra metadata; otherwise the ARC-3
        SHA-512/256 construction shown in the comment below.
        """
        if (metadata := self.derived_arc3_metadata) is None:
            return None
        if metadata.extra_metadata is None:
            return sha256(metadata.json_bytes)
        else:
            # am = SHA-512/256("arc0003/am" || SHA-512/256("arc0003/amj" || content of JSON Metadata file) || e)
            base_hash = sha512_256(b"arc0003/amj" + metadata.json_bytes)
            extra_metadata_bytes = b64decode(metadata.extra_metadata)
            return sha512_256(b"arc0003/am" + base_hash + extra_metadata_bytes)

    @model_validator(mode="after")
    def check_asa_type_constraints(self) -> "Asa":
        """Validate the ASA type against the relevant constraints.

        Raises:
            ValueError: If the declared `asa_type` conflicts with the type
                derived from the asset parameters.
        """
        if self.asa_type is not None and self.asa_type != self.derived_asa_type:
            match self.asa_type:
                case AsaType.NON_FUNGIBLE_PURE:
                    raise ValueError(
                        "Total number of units must be 1 and number of digits after the decimal point must be 0 for a pure NFT."
                    )
                case AsaType.NON_FUNGIBLE_FRACTIONAL:
                    raise ValueError(
                        "Number of digits after the decimal point must be equal to the logarithm in base 10 of total number of units. In other words, the total supply of the ASA must be exactly 1."
                    )
                case AsaType.FUNGIBLE:
                    raise ValueError(
                        "Total supply of the ASA must be greater than 1, for a fungible asset."
                    )
        return self

    @model_validator(mode="after")
    def check_arc_constraints(self) -> "Asa":
        """Validate fields against ARC constraints, if applicable.

        Dispatches on the concrete metadata type: ARC-3 metadata triggers the
        asset-URL check; ARC-19 additionally requires a reserve address.
        """
        if self.derived_arc3_metadata is not None:
            self.check_arc3_metadata_constraints(self.derived_arc3_metadata)
        match self.metadata:
            case Arc3Metadata():
                self.check_arc3_asset_url()
            case Arc19Metadata():
                if self.asset_params.url is None:
                    raise ValueError("Asset URL must not be `None`.")
                validate_arc19_asset_url(self.asset_params.url)
                self.check_arc19_reserve()

        return self

    def check_arc3_decimals(self, metadata: Arc3Metadata) -> "Asa":
        """Raise an error if the decimals in the asset parameters doesn't match the decimals in the metadata."""
        if (
            metadata.decimals is not None
            and metadata.decimals != self.asset_params.decimals
        ):
            raise ValueError(
                f"Decimals in the asset parameters ({self.asset_params.decimals}) must match the decimals in the metadata ({metadata.decimals})."
            )
        return self

    def check_arc3_unit_name(self, metadata: Arc3Metadata) -> "Asa":
        """Raise a warning if the metadata 'name' property is not related to the asset unit name.

        Uses the difflib `SequenceMatcher` for string similarity.
        """
        # 0.5 is a heuristic similarity threshold; a warning (not an error)
        # because the ARC-3 spec only recommends relatedness.
        if (
            self.asset_params.unit_name is not None
            and metadata is not None
            and metadata.name is not None
            and SequenceMatcher(
                None, self.asset_params.unit_name.lower(), metadata.name.lower()
            ).ratio()
            < 0.5
        ):
            warnings.warn(
                UserWarning(
                    "Asset unit name should be related to the name in the ARC-3 JSON metadata."
                )
            )
        return self

    def check_arc3_asset_url(self) -> "Asa":
        """Checks that the asset URL is valid for ARC-3 ASAs."""
        if self.asset_params.url is None:
            raise ValueError("Asset URL must not be `None`.")
        if not self.asset_params.url.endswith("#arc3"):
            raise ValueError(
                f"Asset URL must end with '#arc3' if asset name is '{self.asset_params.asset_name}'."
            )
        validate_type_compatibility(self.asset_params.url, Url)
        return self

    def check_arc3_metadata_constraints(self, metadata: Arc3Metadata) -> "Asa":
        """Validate fields against ARC constraints, if applicable.

        Raises warnings for values/formats that are allowed but not recommended in ARC specs.
        Raises errors for values/formats that are not allowed in ARC specs.
        """
        # Currently only ARC3 is supported
        if isinstance(metadata, Arc3Metadata):
            self.check_arc3_unit_name(metadata)
            self.check_arc3_decimals(metadata)

            # Asset name constraints
            match self.asset_params.asset_name:
                case None:
                    raise ValueError("Asset name must not be `None` for ARC-3 ASAs.")
                case "arc3":
                    warnings.warn(
                        UserWarning(
                            "Asset name 'arc3' is not recommended for ARC-3 ASAs."
                        )
                    )
                case x if x.endswith("@arc3"):
                    warnings.warn(
                        UserWarning(
                            "Asset name format @arc3 is not recommended for ARC-3 ASAs."
                        )
                    )
                # Constraints on combination of asset name and metadata name
                case _:
                    match metadata.name:
                        case None:
                            raise ValueError(
                                f"Metadata name must not be `None` if asset name is '{self.asset_params.asset_name}'."
                            )
                        case x if x != self.asset_params.asset_name:
                            # If the full metadata name would fit in the 32-byte
                            # asset-name field, it must match exactly; otherwise
                            # the asset name must be a prefix (shortened form).
                            if is_valid(
                                validate_type_compatibility,
                                metadata.name,
                                AsaAssetName,
                            ):
                                raise ValueError(
                                    f"Asset name '{self.asset_params.asset_name}' must match the metadata name '{x}'."
                                )
                            elif not metadata.name.startswith(
                                self.asset_params.asset_name
                            ):
                                raise ValueError(
                                    f"Asset name must be a shortened version of the metadata name '{metadata.name}'."
                                )
        return self

    def check_arc19_reserve(self) -> "Asa":
        """Checks that the CID encoded in the reserve address field is valid for ARC-19 ASAs."""
        if self.asset_params.reserve is None:
            raise ValueError("Reserve address must not be `None`.")
        return self
28 | model_config = ConfigDict(frozen=True) 29 | 30 | total: Uint64 = Field( 31 | description="The total number of base units of the asset to create." 32 | ) 33 | decimals: AsaDecimals = Field( 34 | default=0, 35 | description="The number of digits to use after the decimal point when displaying the asset. If 0, the asset is not divisible. If 1, the base unit of the asset is in tenths. Must be between 0 and 19, inclusive.", 36 | ) 37 | default_frozen: bool = Field( 38 | default=False, 39 | description="Whether slots for this asset in user accounts are frozen by default.", 40 | ) 41 | unit_name: AsaUnitName | None = Field( 42 | default=None, 43 | description="The name of a unit of this asset. Max size is 8 bytes. Example: 'USDT'.", 44 | ) 45 | asset_name: AsaAssetName | None = Field( 46 | default=None, 47 | description="The name of the asset. Max size is 32 bytes. Example: 'Tether'.", 48 | ) 49 | url: AsaUrl | None = Field( 50 | default=None, 51 | description="Specifies a URL where more information about the asset can be retrieved. Max size is 96 bytes.", 52 | ) 53 | metadata_hash: AlgorandHash | None = Field( 54 | default=None, 55 | description="This field is intended to be a 32-byte hash of some metadata that is relevant to your asset and/or asset holders.", 56 | ) 57 | manager: AlgorandAddress | None = Field( 58 | default=None, 59 | description="The address of the account that can manage the configuration of the asset and destroy it.", 60 | ) 61 | reserve: AlgorandAddress | None = Field( 62 | default=None, 63 | description="The address of the account that holds the reserve (non-minted) units of the asset.", 64 | ) 65 | freeze: AlgorandAddress | None = Field( 66 | default=None, 67 | description="The address of the account used to freeze holdings of this asset. If empty, freezing is not permitted.", 68 | ) 69 | clawback: AlgorandAddress | None = Field( 70 | default=None, 71 | description="The address of the account that can clawback holdings of this asset. 
If empty, clawback is not permitted.", 72 | ) 73 | 74 | @classmethod 75 | def from_algod(cls, algod_client: AlgodClient, asset_id: Uint64) -> Self: 76 | """Constructs an instance by fetching asset params from Algod. 77 | 78 | Args: 79 | algod_client (AlgodClient): The Algod client. 80 | asset_id (Uint64): The asset ID to search for. 81 | 82 | Returns: 83 | Self: The `AssetParams` instance. 84 | """ 85 | if asset_id: 86 | response = algod_client.asset_info(asset_id) 87 | data = response.get("asset", response) # type: ignore[union-attr] 88 | data["params"]["metadata-hash"] = ( 89 | b64decode(data["params"]["metadata-hash"]) 90 | if "metadata-hash" in data["params"] 91 | else None 92 | ) 93 | asset = Asset.model_validate(data) 94 | return cls.model_validate(asset.params.model_dump()) 95 | return cls( 96 | total=10_000_000_000, 97 | decimals=6, 98 | default_frozen=False, 99 | unit_name="ALGO", 100 | asset_name="ALGO", 101 | url="https://www.algorand.foundation", 102 | metadata_hash=None, 103 | manager=None, 104 | reserve=None, 105 | freeze=None, 106 | clawback=None, 107 | ) 108 | -------------------------------------------------------------------------------- /algobase/models/dispenser.py: -------------------------------------------------------------------------------- 1 | """Pydantic models for the Algorand TestNet dispenser API.""" 2 | 3 | from pydantic import BaseModel, ConfigDict, Field 4 | 5 | 6 | class DispenserFundResponse(BaseModel): 7 | """TestNet dispenser API 'fund' response.""" 8 | 9 | model_config = ConfigDict(frozen=True) 10 | 11 | tx_id: str = Field(alias="txID") 12 | amount: int 13 | -------------------------------------------------------------------------------- /algobase/models/kmd.py: -------------------------------------------------------------------------------- 1 | """Pydantic models for the KMD API (v1). 2 | 3 | Mostly auto-generated using datamodel-codegen. 
Spec: https://github.com/algorand/go-algorand/blob/master/daemon/kmd/api/swagger.json
"""

from typing import TypeAlias

from pydantic import BaseModel

# Opaque transaction-type identifier string (per the KMD swagger spec).
TxType: TypeAlias = str


class APIV1Wallet(BaseModel):
    """A KMD wallet.

    All fields are optional because the KMD API may omit any of them.
    """

    driver_name: str | None
    driver_version: int | None
    id: str | None
    mnemonic_ux: bool | None
    name: str | None
    supported_txs: list[TxType] | None


class APIV1GETWalletsResponse(BaseModel):
    """The response from the `GET /v1/wallets` endpoint."""

    error: bool | None
    message: str | None
    wallets: list[APIV1Wallet] | None


# ---------------------------------------------------------------------------
# algobase/settings.py
# ---------------------------------------------------------------------------
"""Configuration settings for the algobase."""


from collections.abc import Callable
from typing import Self, TypeVar

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

from algobase.choices import (
    AlgorandApiProvider,
    AlgorandApiProviderChoice,
    AlgorandNetwork,
    AlgorandNetworkChoice,
)

T = TypeVar("T")


class Settings(BaseSettings):
    """Pydantic model for algobase settings.

    Values are read from the environment (prefix "AB_") and from a `.env`
    file, per `model_config` below.
    """

    model_config = SettingsConfigDict(
        env_prefix="AB_", env_file=".env", env_file_encoding="utf-8"
    )

    algorand_network: AlgorandNetworkChoice = Field(
        description="The name of the Algorand network.",
        default=AlgorandNetwork.LOCALNET,
    )
    algorand_provider: AlgorandApiProviderChoice = Field(
        description="The Algorand API provider.", default=AlgorandApiProvider.LOCALHOST
    )
    algod_token: str = Field(description="The Algod API token.", default="a" * 64)
    nft_storage_api_key: str | None = Field(
        description="API key for nft.storage.", default=None
    )
    testnet_dispenser_access_token: str | None = Field(
        description="Access token for the Algorand TestNet dispenser.", default=None
    )

    def __or__(self, f: Callable[[Self], T]) -> T:
        """Operator overloading to pipe settings into a function or other callable.

        Enables `settings | func` as a functional pipeline step.

        Args:
            f (Callable[[Self], T]): The function that takes `settings` as an argument.

        Returns:
            T: The type returned by the function.
        """
        return f(self)


# ---------------------------------------------------------------------------
# algobase/types/__init__.py
# ---------------------------------------------------------------------------
"""Types for algobase."""


# ---------------------------------------------------------------------------
# algobase/types/annotated.py
# ---------------------------------------------------------------------------
"""Annotated types for Pydantic models."""

from functools import partial
from typing import Annotated, Union

from algosdk.constants import HASH_LEN, MAX_ASSET_DECIMALS
from annotated_types import Ge, Gt, Le, Len, Lt
from cytoolz import compose
from pydantic import AfterValidator, UrlConstraints
from pydantic_core import Url
from typing_extensions import TypeAliasType

from algobase.utils.url import decode_url_braces
from algobase.utils.validate import (
    validate_address,
    validate_arc3_sri,
    validate_base64,
    validate_contains_substring,
    validate_encoded_length,
    validate_hex,
    validate_is_power_of_10,
    validate_locale,
    validate_mime_type,
    validate_not_in,
    validate_not_ipfs_gateway,
    validate_type_compatibility,
)

# Generic types
Uint32 = Annotated[int, Ge(0), Lt(2**32)]
Uint64 = Annotated[int, Ge(0), Lt(2**64)]
Base64Str = Annotated[str, AfterValidator(validate_base64)]


# World Wide Web Consortium (W3C) types
MimeType = Annotated[
MimeType = Annotated[
    str, AfterValidator(partial(validate_mime_type, primary_type=None))
]
ImageMimeType = Annotated[
    str, AfterValidator(partial(validate_mime_type, primary_type="image"))
]

# Unicode Common Locale Data Repository (CLDR) locale identifiers.
UnicodeLocale = Annotated[str, AfterValidator(validate_locale)]

# Core Algorand types.
AlgorandHash = Annotated[bytes, Len(HASH_LEN, HASH_LEN)]  # exactly 32 bytes
AlgorandAddress = Annotated[str, AfterValidator(validate_address)]

# Algorand Standard Asset (ASA) field types.
AsaDecimals = Annotated[Uint32, Ge(0), Le(MAX_ASSET_DECIMALS)]  # <= 19
AsaUnitName = Annotated[
    str, AfterValidator(partial(validate_encoded_length, max_length=8))
]
AsaAssetName = Annotated[
    str, AfterValidator(partial(validate_encoded_length, max_length=32))
]
# NOTE: `compose` applies validators right-to-left, so braces are decoded
# before the 96-byte encoded-length check runs.
AsaUrl = Annotated[
    str,
    AfterValidator(
        compose(
            partial(validate_encoded_length, max_length=96),
            decode_url_braces,
        )
    ),
]
AsaFractionalNftTotal = Annotated[
    Uint64, Gt(1), AfterValidator(validate_is_power_of_10)
]

# Algorand ARC-16 trait mappings.
Arc16Traits = dict[str, str | int]

# Algorand ARC-3 URL types: must use the https or ipfs scheme, must not point
# at a known public IPFS gateway, and must fit in 96 bytes once UTF-8 encoded
# (after any percent-encoded template braces are decoded).
Arc3Url = Annotated[
    str,
    AfterValidator(
        compose(
            partial(validate_encoded_length, max_length=96),
            decode_url_braces,
            validate_not_ipfs_gateway,
            partial(
                validate_type_compatibility,
                _type=Annotated[Url, UrlConstraints(allowed_schemes=["https", "ipfs"])],
            ),
        )
    ),
]
# Same as Arc3Url, but must also contain the literal '{locale}' placeholder.
Arc3LocalizedUrl = Annotated[
    str,
    AfterValidator(
        compose(
            partial(validate_encoded_length, max_length=96),
            partial(validate_contains_substring, substring="{locale}"),
            decode_url_braces,
            validate_not_ipfs_gateway,
            partial(
                validate_type_compatibility,
                _type=Annotated[Url, UrlConstraints(allowed_schemes=["https", "ipfs"])],
            ),
        )
    ),
]
def cid_to_algorand_address(cid: str) -> AlgorandAddress:
    """Converts an IPFS content identifier (CID) to an Algorand address.

    This is used in ARC-19: https://github.com/algorandfoundation/ARCs/blob/main/ARCs/arc-0019.md

    Args:
        cid (str): The CID to convert.

    Returns:
        AlgorandAddress: The Algorand address.
    """
    # Extract the raw digest from the CID's multihash, then encode it in the
    # Algorand address format (base32 with checksum).
    digest = multihash.decode(make_cid(cid).multihash).digest
    address = encoding.encode_address(digest)
    # Round-trip through the AlgorandAddress annotated type so the result is
    # validated before it is returned.
    return TypeAdapter(AlgorandAddress).validate_python(address)
22 | """ 23 | return flow( 24 | make_cid(cid).multihash, 25 | lambda h: multihash.decode(h).digest, 26 | encoding.encode_address, 27 | TypeAdapter(AlgorandAddress).validate_python, 28 | ) 29 | -------------------------------------------------------------------------------- /algobase/utils/hash.py: -------------------------------------------------------------------------------- 1 | """Utility functions for hashing data.""" 2 | 3 | import hashlib 4 | 5 | 6 | def sha256(data: bytes) -> bytes: 7 | """Returns a SHA-256 hash digest of the input data. 8 | 9 | Args: 10 | data (bytes): The data to hash. 11 | 12 | Returns: 13 | bytes: The hash digest. 14 | """ 15 | return hashlib.sha256(data).digest() 16 | 17 | 18 | def sha512_256(data: bytes) -> bytes: 19 | """Returns a SHA-512/256 hash digest of the input data. 20 | 21 | Args: 22 | data (bytes): The data to hash. 23 | 24 | Returns: 25 | bytes: The hash digest. 26 | """ 27 | return hashlib.new("sha512_256", data).digest() 28 | -------------------------------------------------------------------------------- /algobase/utils/read.py: -------------------------------------------------------------------------------- 1 | """Functions for reading and caching reference data files.""" 2 | 3 | import mimetypes 4 | import tomllib 5 | 6 | 7 | def read_ipfs_gateways() -> list[str]: 8 | """Read IPFS gateways from the reference data file. 9 | 10 | Returns: 11 | list[str]: The list of IPFS gateways. 12 | """ 13 | with open("algobase/data/ipfs.toml", "rb") as f: 14 | data = tomllib.load(f) 15 | return list(data["ipfs_gateways"]) 16 | 17 | 18 | def read_mime_types() -> list[str]: 19 | """Read MIME types from the reference data file. 20 | 21 | Returns: 22 | list[str]: The list of MIME types. 
23 | """ 24 | mimetypes.init() 25 | return list(mimetypes.types_map.values()) 26 | -------------------------------------------------------------------------------- /algobase/utils/url.py: -------------------------------------------------------------------------------- 1 | """Functions for working with URLs.""" 2 | 3 | from urllib.parse import quote, urlparse 4 | 5 | 6 | def decode_url_braces(url: str) -> str: 7 | """Decodes curly braces in a URL string. 8 | 9 | This allows for arbitrary parameters to be passed in URL strings, as specified in some Algorand standards. 10 | For example, ARC-3 asset URLs may contain the string '{id}', which clients must replace with the asset ID in decimal form. 11 | 12 | Args: 13 | url (str): The URL string to decode. 14 | 15 | Returns: 16 | str: The decoded URL string. 17 | """ 18 | parsed_url = urlparse(url) 19 | decoded_path = parsed_url.path.replace(quote("{"), "{").replace(quote("}"), "}") 20 | decoded_url = parsed_url._replace(path=decoded_path).geturl() 21 | return decoded_url 22 | -------------------------------------------------------------------------------- /algobase/utils/validate.py: -------------------------------------------------------------------------------- 1 | """Functions for data validation.""" 2 | 3 | import base64 4 | import binascii 5 | import hashlib 6 | import math 7 | import string 8 | from collections.abc import Callable, Iterable 9 | from functools import cache 10 | from typing import Any, overload 11 | 12 | from algosdk.encoding import is_valid_address 13 | from babel import Locale, UnknownLocaleError 14 | from pydantic import TypeAdapter 15 | from pydantic_core import Url 16 | 17 | from algobase.utils.read import read_ipfs_gateways, read_mime_types 18 | 19 | 20 | def is_valid(func: Callable[..., Any], *args: Any, **kwargs: Any) -> bool: 21 | """Checks if a function call is valid. 22 | 23 | The other functions in this module raise errors when the input is not valid. 
def is_valid(func: Callable[..., Any], *args: Any, **kwargs: Any) -> bool:
    """Checks if a function call is valid.

    The other functions in this module raise errors when their input is not
    valid. This convenience wrapper runs such a validator and reports the
    outcome as a boolean instead of raising.

    Args:
        func (Callable[..., Any]): The function to call.
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.

    Returns:
        bool: True if the function call doesn't raise a ValueError, else False.
    """
    try:
        func(*args, **kwargs)
    except ValueError:
        return False
    return True


def validate_address(value: str) -> str:
    """Checks that the value is a valid Algorand address.

    Args:
        value (str): The value to check.

    Raises:
        ValueError: If the value is not a valid Algorand address.

    Returns:
        str: The value passed in.
    """
    if is_valid_address(value):
        return value
    raise ValueError(f"'{value}' is not a valid Algorand address.")
def validate_base64(value: str) -> str:
    """Checks that the value is a valid base64 string.

    Args:
        value (str): The value to check.

    Raises:
        ValueError: If the value is not a valid base64 string.

    Returns:
        str: The value passed in.
    """
    try:
        base64.b64decode(value, validate=True)
    except binascii.Error:
        # binascii.Error is a ValueError subclass; re-raise with a clearer message.
        raise ValueError(f"'{value}' is not valid base64.")
    return value


def validate_sri(value: str) -> str:
    """Checks that the value is a valid W3C Subresource Integrity (SRI) value.

    A valid SRI has the form '<algorithm>-<base64 digest>', where the
    algorithm is one of sha256/sha384/sha512 and the decoded digest matches
    that algorithm's digest size.

    Args:
        value (str): The value to check.

    Raises:
        ValueError: If the value is not a valid SRI.

    Returns:
        str: The value passed in.
    """
    supported_algorithms = {"sha256", "sha384", "sha512"}
    hash_algorithm = next(
        (x for x in supported_algorithms if value.startswith(f"{x}-")), None
    )
    if hash_algorithm is None:
        raise ValueError(
            f"'{value}' is not a valid SRI. String must start with 'sha256-', 'sha384-', or 'sha512-'."
        )
    hasher = hashlib.new(hash_algorithm)
    hash_digest = value.removeprefix(f"{hash_algorithm}-")
    try:
        validate_base64(hash_digest)
    except ValueError as e:
        raise ValueError(f"'{value}' is not a valid SRI. Hash digest {e}")
    # Decode once and compare byte counts. Bug fix: the previous message used
    # len(hash_digest) — the length of the base64 *text* — as the byte count,
    # e.g. reporting 44 bytes for a correct 32-byte SHA-256 digest.
    digest = base64.b64decode(hash_digest)
    if len(digest) != hasher.digest_size:
        raise ValueError(
            f"'{value}' is not a valid SRI. Expected {hasher.digest_size} byte hash digest, got {len(digest)} bytes."
        )
    return value


def validate_arc3_sri(value: str) -> str:
    """Checks that the value is a valid SHA-256 Subresource Integrity (SRI) value.

    ARC-3 integrity fields only accept SHA-256, so the value must start with
    'sha256-' and otherwise satisfy `validate_sri`.

    Args:
        value (str): The value to check.

    Raises:
        ValueError: If the value is not a valid SRI.

    Returns:
        str: The value passed in.
    """
    if not value.startswith("sha256-"):
        raise ValueError(
            f"'{value}' is not a valid ARC-3 SRI. String must start with 'sha256-'."
        )
    return validate_sri(value)
212 | """ 213 | if not all(x in string.hexdigits for x in value): 214 | raise ValueError(f"'{value}' is not a valid hex string.") 215 | return value 216 | 217 | 218 | def validate_locale(value: str) -> str: 219 | """Checks that the value is a valid Unicode CLDR locale. 220 | 221 | Args: 222 | value (str): The value to check. 223 | 224 | Raises: 225 | ValueError: If the value is not a valid locale identifier. 226 | UnknownLocaleError: If the value is not a valid Unicode CLDR locale. 227 | 228 | Returns: 229 | str: The value passed in. 230 | """ 231 | try: 232 | Locale.parse(value) 233 | except ValueError as e: 234 | raise ValueError(f"'{value}' is not a valid locale identifier: {e}") 235 | except UnknownLocaleError: 236 | raise ValueError(f"'{value}' is not a valid Unicode CLDR locale.") 237 | return value 238 | 239 | 240 | def validate_contains_substring(value: str | Url, substring: str) -> str | Url: 241 | """Checks that the value contains the substring. 242 | 243 | Args: 244 | value (str | Url): The value to check. 245 | substring (str): The substring to check for. 246 | 247 | Raises: 248 | ValueError: If the value does not contain the substring. 249 | 250 | Returns: 251 | str | Url: The value passed in. 252 | """ 253 | value_string = value if isinstance(value, str) else value.unicode_string() 254 | if substring not in value_string: 255 | raise ValueError(f"'{value_string}' does not contain subtring '{substring}'.") 256 | return value 257 | 258 | 259 | def validate_not_in(iterable: Iterable[str], element: str) -> Iterable[str]: 260 | """Checks that the element is not in the iterable. 261 | 262 | Args: 263 | element (str): The element to check for. 264 | iterable (Iterable): The iterable to check. 265 | 266 | Raises: 267 | ValueError: If the element is in the iterable. 268 | 269 | Returns: 270 | Iterable: The iterable passed in. 
271 | """ 272 | if element in iterable: 273 | raise ValueError(f"'{element}' is in {iterable}.") 274 | return iterable 275 | 276 | 277 | def validate_is_power_of_10(n: int) -> int: 278 | """Checks that the value is a power of 10. 279 | 280 | Args: 281 | n (int): The value to check. 282 | 283 | Raises: 284 | ValueError: If the value is not a power of 10. 285 | 286 | Returns: 287 | int: The value passed in. 288 | """ 289 | if not (n > 0 and math.log10(n).is_integer()): 290 | raise ValueError(f"{n} is not a power of 10.") 291 | return n 292 | 293 | 294 | def validate_type_compatibility(value: str, _type: type) -> str: 295 | """Checks that the value is compatible with the annotated type. 296 | 297 | Args: 298 | value (str): The value to check. 299 | _type (Type): The type to validate against. 300 | 301 | Raises: 302 | ValidationError: If the value is not compatible with the type. 303 | 304 | Returns: 305 | str: The value passed in. 306 | """ 307 | TypeAdapter(_type).validate_python(value) 308 | return value 309 | 310 | 311 | def validate_arc19_asset_url(value: str) -> str: 312 | """Checks that the value is a valid URL for Algorand ARC-19. 313 | 314 | Args: 315 | value (str): The value to check. 316 | 317 | Raises: 318 | ValueError: If the value is not a valid URL for Algorand ARC-19. 319 | 320 | Returns: 321 | str: The value passed in. 322 | """ 323 | if not value.startswith("template-ipfs://"): 324 | raise ValueError("ARC-19 asset URL must start with 'template-ipfs://'") 325 | 326 | # Extract the template substring from the URL, e.g. {ipfscid:0:dag-pb:reserve:sha2-256} 327 | template = value[value.find("{") + 1 : value.find("}")] 328 | match template.split(":"): 329 | case ["ipfscid", "0", "dag-pb", "reserve", "sha2-256"]: 330 | ... 331 | case ["ipfscid", "1", "raw" | "dag-pb", "reserve", "sha2-256"]: 332 | ... 
def validate_arc19_asset_url(value: str) -> str:
    """Checks that the value is a valid URL for Algorand ARC-19.

    Args:
        value (str): The value to check.

    Raises:
        ValueError: If the value is not a valid URL for Algorand ARC-19.

    Returns:
        str: The value passed in.
    """
    if not value.startswith("template-ipfs://"):
        raise ValueError("ARC-19 asset URL must start with 'template-ipfs://'")

    # Extract the template substring from the URL, e.g. {ipfscid:0:dag-pb:reserve:sha2-256},
    # and check each colon-separated field against the ARC-19 grammar.
    fields = value[value.find("{") + 1 : value.find("}")].split(":")
    template_ok = (
        len(fields) == 5
        and fields[0] == "ipfscid"
        and fields[3] == "reserve"
        and fields[4] == "sha2-256"
        and (
            # CIDv0 templates only support the dag-pb codec;
            # CIDv1 templates support raw or dag-pb.
            (fields[1] == "0" and fields[2] == "dag-pb")
            or (fields[1] == "1" and fields[2] in ("raw", "dag-pb"))
        )
    )
    if not template_ok:
        raise ValueError("Asset URL template must follow ARC-19 specification")
    return value
# Bug fix: this previously used `CMD mkdir -p /workspace`, which only sets the
# container's default command (never executed during the build) instead of
# creating the directory in the image. WORKDIR also creates missing
# directories, but the explicit RUN keeps the original intent clear.
RUN mkdir -p /workspace
WORKDIR /workspace
Make sure `algobase` is installed before you start this tutorial (see instructions [here](https://github.com/code-alexander/algobase/blob/main/README.md)).
30 | 31 | Follow our [IPFS tutorial](https://code-alexander.github.io/algobase/how_to/how_to_store_json_ipfs/) if you get stuck 🤗 32 | 33 | ## Mint an ARC-3 NFT 34 | 35 | ```python 36 | from datetime import datetime 37 | 38 | from algobase.algorand.client import ( 39 | create_localnet_algod_client, 40 | get_default_account, 41 | ) 42 | from algobase.algorand.simple_mint import create_metadata, mint 43 | from algobase.ipfs.nft_storage import NftStorage 44 | from algobase.settings import Settings 45 | 46 | # Fetch settings from the environment 47 | settings = Settings() 48 | 49 | # Instantiate Algod client 50 | algod_client = create_localnet_algod_client() 51 | 52 | # Get the default localnet account 53 | account = get_default_account(algod_client) 54 | 55 | # Define the ARC-3 metadata for the NFT 56 | metadata = create_metadata( 57 | description="My first NFT!", 58 | properties={ 59 | "creator": account.address, 60 | "created_at": datetime.now().isoformat(), 61 | }, 62 | ) 63 | 64 | # Instantiate IPFS client 65 | ipfs_client = settings | NftStorage.from_settings 66 | 67 | # Store the metadata JSON in IPFS and get the CID 68 | cid = ipfs_client.store_json(metadata.json_bytes) 69 | 70 | print(f"Stored JSON on IPFS with CID {cid}") 71 | print(f"View the metadata at https://nftstorage.link/ipfs/{cid}") 72 | """ 73 | Stored JSON on IPFS with CID bafkreif2cduyjxdljxaydxxiryxdiw5arljif745e46fv3sajkxwqvvtzq 74 | View the metadata at https://nftstorage.link/ipfs/bafkreif2cduyjxdljxaydxxiryxdiw5arljif745e46fv3sajkxwqvvtzq 75 | """ 76 | 77 | # Mint the NFT on localnet 78 | asset_id = mint( 79 | algod_client=algod_client, 80 | account=account, 81 | metadata=metadata, 82 | cid=cid, 83 | ) 84 | 85 | print(f"NFT minted! Asset ID: {asset_id}") 86 | print(f"View the asset in Dappflow: https://app.dappflow.org/explorer/asset/{asset_id}") 87 | """ 88 | NFT minted! 
Make sure `algobase` is installed before you start this tutorial (see instructions [here](https://github.com/code-alexander/algobase/blob/main/README.md)).
You can use its CLI to set the variable: 24 | 25 | ``` 26 | dotenv set AB_NFT_STORAGE_API_KEY 27 | ``` 28 | 29 | Otherwise, to set the environment variable in Python: 30 | 31 | ```python 32 | import os 33 | 34 | os.environ["AB_NFT_STORAGE_API_KEY"] = "" 35 | ``` 36 | 37 | ## How to Store JSON in IPFS and Check it's Pinned 38 | 39 | ```python 40 | from algobase.ipfs.nft_storage import NftStorage 41 | from algobase.settings import Settings 42 | 43 | # Fetch settings from the environment 44 | settings = Settings() 45 | 46 | # Instantiate the client object by piping settings to it 47 | client = settings | NftStorage.from_settings 48 | 49 | # Store JSON in IPFS (returns the CID of the file if successful) 50 | cid = client.store_json( 51 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 52 | ) 53 | 54 | print(f"Stored JSON on IPFS with CID {cid}") 55 | """ 56 | Stored JSON on IPFS with CID bafkreiaci6q6dolsy32cnqhtmgvf23gzphzzc7urfnka2omgzn7behvbx4 57 | """ 58 | 59 | # Check IPFS pin status ('queued', 'pinning', 'pinned', or 'failed') 60 | pin_status = client.fetch_pin_status(cid) 61 | 62 | print(f"IPFS pin status for CID {cid} is '{pin_status}'") 63 | """ 64 | IPFS pin status for CID bafkreiaci6q6dolsy32cnqhtmgvf23gzphzzc7urfnka2omgzn7behvbx4 is 'pinned' 65 | """ 66 | ``` 67 | -------------------------------------------------------------------------------- /docs/how_to/how_to_validate_arc3.md: -------------------------------------------------------------------------------- 1 | # How to Validate ARC-3 Assets and Metadata 2 | 3 | ## ⚠️ Warning 4 | 5 | This library is in the early stages of development. 6 | 7 | The API is not stable and the code has not been audited. 8 | 9 | ## Context 10 | 11 | `algobase` provides [Pydantic](https://github.com/pydantic/pydantic) models for validating [Algorand ARC-3](https://github.com/algorandfoundation/ARCs/blob/main/ARCs/arc-0003.md) assets and metadata. 
Make sure `algobase` is installed before you start this tutorial (see instructions [here](https://github.com/code-alexander/algobase/blob/main/README.md)).
| 69 | ```python 70 | # Lax mode (default) 71 | >>> User.model_validate({"name": "Sam", "age": "50"}) 72 | User(name='Sam', age=50) 73 | 74 | # Strict mode 75 | >>> User.model_validate({"name": "Sam", "age": "50"}, strict=True) 76 | ValidationError: 1 validation error for User 77 | age 78 | Input should be a valid integer [type=int_type, input_value='50', input_type=str] 79 | For further information visit https://errors.pydantic.dev/2.5/v/int_type 80 | ``` 81 | 82 | ## Validating ASA Asset Parameters 83 | 84 | To validate the asset parameters without checking the JSON metadata: 85 | 86 | ```python 87 | from algobase.models.asset_params import AssetParams 88 | 89 | # Define ASA asset params dict 90 | asset_params = { 91 | "total": 1, 92 | "decimals": 0, 93 | "default_frozen": False, 94 | "unit_name": "USDT", 95 | "asset_name": "Tether", 96 | "url": "https://tether.to/", 97 | "metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 98 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 99 | "reserve": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 100 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 101 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 102 | } 103 | 104 | # Validate the asset params data 105 | AssetParams.model_validate(asset_params) 106 | ``` 107 | 108 | ## Validating ARC-3 JSON Metadata 109 | 110 | To validate JSON metadata without checking the ASA parameters: 111 | 112 | ```python 113 | from algobase.models.arc3 import Arc3Metadata 114 | 115 | # Define metadata dict 116 | metadata = { 117 | "name": "My Songs", 118 | "decimals": 0, 119 | "description": "My first and best song!", 120 | "image": "https://s3.amazonaws.com/your-bucket/song/cover/mysong.png", 121 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 122 | "image_mimetype": "image/png", 123 | "background_color": "FFFFFF", 124 | "external_url": "https://mysongs.com/song/mysong", 125 
| "external_url_integrity": "sha256-7IGatqxLhUYkruDsEva52Ku43up6774yAmf0k98MXnU=", 126 | "external_url_mimetype": "text/html", 127 | "animation_url": "https://s3.amazonaws.com/your-bucket/song/preview/mysong.ogg", 128 | "animation_url_integrity": "sha256-LwArA6xMdnFF3bvQjwODpeTG/RVn61weQSuoRyynA1I=", 129 | "animation_url_mimetype": "audio/ogg", 130 | "properties": { 131 | "traits": { 132 | "background": "red", 133 | "shirt_color": "blue", 134 | "glasses": "none", 135 | "tattoos": 4, 136 | }, 137 | "simple_property": "example value", 138 | "rich_property": { 139 | "name": "Name", 140 | "value": "123", 141 | "display_value": "123 Example Value", 142 | "class": "emphasis", 143 | "css": { 144 | "color": "#ffffff", 145 | "font-weight": "bold", 146 | "text-decoration": "underline", 147 | }, 148 | }, 149 | "valid_types": { 150 | "string": "Name", 151 | "int": 1, 152 | "float": 3.14, 153 | "list": ["a", "b", "c"], 154 | }, 155 | "array_property": { 156 | "name": "Name", 157 | "value": [1, 2, 3, 4], 158 | "class": "emphasis", 159 | }, 160 | }, 161 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 162 | "localization": { 163 | "uri": "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 164 | "default": "en", 165 | "locales": ["en", "es", "fr"], 166 | "integrity": { 167 | "es": "sha256-T0UofLOqdamWQDLok4vy/OcetEFzD8dRLig4229138Y=", 168 | "fr": "sha256-UUM89QQlXRlerdzVfatUzvNrEI/gwsgsN/lGkR13CKw=", 169 | }, 170 | }, 171 | } 172 | 173 | # Validate the metadata 174 | Arc3Metadata.model_validate(metadata) 175 | ``` 176 | 177 | ## Validating ARC-3 Asset Parameters and JSON Metadata 178 | 179 | To validate a combination of asset parameters and JSON metadata: 180 | 181 | ```python 182 | from algobase.models.asa import Asa 183 | 184 | # Define dict containing both asset params and metadata 185 | data = { 186 | "asset_params": { 187 | "total": 1, 188 | "decimals": 0, 189 | "default_frozen": False, 190 | "unit_name": "Song0001", 191 | "asset_name": 
"My Songs", 192 | "url": "https://tether.to/#arc3", 193 | "metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 194 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 195 | "reserve": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 196 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 197 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 198 | }, 199 | "metadata": { 200 | "arc": "arc3", 201 | "name": "My Songs", 202 | "decimals": 0, 203 | "description": "My first and best song!", 204 | "image": "https://s3.amazonaws.com/your-bucket/song/cover/mysong.png", 205 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 206 | "image_mimetype": "image/png", 207 | "background_color": "FFFFFF", 208 | "external_url": "https://mysongs.com/song/mysong", 209 | "external_url_integrity": "sha256-7IGatqxLhUYkruDsEva52Ku43up6774yAmf0k98MXnU=", 210 | "external_url_mimetype": "text/html", 211 | "animation_url": "https://s3.amazonaws.com/your-bucket/song/preview/mysong.ogg", 212 | "animation_url_integrity": "sha256-LwArA6xMdnFF3bvQjwODpeTG/RVn61weQSuoRyynA1I=", 213 | "animation_url_mimetype": "audio/ogg", 214 | "properties": { 215 | "traits": { 216 | "background": "red", 217 | "shirt_color": "blue", 218 | "glasses": "none", 219 | "tattoos": 4, 220 | }, 221 | "simple_property": "example value", 222 | "rich_property": { 223 | "name": "Name", 224 | "value": "123", 225 | "display_value": "123 Example Value", 226 | "class": "emphasis", 227 | "css": { 228 | "color": "#ffffff", 229 | "font-weight": "bold", 230 | "text-decoration": "underline", 231 | }, 232 | }, 233 | "valid_types": { 234 | "string": "Name", 235 | "int": 1, 236 | "float": 3.14, 237 | "list": ["a", "b", "c"], 238 | }, 239 | "array_property": { 240 | "name": "Name", 241 | "value": [1, 2, 3, 4], 242 | "class": "emphasis", 243 | }, 244 | }, 245 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 246 | 
"localization": { 247 | "uri": "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 248 | "default": "en", 249 | "locales": ["en", "es", "fr"], 250 | "integrity": { 251 | "es": "sha256-T0UofLOqdamWQDLok4vy/OcetEFzD8dRLig4229138Y=", 252 | "fr": "sha256-UUM89QQlXRlerdzVfatUzvNrEI/gwsgsN/lGkR13CKw=", 253 | }, 254 | }, 255 | }, 256 | } 257 | 258 | # Validate the data 259 | Asa.model_validate(data) 260 | ``` 261 | 262 | The extra field (`"arc": "arc3"`) is mandatory when validating a dict against the ASA model, if the `metadata` field is not None. 263 | 264 | It will be excluded from model serialization, but is needed for a [discriminated union](https://docs.pydantic.dev/latest/concepts/unions/#discriminated-unions). 265 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Welcome to algobase 2 | 3 | `algobase` is a type-safe Python library for interacting with assets on Algorand. 4 | 5 | ## Table Of Contents 6 | 7 | 1. [How-To Guides](how_to/how_to_validate_arc3.md) 8 | 2. 
[Reference](reference.md) 9 | -------------------------------------------------------------------------------- /docs/reference.md: -------------------------------------------------------------------------------- 1 | ::: algobase.models.asset_params 2 | ::: algobase.models.arc3 3 | ::: algobase.models.arc19 4 | ::: algobase.models.asa 5 | ::: algobase.models.algod 6 | ::: algobase.models.kmd 7 | 8 | ::: algobase.algorand.client 9 | 10 | ::: algobase.ipfs.client_base 11 | ::: algobase.ipfs.nft_storage 12 | 13 | ::: algobase.utils.hash 14 | ::: algobase.utils.read 15 | ::: algobase.utils.url 16 | ::: algobase.utils.validate 17 | 18 | ::: algobase.choices 19 | ::: algobase.functional 20 | ::: algobase.settings 21 | -------------------------------------------------------------------------------- /docs/stylesheets/extra.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --md-primary-fg-color: #50e3c2; 3 | --md-primary-fg-color--light: #ed9fdc; 4 | --md-primary-fg-color--dark: #1b2e50; 5 | } 6 | -------------------------------------------------------------------------------- /docs/tutorials.md: -------------------------------------------------------------------------------- 1 | ### Coming Soon.... 2 | -------------------------------------------------------------------------------- /examples/simple_mint.py: -------------------------------------------------------------------------------- 1 | """Example showing how to mint an NFT on an Algorand localnet. 2 | 3 | To run this example, make sure you have localnet running. 4 | You can follow the guide here: https://developer.algorand.org/docs/get-started/algokit/#start-a-localnet 5 | 6 | The metadata for this NFT is stored in IPFS using the NFT Storage API. 7 | It requires the environment variable `AB_NFT_STORAGE_API_KEY` to be set. 
8 | """ 9 | 10 | from datetime import datetime 11 | 12 | from algobase.algorand.client import ( 13 | create_localnet_algod_client, 14 | get_default_account, 15 | ) 16 | from algobase.algorand.simple_mint import create_metadata, mint 17 | from algobase.ipfs.nft_storage import NftStorage 18 | from algobase.settings import Settings 19 | 20 | # Fetch settings from the environment 21 | settings = Settings() 22 | 23 | # Instantiate Algod client 24 | algod_client = create_localnet_algod_client() 25 | 26 | # Get the default localnet account 27 | account = get_default_account(algod_client) 28 | 29 | # Define the ARC-3 metadata for the NFT 30 | metadata = create_metadata( 31 | description="My first NFT!", 32 | properties={ 33 | "creator": account.address, 34 | "created_at": datetime.now().isoformat(), 35 | }, 36 | ) 37 | 38 | # Instantiate IPFS client 39 | ipfs_client = settings | NftStorage.from_settings 40 | 41 | # Store the metadata JSON in IPFS and get the CID 42 | cid = ipfs_client.store_json(metadata.json_bytes) 43 | 44 | print(f"Stored JSON on IPFS with CID {cid}") 45 | print(f"View the metadata at https://nftstorage.link/ipfs/{cid}") 46 | """ 47 | Stored JSON on IPFS with CID bafkreif2cduyjxdljxaydxxiryxdiw5arljif745e46fv3sajkxwqvvtzq 48 | View the metadata at https://nftstorage.link/ipfs/bafkreif2cduyjxdljxaydxxiryxdiw5arljif745e46fv3sajkxwqvvtzq 49 | """ 50 | 51 | # Mint the NFT on localnet 52 | asset_id = mint( 53 | algod_client=algod_client, 54 | account=account, 55 | metadata=metadata, 56 | cid=cid, 57 | ) 58 | 59 | print(f"NFT minted! Asset ID: {asset_id}") 60 | print(f"View the asset in Dappflow: https://app.dappflow.org/explorer/asset/{asset_id}") 61 | """ 62 | NFT minted! 
Asset ID: 1008 63 | View the asset in Dappflow: https://app.dappflow.org/explorer/asset/1008 64 | """ 65 | -------------------------------------------------------------------------------- /examples/store_json_ipfs.py: -------------------------------------------------------------------------------- 1 | """Example showing how to store JSON in IPFS. 2 | 3 | Make sure the environment variable `AB_NFT_STORAGE_API_KEY` is set before running. 4 | """ 5 | 6 | from algobase.ipfs.nft_storage import NftStorage 7 | from algobase.settings import Settings 8 | 9 | # Fetch settings from the environment 10 | settings = Settings() 11 | 12 | # Instantiate the client object by piping settings to it 13 | client = settings | NftStorage.from_settings 14 | 15 | # Store JSON in IPFS (returns the CID of the file if successful) 16 | cid = client.store_json( 17 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 18 | ) 19 | 20 | print(f"Stored JSON on IPFS with CID {cid}") 21 | """ 22 | Stored JSON on IPFS with CID bafkreiaci6q6dolsy32cnqhtmgvf23gzphzzc7urfnka2omgzn7behvbx4 23 | """ 24 | 25 | # Check IPFS pin status ('queued', 'pinning', 'pinned', or 'failed') 26 | pin_status = client.fetch_pin_status(cid) 27 | 28 | print(f"IPFS pin status for CID {cid} is '{pin_status}'") 29 | """ 30 | IPFS pin status for CID bafkreiaci6q6dolsy32cnqhtmgvf23gzphzzc7urfnka2omgzn7behvbx4 is 'pinned' 31 | """ 32 | -------------------------------------------------------------------------------- /examples/validate_arc3.py: -------------------------------------------------------------------------------- 1 | """Example showing how to validate ARC-3 ASA asset parameters and metadata.""" 2 | 3 | from algobase.models.asa import Asa 4 | 5 | # Define dict containing both asset params and metadata 6 | data = { 7 | "asset_params": { 8 | "total": 1, 9 | "decimals": 0, 10 | "default_frozen": False, 11 | "unit_name": "Song0001", 12 | "asset_name": "My Songs", 13 | "url": "https://tether.to/#arc3", 14 | 
"metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 15 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 16 | "reserve": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 17 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 18 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 19 | }, 20 | "metadata": { 21 | "arc": "arc3", 22 | "name": "My Songs", 23 | "decimals": 0, 24 | "description": "My first and best song!", 25 | "image": "https://s3.amazonaws.com/your-bucket/song/cover/mysong.png", 26 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 27 | "image_mimetype": "image/png", 28 | "background_color": "FFFFFF", 29 | "external_url": "https://mysongs.com/song/mysong", 30 | "external_url_integrity": "sha256-7IGatqxLhUYkruDsEva52Ku43up6774yAmf0k98MXnU=", 31 | "external_url_mimetype": "text/html", 32 | "animation_url": "https://s3.amazonaws.com/your-bucket/song/preview/mysong.ogg", 33 | "animation_url_integrity": "sha256-LwArA6xMdnFF3bvQjwODpeTG/RVn61weQSuoRyynA1I=", 34 | "animation_url_mimetype": "audio/ogg", 35 | "properties": { 36 | "traits": { 37 | "background": "red", 38 | "shirt_color": "blue", 39 | "glasses": "none", 40 | "tattoos": 4, 41 | }, 42 | "simple_property": "example value", 43 | "rich_property": { 44 | "name": "Name", 45 | "value": "123", 46 | "display_value": "123 Example Value", 47 | "class": "emphasis", 48 | "css": { 49 | "color": "#ffffff", 50 | "font-weight": "bold", 51 | "text-decoration": "underline", 52 | }, 53 | }, 54 | "valid_types": { 55 | "string": "Name", 56 | "int": 1, 57 | "float": 3.14, 58 | "list": ["a", "b", "c"], 59 | }, 60 | "array_property": { 61 | "name": "Name", 62 | "value": [1, 2, 3, 4], 63 | "class": "emphasis", 64 | }, 65 | }, 66 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 67 | "localization": { 68 | "uri": "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 69 | "default": 
"en", 70 | "locales": ["en", "es", "fr"], 71 | "integrity": { 72 | "es": "sha256-T0UofLOqdamWQDLok4vy/OcetEFzD8dRLig4229138Y=", 73 | "fr": "sha256-UUM89QQlXRlerdzVfatUzvNrEI/gwsgsN/lGkR13CKw=", 74 | }, 75 | }, 76 | }, 77 | } 78 | 79 | # Validate the data 80 | Asa.model_validate(data) 81 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: algobase 2 | 3 | theme: 4 | name: "material" 5 | features: 6 | - navigation.tabs 7 | 8 | extra_css: 9 | - stylesheets/extra.css 10 | 11 | plugins: 12 | - mkdocstrings: 13 | handlers: 14 | python: 15 | options: 16 | docstring_style: google 17 | ignore_init_summary: true 18 | docstring_section_style: list 19 | filters: ["!^_"] 20 | separate_signature: true 21 | show_root_heading: true 22 | show_signature_annotations: true 23 | show_symbol_type_heading: true 24 | show_symbol_type_toc: true 25 | unwrap_annotated: true 26 | 27 | nav: 28 | - Welcome: index.md 29 | - How-To Guides: 30 | - How to Validate ARC-3 Assets and Metadata: how_to/how_to_validate_arc3.md 31 | - How to Store JSON in IPFS: how_to/how_to_store_json_ipfs.md 32 | - How to Mint an ARC-3 NFT on LocalNet: how_to/how_to_mint_nft_localnet.md 33 | - Reference: 34 | - Reference: reference.md 35 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # Poetry pyproject.toml: https://python-poetry.org/docs/pyproject/ 2 | [build-system] 3 | requires = ["poetry_core>=1.0.0"] 4 | build-backend = "poetry.core.masonry.api" 5 | 6 | [tool.poetry] 7 | name = "algobase" 8 | version = "0.12.8" 9 | description = "A type-safe Python library for interacting with assets on Algorand." 
10 | readme = "README.md" 11 | authors = ["algobase "] 12 | license = "Apache Software License 2.0" 13 | repository = "https://github.com/code-alexander/algobase" 14 | homepage = "https://github.com/code-alexander/algobase" 15 | 16 | # Keywords description https://python-poetry.org/docs/pyproject/#keywords 17 | keywords = [] #! Update me 18 | 19 | # Pypi classifiers: https://pypi.org/classifiers/ 20 | classifiers = [ #! Update me 21 | "Development Status :: 3 - Alpha", 22 | "Intended Audience :: Developers", 23 | "Operating System :: OS Independent", 24 | "Topic :: Software Development :: Libraries :: Python Modules", 25 | "License :: OSI Approved :: Apache Software License", 26 | "Programming Language :: Python :: 3", 27 | "Programming Language :: Python :: 3.11", 28 | ] 29 | 30 | [tool.poetry.scripts] 31 | # Entry points for the package https://python-poetry.org/docs/pyproject/#scripts 32 | # "algobase" = "algobase.__main__:app" 33 | 34 | [tool.poetry.dependencies] 35 | python = "^3.11" 36 | 37 | typer = {extras = ["all"], version = ">=0.9,<0.13"} 38 | rich = "^13.7.0" 39 | pydantic = "^2.5.3" 40 | py-algorand-sdk = "^2.5.0" 41 | babel = "^2.14.0" 42 | cytoolz = "^0.12.2" 43 | fastapi = ">=0.109,<0.111" 44 | httpx = "^0.23.1" 45 | pytest-httpx = "^0.21.3" 46 | pydantic-settings = "^2.1.0" 47 | mypy = "^1.10.1" 48 | returns = "^0.22.0" 49 | py-multiformats-cid = "^0.4.4" 50 | 51 | 52 | [tool.poetry.plugins."poetry.application.plugin"] 53 | export = "poetry_plugin_export.plugins:ExportApplicationPlugin" 54 | 55 | 56 | [tool.poetry.group.dev.dependencies] 57 | bandit = "^1.7.6" 58 | mypy-extensions = "^1.0.0" 59 | pre-commit = "^3.6.0" 60 | pydocstyle = "^6.3.0" 61 | pylint = "^3.0.3" 62 | pytest = ">=7.4.4,<9.0.0" 63 | pyupgrade = "^3.15.0" 64 | safety = ">=2.3.5,<4.0.0" 65 | coverage = "^7.4.0" 66 | coverage-badge = "^1.1.0" 67 | pytest-html = "^4.1.1" 68 | pytest-cov = "^4.1.0" 69 | ruff = ">=0.1.13,<0.3.0" 70 | black = {version = "^24.1a1", allow-prereleases = 
true} 71 | mkdocs = "^1.5.3" 72 | mkdocstrings = {extras = ["python"], version = "^0.24.0"} 73 | mkdocs-material = "^9.5.6" 74 | datamodel-code-generator = "^0.25.3" 75 | 76 | [tool.ruff] 77 | target-version = "py311" 78 | 79 | [tool.ruff.lint] 80 | extend-select = [ 81 | "D", # pydocstyle 82 | "I", # isort 83 | "UP", # pyupgrade 84 | ] 85 | 86 | [tool.ruff.lint.pydocstyle] 87 | convention = "google" 88 | 89 | [tool.mypy] 90 | # https://mypy.readthedocs.io/en/latest/config_file.html#using-a-pyproject-toml-file 91 | python_version = "3.11" 92 | pretty = true 93 | show_traceback = true 94 | color_output = true 95 | 96 | allow_redefinition = false 97 | check_untyped_defs = true 98 | disallow_any_generics = true 99 | disallow_incomplete_defs = true 100 | ignore_missing_imports = true 101 | implicit_reexport = false 102 | no_implicit_optional = true 103 | show_column_numbers = true 104 | show_error_codes = true 105 | show_error_context = true 106 | strict_equality = true 107 | strict_optional = true 108 | warn_no_return = true 109 | warn_redundant_casts = true 110 | warn_return_any = true 111 | warn_unreachable = true 112 | warn_unused_configs = true 113 | warn_unused_ignores = true 114 | 115 | 116 | [tool.pytest.ini_options] 117 | # https://docs.pytest.org/en/6.2.x/customize.html#pyproject-toml 118 | # Directories that are not visited by pytest collector: 119 | norecursedirs =["hooks", "*.egg", ".eggs", "dist", "build", "docs", ".tox", ".git", "__pycache__"] 120 | doctest_optionflags = ["NUMBER", "NORMALIZE_WHITESPACE", "IGNORE_EXCEPTION_DETAIL"] 121 | 122 | # Extra options: 123 | addopts = [ 124 | "--strict-markers", 125 | "--tb=short", 126 | "--doctest-modules", 127 | "--doctest-continue-on-failure", 128 | ] 129 | 130 | # Coverage config in pyproject.toml must live under [tool.coverage.*]; 131 | # bare [coverage.*] tables are ignored by coverage.py. 132 | [tool.coverage.run] 133 | source = ["tests"] 134 | branch = true 135 | 136 | [tool.coverage.paths] 137 | source = ["algobase"] 138 | 139 | [tool.coverage.report] 140 | fail_under = 50 141 | show_missing = true 142 | 
-------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | annotated-types==0.7.0 ; python_version >= "3.11" and python_version < "4.0" 2 | anyio==4.4.0 ; python_version >= "3.11" and python_version < "4.0" 3 | babel==2.16.0 ; python_version >= "3.11" and python_version < "4.0" 4 | base58==2.1.1 ; python_version >= "3.11" and python_version < "4.0" 5 | certifi==2024.7.4 ; python_version >= "3.11" and python_version < "4.0" 6 | cffi==1.17.0 ; python_version >= "3.11" and python_version < "4.0" 7 | click==8.1.7 ; python_version >= "3.11" and python_version < "4.0" 8 | colorama==0.4.6 ; python_version >= "3.11" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") 9 | cytoolz==0.12.3 ; python_version >= "3.11" and python_version < "4.0" 10 | fastapi==0.110.3 ; python_version >= "3.11" and python_version < "4.0" 11 | h11==0.14.0 ; python_version >= "3.11" and python_version < "4.0" 12 | httpcore==0.16.3 ; python_version >= "3.11" and python_version < "4.0" 13 | httpx==0.23.3 ; python_version >= "3.11" and python_version < "4.0" 14 | idna==3.7 ; python_version >= "3.11" and python_version < "4.0" 15 | iniconfig==2.0.0 ; python_version >= "3.11" and python_version < "4.0" 16 | markdown-it-py==3.0.0 ; python_version >= "3.11" and python_version < "4.0" 17 | mdurl==0.1.2 ; python_version >= "3.11" and python_version < "4.0" 18 | morphys==1.0 ; python_version >= "3.11" and python_version < "4.0" 19 | msgpack==1.0.8 ; python_version >= "3.11" and python_version < "4.0" 20 | mypy-extensions==1.0.0 ; python_version >= "3.11" and python_version < "4.0" 21 | mypy==1.11.1 ; python_version >= "3.11" and python_version < "4.0" 22 | packaging==24.1 ; python_version >= "3.11" and python_version < "4.0" 23 | pluggy==1.5.0 ; python_version >= "3.11" and python_version < "4.0" 24 | py-algorand-sdk==2.6.1 ; python_version >= 
"3.11" and python_version < "4.0" 25 | py-multibase==1.0.3 ; python_version >= "3.11" and python_version < "4.0" 26 | py-multicodec==0.2.1 ; python_version >= "3.11" and python_version < "4.0" 27 | py-multiformats-cid==0.4.4 ; python_version >= "3.11" and python_version < "4.0" 28 | py-multihash==2.0.1 ; python_version >= "3.11" and python_version < "4.0" 29 | pycparser==2.22 ; python_version >= "3.11" and python_version < "4.0" 30 | pycryptodomex==3.20.0 ; python_version >= "3.11" and python_version < "4.0" 31 | pydantic-core==2.20.1 ; python_version >= "3.11" and python_version < "4.0" 32 | pydantic-settings==2.4.0 ; python_version >= "3.11" and python_version < "4.0" 33 | pydantic==2.8.2 ; python_version >= "3.11" and python_version < "4.0" 34 | pygments==2.18.0 ; python_version >= "3.11" and python_version < "4.0" 35 | pynacl==1.5.0 ; python_version >= "3.11" and python_version < "4.0" 36 | pytest-httpx==0.21.3 ; python_version >= "3.11" and python_version < "4.0" 37 | pytest==7.4.4 ; python_version >= "3.11" and python_version < "4.0" 38 | python-baseconv==1.2.2 ; python_version >= "3.11" and python_version < "4.0" 39 | python-dotenv==1.0.1 ; python_version >= "3.11" and python_version < "4.0" 40 | returns==0.22.0 ; python_version >= "3.11" and python_version < "4.0" 41 | rfc3986[idna2008]==1.5.0 ; python_version >= "3.11" and python_version < "4.0" 42 | rich==13.7.1 ; python_version >= "3.11" and python_version < "4.0" 43 | shellingham==1.5.4 ; python_version >= "3.11" and python_version < "4.0" 44 | six==1.16.0 ; python_version >= "3.11" and python_version < "4.0" 45 | sniffio==1.3.1 ; python_version >= "3.11" and python_version < "4.0" 46 | starlette==0.37.2 ; python_version >= "3.11" and python_version < "4.0" 47 | toolz==0.12.1 ; python_version >= "3.11" and python_version < "4.0" 48 | typer[all]==0.12.4 ; python_version >= "3.11" and python_version < "4.0" 49 | typing-extensions==4.12.2 ; python_version >= "3.11" and python_version < "4.0" 50 | 
varint==1.0.2 ; python_version >= "3.11" and python_version < "4.0" 51 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for algobase.""" 2 | -------------------------------------------------------------------------------- /tests/test_algorand/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for algobase.""" 2 | -------------------------------------------------------------------------------- /tests/test_algorand/test_account.py: -------------------------------------------------------------------------------- 1 | """Tests the `Account` class.""" 2 | 3 | from algobase.algorand.account import Account, create_account 4 | 5 | 6 | def test_account() -> None: 7 | """Tests the `Account` class.""" 8 | account = Account(private_key="test_key", address="test_address") 9 | assert account.private_key == "test_key" 10 | assert account.address == "test_address" 11 | 12 | 13 | def test_create_account() -> None: 14 | """Test the create_account() function.""" 15 | account = create_account() 16 | assert isinstance(account, Account) 17 | -------------------------------------------------------------------------------- /tests/test_algorand/test_dispenser.py: -------------------------------------------------------------------------------- 1 | """Tests for the Algorand TestNet dispenser API client.""" 2 | 3 | from types import SimpleNamespace 4 | from typing import Any 5 | 6 | import httpx 7 | import pytest 8 | from pytest_httpx import HTTPXMock 9 | 10 | from algobase.algorand.dispenser import Dispenser 11 | from algobase.choices import AlgorandAsset 12 | from algobase.models.dispenser import DispenserFundResponse 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "field, expected", 17 | [ 18 | ("_access_token", "test_token"), 19 | ("access_token", "test_token"), 20 | ("base_url", 
"https://api.dispenser.algorandfoundation.tools"), 21 | ("headers", {"Authorization": "Bearer test_token"}), 22 | ], 23 | ) 24 | def test_properties(field: str, expected: Any) -> None: 25 | """Test the properties of the `TestNetDispenser` class.""" 26 | client = Dispenser(_access_token="test_token") 27 | assert getattr(client, field) == expected 28 | 29 | 30 | def test_fund_successful( 31 | httpx_mock: HTTPXMock, 32 | ) -> None: 33 | """Test that the response is parsed correctly when the request is successful (response is mocked).""" 34 | httpx_mock.add_response( 35 | json={ 36 | "txID": "SFSHW3D33H6AIA26B53JPHX2HUXATKD4XL7T473XN7RIP7X7F3BA", 37 | "amount": 1000000, 38 | } 39 | ) 40 | client = Dispenser(_access_token="test_token") 41 | response = client.fund( 42 | address="test_address", amount=1000000, asset_id=AlgorandAsset.ALGO 43 | ) 44 | assert isinstance(response, DispenserFundResponse) 45 | assert response.tx_id == "SFSHW3D33H6AIA26B53JPHX2HUXATKD4XL7T473XN7RIP7X7F3BA" 46 | assert response.amount == 1000000 47 | 48 | 49 | def test_fund_error( 50 | httpx_mock: HTTPXMock, 51 | ) -> None: 52 | """Test that an error is raised when the request is unsuccessful (response is mocked).""" 53 | httpx_mock.add_response( 54 | status_code=500, 55 | json={"code": "unexpected_error", "message": "Unexpected internal error"}, 56 | ) 57 | client = Dispenser(_access_token="test_token") 58 | with pytest.raises(httpx.HTTPError): 59 | client.fund(address="test_address", amount=1000000, asset_id=AlgorandAsset.ALGO) 60 | 61 | 62 | def test_from_settings_constructor() -> None: 63 | """Test that the client can be created from a settings object.""" 64 | settings = SimpleNamespace(testnet_dispenser_access_token="test_token") 65 | client = Dispenser.from_settings(settings) # type: ignore[arg-type] 66 | assert isinstance(client, Dispenser) 67 | assert client.access_token == "test_token" 68 | 69 | 70 | def test_from_settings_constructor_token_missing() -> None: 71 | """Test that the client 
raises an error if the access token is None in the settings object.""" 72 | settings = SimpleNamespace(testnet_dispenser_access_token=None) 73 | with pytest.raises(ValueError): 74 | Dispenser.from_settings(settings) # type: ignore[arg-type] 75 | 76 | 77 | def test_access_token_missing() -> None: 78 | """Test that the client raises an error if the access token is None or an empty string.""" 79 | with pytest.raises(ValueError): 80 | Dispenser(_access_token=None) # type: ignore[arg-type] 81 | with pytest.raises(ValueError): 82 | Dispenser(_access_token="") 83 | -------------------------------------------------------------------------------- /tests/test_algorand/test_simple_mint.py: -------------------------------------------------------------------------------- 1 | """Tests for the simple mint utility functions.""" 2 | from unittest.mock import MagicMock 3 | 4 | from algosdk.transaction import AssetConfigTxn, SuggestedParams 5 | 6 | from algobase.algorand.account import Account 7 | from algobase.algorand.simple_mint import ( 8 | create_asa, 9 | create_asset_config_txn, 10 | create_metadata, 11 | create_metadata_arc19, 12 | mint, 13 | ) 14 | from algobase.choices import Arc 15 | from algobase.models.arc3 import Arc3Metadata 16 | from algobase.models.arc19 import Arc19Metadata 17 | from algobase.models.asa import Asa 18 | 19 | 20 | def test_create_metadata() -> None: 21 | """Test the create_metadata() function.""" 22 | metadata = create_metadata( 23 | description="My first NFT!", properties={"creator": "test_address"} 24 | ) 25 | assert isinstance(metadata, Arc3Metadata) 26 | assert metadata.arc == Arc.ARC3 27 | assert metadata.name == "NFT" 28 | assert metadata.decimals == 0 29 | assert metadata.description == "My first NFT!" 
30 | assert getattr(metadata.properties, "creator") == "test_address" 31 | 32 | 33 | def test_create_metadata_arc19() -> None: 34 | """Test the create_metadata_arc19() function.""" 35 | metadata = create_metadata_arc19( 36 | description="My first NFT!", properties={"creator": "test_address"} 37 | ) 38 | assert isinstance(metadata, Arc19Metadata) 39 | assert metadata.arc == Arc.ARC19 40 | assert hasattr(metadata, "arc3_metadata") and isinstance( 41 | metadata.arc3_metadata, Arc3Metadata 42 | ) 43 | assert metadata.arc3_metadata.name == "NFT" 44 | assert metadata.arc3_metadata.decimals == 0 45 | assert metadata.arc3_metadata.description == "My first NFT!" 46 | assert getattr(metadata.arc3_metadata.properties, "creator") == "test_address" 47 | 48 | 49 | def test_create_asa() -> None: 50 | """Test the create_asa() function for ARC-3 metadata.""" 51 | metadata = create_metadata( 52 | description="My first NFT!", properties={"creator": "test_address"} 53 | ) 54 | cid = "test_cid" 55 | asa = create_asa(metadata, cid) 56 | assert isinstance(asa, Asa) 57 | assert asa.asset_params.total == 1 58 | assert asa.asset_params.decimals == 0 59 | assert asa.asset_params.unit_name == "NFT" 60 | assert asa.asset_params.asset_name == "NFT" 61 | assert asa.asset_params.url == "ipfs://test_cid/#arc3" 62 | assert isinstance(asa.metadata, Arc3Metadata) 63 | assert asa.metadata == metadata 64 | 65 | 66 | def test_create_asset_config_txn() -> None: 67 | """Test the create_asset_config_txn() function.""" 68 | mock_algod = MagicMock() 69 | mock_algod.suggested_params.return_value = SuggestedParams( 70 | **{ 71 | "first": 6, 72 | "last": 1006, 73 | "gh": "W+YiTIAibva56J3LrTHBIEQ//VUE/8eSZzBqJmykhWo=", 74 | "gen": "dockernet-v1", 75 | "fee": 0, 76 | "flat_fee": False, 77 | "consensus_version": "future", 78 | "min_fee": 1000, 79 | } 80 | ) 81 | 82 | account = Account( 83 | "test_key", "UYAUCPT2B475MESZAIA4BULTWIQM23VBPHQOLKKOPD7JRFB5QS4L3BOFUM" 84 | ) 85 | 86 | txn = create_asset_config_txn( 87 | 
mock_algod, 88 | account, 89 | create_asa( 90 | metadata=create_metadata( 91 | description="My first NFT!", properties={"creator": "test_address"} 92 | ), 93 | cid="test_cid", 94 | ), 95 | ) 96 | 97 | assert isinstance(txn, AssetConfigTxn) 98 | 99 | 100 | def test_mint() -> None: 101 | """Test the mint() function.""" 102 | mock_algod = MagicMock() 103 | mock_algod.suggested_params.return_value = SuggestedParams( 104 | **{ 105 | "first": 6, 106 | "last": 1006, 107 | "gh": "W+YiTIAibva56J3LrTHBIEQ//VUE/8eSZzBqJmykhWo=", 108 | "gen": "dockernet-v1", 109 | "fee": 0, 110 | "flat_fee": False, 111 | "consensus_version": "future", 112 | "min_fee": 1000, 113 | } 114 | ) 115 | mock_algod.send_transaction.return_value = "test_txid" 116 | mock_algod.status.return_value = {"last-round": 0} 117 | mock_algod.pending_transaction_info.return_value = { 118 | "asset-index": 1007, 119 | "confirmed-round": 7, 120 | "pool-error": "", 121 | "txn": { 122 | "sig": "KCbvV1FV2xLbFUGI7MtIFfYCg2p59FX5SJJZsXUc3bsGXkm/wIK6ezHgC/Et5fc9k9UXtb/orbKzbHsFqj/9BQ==", 123 | "txn": { 124 | "apar": { 125 | "am": "LvYRe05h02XZbUNAUTGu43QAvYRqyzpfrRKBlAh/wak=", 126 | "an": "NFT", 127 | "au": "ipfs://test_cid/#arc3", 128 | "m": "UYAUCPT2B475MESZAIA4BULTWIQM23VBPHQOLKKOPD7JRFB5QS4L3BOFUM", 129 | "r": "UYAUCPT2B475MESZAIA4BULTWIQM23VBPHQOLKKOPD7JRFB5QS4L3BOFUM", 130 | "t": 1, 131 | "un": "NFT", 132 | }, 133 | "fee": 1000, 134 | "fv": 6, 135 | "gen": "dockernet-v1", 136 | "gh": "W+YiTIAibva56J3LrTHBIEQ//VUE/8eSZzBqJmykhWo=", 137 | "lv": 1006, 138 | "snd": "UYAUCPT2B475MESZAIA4BULTWIQM23VBPHQOLKKOPD7JRFB5QS4L3BOFUM", 139 | "type": "acfg", 140 | }, 141 | }, 142 | } 143 | 144 | account = Account( 145 | "sDR9sBBWSSks/yYVFGTT1X6imLL12DF6+x+4l2hX7ji+EC+xUI8Paxpbo+tSC6o2BAv+QIRPF2zO3cvKn3N3Pg==", 146 | "UYAUCPT2B475MESZAIA4BULTWIQM23VBPHQOLKKOPD7JRFB5QS4L3BOFUM", 147 | ) 148 | cid = "test_cid" 149 | 150 | metadata = Arc3Metadata( 151 | arc=Arc.ARC3, 152 | name="NFT", 153 | decimals=0, 154 | description="My first 
NFT!", 155 | ) 156 | 157 | asset_id = mint(mock_algod, account, metadata, cid) 158 | 159 | assert isinstance(asset_id, int) 160 | assert asset_id == 1007 161 | -------------------------------------------------------------------------------- /tests/test_functional.py: -------------------------------------------------------------------------------- 1 | """Test the type casting functions.""" 2 | 3 | from typing import TypeVar 4 | 5 | import pytest 6 | 7 | from algobase.functional import first_true, maybe_apply, provide_context 8 | 9 | T = TypeVar("T") 10 | 11 | 12 | @pytest.mark.parametrize( 13 | "x, f", 14 | [("some_string", str), (1, int), (1.0, float), (True, bool), ([0, 1, 2], list)], 15 | ) 16 | def test_maybe_apply_cast(x: T, f: type[T]) -> None: 17 | """Test that maybe_apply() returns the correct value when casting some value.""" 18 | assert isinstance(maybe_apply(x, f), f) 19 | assert maybe_apply(x, f) == x 20 | 21 | 22 | @pytest.mark.parametrize( 23 | "x, f", 24 | [(None, str), (None, int), (None, float), (None, bool), (None, list)], 25 | ) 26 | def test_maybe_apply_cast_none(x: None, f: type[T]) -> None: 27 | """Test that maybe_apply() returns None when casting None.""" 28 | assert maybe_apply(x, f) is None 29 | assert maybe_apply(x, f) == x 30 | 31 | 32 | def test_provide_context() -> None: 33 | """Tests the provide_context() function.""" 34 | context = provide_context(a=4, b=5, c=6) 35 | 36 | def f(x, y, z, a, b, c): 37 | """Function that accepts context arguments.""" 38 | return x + y + z + a + b + c 39 | 40 | assert context(f, 1, 2, z=3) == 21 41 | 42 | 43 | def test_first_true() -> None: 44 | """Tests the first_true() function.""" 45 | iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] 46 | 47 | def predicate(n: int) -> bool: 48 | """Predicate function. 49 | 50 | Args: 51 | n (int): The number to check. 52 | 53 | Returns: 54 | bool: True if the number is 5, False otherwise. 
"""
        return n == 5

    assert first_true(iterable, predicate=predicate) == 5


def test_first_true_default() -> None:
    """Tests the first_true() function with a default value provided."""
    iterable = [0, 1, 2, 3, 4]

    def predicate(n: int) -> bool:
        """Predicate function.

        Args:
            n (int): The number to check.

        Returns:
            bool: True if the number is 5, False otherwise.
        """
        return n == 5

    # No element satisfies the predicate, so the default (5) is returned.
    assert first_true(iterable, default=5, predicate=predicate) == 5
-------------------------------------------------------------------------------- /tests/test_ipfs/__init__.py: --------------------------------------------------------------------------------
"""Tests for algobase."""
-------------------------------------------------------------------------------- /tests/test_ipfs/conftest.py: --------------------------------------------------------------------------------
"""Pytest fixtures for the IPFS client tests."""


import pytest

from tests.types import FixtureDict


@pytest.fixture
def nft_storage_store_json_successful() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a successful upload to IPFS via nft.storage.

    Returns:
        FixtureDict: The dictionary response.
    """
    return {
        "ok": True,
        "value": {
            "cid": "bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy",
            "created": "2024-01-29T09:15:48.637+00:00",
            "type": "application/json",
            "scope": "test-1",
            "files": [],
            "size": 58,
            "name": "Upload at 2024-01-29T09:17:17.808Z",
            "pin": {
                "cid": "bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy",
                "created": "2024-01-29T09:15:48.637+00:00",
                "size": 58,
                "status": "pinned",
            },
            "deals": [],
        },
    }


@pytest.fixture
def nft_storage_store_json_bad_request() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a failed upload to IPFS via nft.storage (HTTP 400).

    Returns:
        FixtureDict: The dictionary response.
    """
    return {"ok": False, "error": {"name": "string", "message": "string"}}


@pytest.fixture
def nft_storage_store_json_unauthorized() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a failed upload to IPFS via nft.storage (HTTP 401).

    Returns:
        FixtureDict: The dictionary response.
    """
    return {"ok": False, "error": {"name": "HTTP Error", "message": "Unauthorized"}}


@pytest.fixture
def nft_storage_store_json_forbidden() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a failed upload to IPFS via nft.storage (HTTP 403).

    Returns:
        FixtureDict: The dictionary response.
    """
    return {
        "ok": False,
        "error": {"name": "HTTP Error", "message": "Token is not valid"},
    }


@pytest.fixture
def nft_storage_store_json_internal_server_error() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a failed upload to IPFS via nft.storage (HTTP 500).

    Returns:
        FixtureDict: The dictionary response.
    """
    return {"ok": False, "error": {"name": "string", "message": "string"}}


@pytest.fixture
def nft_storage_fetch_pin_status_successful() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a successful pin status check from nft.storage.

    Returns:
        FixtureDict: The dictionary response.
    """
    return {
        "ok": True,
        "value": {
            "cid": "bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy",
            "pin": {
                "cid": "bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy",
                "created": "2024-01-29T09:15:48.637+00:00",
                "size": 58,
                "status": "pinned",
            },
            # Filecoin storage-deal details, as returned by the nft.storage API.
            "deals": [
                {
                    "status": "active",
                    "lastChanged": "2024-01-30T00:30:04.385474+00:00",
                    "chainDealID": 70754247,
                    "datamodelSelector": "Links/224/Hash/Links/23/Hash/Links/0/Hash",
                    "statusText": None,
                    "dealActivation": "2024-02-01T20:28:00+00:00",
                    "dealExpiration": "2025-07-17T20:28:00+00:00",
                    "miner": "f020378",
                    "pieceCid": "baga6ea4seaqe5zxp37xbig2veyqbp5e2ce7jzqrptwxgj6ys3echq56vnaeggga",
                    "batchRootCid": "bafybeihcgb5rwrkde6zf3bn2xrvr7ytfvtu3g6yrhez6sq5pjw5nkrf2m4",
                }
            ],
        },
    }


@pytest.fixture
def nft_storage_fetch_pin_status_not_found() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a failed pin status check from nft.storage (HTTP 404).

    Returns:
        FixtureDict: The dictionary response.
    """
    return {"ok": False, "error": {"name": "string", "message": "string"}}


@pytest.fixture
def nft_storage_fetch_pin_status_internal_server_error() -> FixtureDict:
    """Pytest fixture that returns a dictionary response from a failed pin status check from nft.storage (HTTP 500).

    Returns:
        FixtureDict: The dictionary response.
    """
    return {"ok": False, "error": {"name": "string", "message": "string"}}
-------------------------------------------------------------------------------- /tests/test_ipfs/test_client_base.py: --------------------------------------------------------------------------------
"""Tests the IpfsClient abstract base class."""

from dataclasses import dataclass
from typing import Self

import httpx
import pytest

from algobase.choices import (
    IpfsPinStatus,
    IpfsPinStatusChoice,
    IpfsProvider,
    IpfsProviderChoice,
)
from algobase.ipfs.client_base import IpfsClient
from algobase.settings import Settings


class TestIpfsClient:
    """Tests the IpfsClient abstract base class."""

    @dataclass
    class Client(IpfsClient):
        """Concrete implementation of the IpfsClient abstract base class."""

        @classmethod
        def from_settings(cls, settings: Settings) -> Self:
            """Create an instance of the IPFS client from a settings object."""
            return cls()

        @property
        def ipfs_provider_name(self) -> IpfsProviderChoice:
            """The name of the IPFS provider."""
            return IpfsProvider.NFT_STORAGE

        @property
        def api_version(self) -> str:
            """The version of the IPFS provider's API."""
            return "1.0"

        @property
        def base_url(self) -> httpx.URL:
            """The base URL of the IPFS provider's API."""
            return httpx.URL("https://api.nft.storage")

        @property
        def is_api_key_required(self) -> bool:
            """Whether the IPFS provider requires an API key."""
            return True

        @property
        def api_key(self) -> str | None:
            """The API key."""
            return "test_api_key"

        def store_json(self, json: str | bytes) -> str:
            """Stores JSON data in IPFS.

            Args:
                json (str | bytes): The JSON to store.

            Returns:
                str: The IPFS CID of the stored data.
64 | """ 65 | return "some_cid" 66 | 67 | def fetch_pin_status(self, cid: str) -> IpfsPinStatusChoice: 68 | """Returns the pinning status of a file, by CID. 69 | 70 | Args: 71 | cid (str): The CID of the file to check. 72 | 73 | Returns: 74 | IpfsPinStatusChoice: The status of the CID. 75 | """ 76 | return IpfsPinStatus.PINNED 77 | 78 | @pytest.mark.parametrize( 79 | "attribute, value", 80 | [ 81 | ("api_version", "1.0"), 82 | ("base_url", "https://api.nft.storage"), 83 | ("is_api_key_required", True), 84 | ("ipfs_provider_name", IpfsProvider.NFT_STORAGE), 85 | ("api_key", "test_api_key"), 86 | ], 87 | ) 88 | def test_properties( 89 | self, 90 | attribute: str, 91 | value: str | bool | IpfsProviderChoice, 92 | ) -> None: 93 | """Test that the client has the required abstract properties.""" 94 | client = self.Client() 95 | assert getattr(client, attribute) == value 96 | 97 | def test_api_key_missing(self) -> None: 98 | """Test that the client raises an error if the API key is missing.""" 99 | 100 | class Missing(self.Client): # type: ignore 101 | api_key = None 102 | 103 | with pytest.raises(ValueError): 104 | Missing() 105 | -------------------------------------------------------------------------------- /tests/test_ipfs/test_nft_storage.py: -------------------------------------------------------------------------------- 1 | """Tests the nft.storage IPFS.""" 2 | 3 | from functools import reduce 4 | 5 | import httpx 6 | import pytest 7 | from _pytest.monkeypatch import MonkeyPatch 8 | from pytest_httpx import HTTPXMock 9 | 10 | from algobase.choices import IpfsProvider, IpfsProviderChoice 11 | from algobase.ipfs.nft_storage import NftStorage 12 | from algobase.settings import Settings 13 | from tests.types import FixtureDict 14 | 15 | 16 | class TestNftStorage: 17 | """Tests the NftStorage client class.""" 18 | 19 | def test_from_settings_constructor(self) -> None: 20 | """Test that the client can be created from a settings object.""" 21 | settings = Settings() 22 | 
settings.nft_storage_api_key = "test_api_key" 23 | test_client = NftStorage.from_settings(settings) 24 | assert isinstance(test_client, NftStorage) 25 | 26 | @pytest.mark.parametrize( 27 | "attribute, value", 28 | [ 29 | ("api_version", "1.0"), 30 | ("base_url", "https://api.nft.storage"), 31 | ("is_api_key_required", True), 32 | ("ipfs_provider_name", IpfsProvider.NFT_STORAGE), 33 | ("api_key", "test_api_key"), 34 | ], 35 | ) 36 | def test_properties( 37 | self, 38 | attribute: str, 39 | value: str | bool | IpfsProviderChoice, 40 | ) -> None: 41 | """Test that the client has the required abstract properties.""" 42 | test_client = NftStorage(_api_key="test_api_key") 43 | assert getattr(test_client, attribute) == value 44 | 45 | def test_api_key_missing(self, monkeypatch: MonkeyPatch) -> None: 46 | """Test that the client raises an error if the API key is missing.""" 47 | with pytest.raises(ValueError): 48 | NftStorage(_api_key=None) 49 | 50 | def test_store_json_successful( 51 | self, 52 | httpx_mock: HTTPXMock, 53 | nft_storage_store_json_successful: FixtureDict, 54 | ) -> None: 55 | """Test that a CID is returned when JSON is successfully stored in IPFS (response is mocked).""" 56 | httpx_mock.add_response(json=nft_storage_store_json_successful) 57 | test_client = NftStorage(_api_key="test_api_key") 58 | assert ( 59 | test_client.store_json( 60 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 61 | ) 62 | == "bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy" 63 | ) 64 | 65 | @pytest.mark.parametrize( 66 | "keys, value", 67 | [ 68 | (["ok"], False), 69 | (["ok"], None), 70 | (["value", "cid"], None), 71 | ], 72 | ) 73 | def test_store_json_cid_is_none( 74 | self, 75 | httpx_mock: HTTPXMock, 76 | nft_storage_store_json_successful: FixtureDict, 77 | keys: list[str], 78 | value: bool | None, 79 | ) -> None: 80 | """Test that an error is raise when a 200 response is returned but "ok" is False or "cid" is None (response is mocked).""" 81 | 
response_dict = nft_storage_store_json_successful 82 | reduce(dict.__getitem__, keys[:-1], response_dict)[keys[-1]] = value 83 | 84 | httpx_mock.add_response(json=response_dict) 85 | 86 | test_client = NftStorage(_api_key="test_api_key") 87 | with pytest.raises(httpx.HTTPError): 88 | test_client.store_json( 89 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 90 | ) 91 | 92 | def test_nft_storage_store_json_bad_request( 93 | self, 94 | httpx_mock: HTTPXMock, 95 | nft_storage_store_json_bad_request: FixtureDict, 96 | ) -> None: 97 | """Test that an error is raised when a 400 response is returned (response is mocked).""" 98 | httpx_mock.add_response( 99 | json=nft_storage_store_json_bad_request, status_code=400 100 | ) 101 | 102 | test_client = NftStorage(_api_key="test_api_key") 103 | with pytest.raises(httpx.HTTPError): 104 | test_client.store_json( 105 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 106 | ) 107 | 108 | def test_nft_storage_store_json_unauthorized( 109 | self, 110 | httpx_mock: HTTPXMock, 111 | nft_storage_store_json_unauthorized: FixtureDict, 112 | ) -> None: 113 | """Test that an error is raised when a 401 response is returned (response is mocked).""" 114 | httpx_mock.add_response( 115 | json=nft_storage_store_json_unauthorized, status_code=401 116 | ) 117 | 118 | test_client = NftStorage(_api_key="test_api_key") 119 | with pytest.raises(httpx.HTTPError): 120 | test_client.store_json( 121 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 122 | ) 123 | 124 | def test_nft_storage_store_json_forbidden( 125 | self, 126 | httpx_mock: HTTPXMock, 127 | nft_storage_store_json_forbidden: FixtureDict, 128 | ) -> None: 129 | """Test that an error is raised when a 403 response is returned (response is mocked).""" 130 | httpx_mock.add_response(json=nft_storage_store_json_forbidden, status_code=403) 131 | 132 | test_client = NftStorage(_api_key="test_api_key") 133 | with pytest.raises(httpx.HTTPError): 
134 | test_client.store_json( 135 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 136 | ) 137 | 138 | def test_nft_storage_store_json_internal_server_error( 139 | self, 140 | httpx_mock: HTTPXMock, 141 | nft_storage_store_json_internal_server_error: FixtureDict, 142 | ) -> None: 143 | """Test that an error is raised when a 500 response is returned (response is mocked).""" 144 | httpx_mock.add_response( 145 | json=nft_storage_store_json_internal_server_error, status_code=500 146 | ) 147 | 148 | test_client = NftStorage(_api_key="test_api_key") 149 | with pytest.raises(httpx.HTTPError): 150 | test_client.store_json( 151 | json='{"integer": 123, "boolean": true, "list": ["a", "b", "c"]}' 152 | ) 153 | 154 | def test_fetch_pin_status_successful( 155 | self, 156 | httpx_mock: HTTPXMock, 157 | nft_storage_fetch_pin_status_successful: FixtureDict, 158 | ) -> None: 159 | """Test that a pin status is returned when a pin status is successfully checked from nft.storage (response is mocked).""" 160 | httpx_mock.add_response(json=nft_storage_fetch_pin_status_successful) 161 | 162 | test_client = NftStorage(_api_key="test_api_key") 163 | assert ( 164 | test_client.fetch_pin_status( 165 | cid="bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy" 166 | ) 167 | == "pinned" 168 | ) 169 | 170 | @pytest.mark.parametrize( 171 | "keys, value", 172 | [ 173 | (["ok"], False), 174 | (["ok"], None), 175 | (["value", "pin", "status"], None), 176 | (["value", "pin", "status"], "invalid_status"), 177 | (["value", "pin", "status"], "queueing"), 178 | ], 179 | ) 180 | def test_fetch_pin_status_invalid_status_or_none( 181 | self, 182 | httpx_mock: HTTPXMock, 183 | nft_storage_fetch_pin_status_successful: FixtureDict, 184 | keys: list[str], 185 | value: bool | None, 186 | ) -> None: 187 | """Test that an error is raise when a 200 response is returned but "ok" is False or pin status is None or invalid. 
(response is mocked).""" 188 | response_dict = nft_storage_fetch_pin_status_successful 189 | reduce(dict.__getitem__, keys[:-1], response_dict)[keys[-1]] = value 190 | 191 | httpx_mock.add_response(json=response_dict) 192 | 193 | test_client = NftStorage(_api_key="test_api_key") 194 | with pytest.raises(httpx.HTTPError): 195 | test_client.fetch_pin_status( 196 | cid="bafkreic7xfupwwdiwnzudgi6s6brjunxktdfio4hj4a5tlp2hrou7rnjvy" 197 | ) 198 | 199 | def test_fetch_pin_status_not_found( 200 | self, 201 | httpx_mock: HTTPXMock, 202 | nft_storage_fetch_pin_status_not_found: FixtureDict, 203 | ) -> None: 204 | """Test that an error is raised when a 400 response is returned (response is mocked).""" 205 | httpx_mock.add_response( 206 | json=nft_storage_fetch_pin_status_not_found, status_code=400 207 | ) 208 | 209 | test_client = NftStorage(_api_key="test_api_key") 210 | with pytest.raises(httpx.HTTPError): 211 | test_client.fetch_pin_status(cid="0") 212 | 213 | def test_fetch_pin_status_internal_server_error( 214 | self, 215 | httpx_mock: HTTPXMock, 216 | nft_storage_fetch_pin_status_internal_server_error: FixtureDict, 217 | ) -> None: 218 | """Test that an error is raised when a 500 response is returned (response is mocked).""" 219 | httpx_mock.add_response( 220 | json=nft_storage_fetch_pin_status_internal_server_error, status_code=500 221 | ) 222 | 223 | test_client = NftStorage(_api_key="test_api_key") 224 | with pytest.raises(httpx.HTTPError): 225 | test_client.fetch_pin_status(cid="0") 226 | -------------------------------------------------------------------------------- /tests/test_models/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for algobase.""" 2 | -------------------------------------------------------------------------------- /tests/test_models/conftest.py: -------------------------------------------------------------------------------- 1 | """Pytest fixtures for the models tests.""" 2 | 3 | from 
copy import deepcopy 4 | 5 | import pytest 6 | 7 | from tests.types import FixtureDict 8 | 9 | arc3_metadata = { 10 | "name": "My Song", 11 | "decimals": 0, 12 | "description": "My first and best song!", 13 | "image": "https://s3.amazonaws.com/your-bucket/song/cover/mysong.png", 14 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 15 | "image_mimetype": "image/png", 16 | "background_color": "FFFFFF", 17 | "external_url": "https://mysongs.com/song/mysong", 18 | "external_url_integrity": "sha256-7IGatqxLhUYkruDsEva52Ku43up6774yAmf0k98MXnU=", 19 | "external_url_mimetype": "text/html", 20 | "animation_url": "https://s3.amazonaws.com/your-bucket/song/preview/mysong.ogg", 21 | "animation_url_integrity": "sha256-LwArA6xMdnFF3bvQjwODpeTG/RVn61weQSuoRyynA1I=", 22 | "animation_url_mimetype": "audio/ogg", 23 | "properties": { 24 | "traits": { 25 | "background": "red", 26 | "shirt_color": "blue", 27 | "glasses": "none", 28 | "tattoos": 4, 29 | }, 30 | "simple_property": "example value", 31 | "rich_property": { 32 | "name": "Name", 33 | "value": "123", 34 | "display_value": "123 Example Value", 35 | "class": "emphasis", 36 | "css": { 37 | "color": "#ffffff", 38 | "font-weight": "bold", 39 | "text-decoration": "underline", 40 | }, 41 | }, 42 | "valid_types": { 43 | "string": "Name", 44 | "int": 1, 45 | "float": 3.14, 46 | "list": ["a", "b", "c"], 47 | }, 48 | "array_property": { 49 | "name": "Name", 50 | "value": [1, 2, 3, 4], 51 | "class": "emphasis", 52 | }, 53 | }, 54 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 55 | "localization": { 56 | "uri": "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 57 | "default": "en", 58 | "locales": ["en", "es", "fr"], 59 | "integrity": { 60 | "es": "sha256-T0UofLOqdamWQDLok4vy/OcetEFzD8dRLig4229138Y=", 61 | "fr": "sha256-UUM89QQlXRlerdzVfatUzvNrEI/gwsgsN/lGkR13CKw=", 62 | }, 63 | }, 64 | } 65 | 66 | 67 | @pytest.fixture 68 | def arc3_metadata_fixture() -> FixtureDict: 69 | 
"""Pytest fixture for a dictionary containing valid ARC-3 metadata. 70 | 71 | Returns: 72 | FixtureDict: The dictionary of valid ARC-3 metadata. 73 | """ 74 | return deepcopy(arc3_metadata) 75 | 76 | 77 | @pytest.fixture 78 | def arc3_metadata_with_extra_metadata() -> FixtureDict: 79 | """Pytest fixture for a dictionary containing valid ARC-3 metadata. 80 | 81 | This is the 'extra metadata' example from the ARC-3 spec: 82 | https://github.com/algorandfoundation/ARCs/blob/main/ARCs/arc-0003.md 83 | 84 | Returns: 85 | FixtureDict: he dictionary of valid ARC-3 metadata 86 | """ 87 | return { 88 | "name": "My Picture", 89 | "description": "Lorem ipsum...", 90 | "image": "https://s3.amazonaws.com/your-bucket/images/{id}.png", 91 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 92 | "image_mimetype": "image/png", 93 | "external_url": "https://mysongs.com/song/{id}", 94 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 95 | } 96 | 97 | 98 | @pytest.fixture 99 | def asa_nft_fixture() -> FixtureDict: 100 | """Pytest fixture for a dictionary containing valid ASA data. 101 | 102 | Returns: 103 | FixtureDict: The dictionary of valid ASA data. 
104 | """ 105 | metadata = deepcopy(arc3_metadata) 106 | metadata["arc"] = "arc3" 107 | return { 108 | "asset_params": { 109 | "total": 1, 110 | "decimals": 0, 111 | "default_frozen": False, 112 | "unit_name": "USDT", 113 | "asset_name": "My Song", 114 | "url": "https://tether.to/#arc3", 115 | "metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 116 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 117 | "reserve": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 118 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 119 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 120 | }, 121 | "metadata": metadata, 122 | } 123 | 124 | 125 | @pytest.fixture 126 | def asa_arc19_nft_fixture() -> FixtureDict: 127 | """Pytest fixture for a dictionary containing valid ARC-19 ASA data. 128 | 129 | Returns: 130 | FixtureDict: The dictionary of valid ASA data. 131 | """ 132 | metadata = deepcopy(arc3_metadata) 133 | metadata["arc"] = "arc19" 134 | return { 135 | "asset_params": { 136 | "total": 1, 137 | "decimals": 0, 138 | "default_frozen": False, 139 | "unit_name": "USDT", 140 | "asset_name": "My Song", 141 | "url": "template-ipfs://{ipfscid:0:dag-pb:reserve:sha2-256}/arc3.json", 142 | "metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 143 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 144 | "reserve": "EEQYWGGBHRDAMTEVDPVOSDVX3HJQIG6K6IVNR3RXHYOHV64ZWAEISS4CTI", # CID encoded as address 145 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 146 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 147 | }, 148 | "metadata": metadata, 149 | } 150 | 151 | 152 | @pytest.fixture 153 | def asa_nft_extra_metadata_fixture() -> FixtureDict: 154 | """Pytest fixture for a dictionary containing valid ASA data. 155 | 156 | Returns: 157 | FixtureDict: The dictionary of valid ASA data. 
158 | """ 159 | return { 160 | "asset_params": { 161 | "total": 1, 162 | "decimals": 0, 163 | "default_frozen": False, 164 | "unit_name": "USDT", 165 | "asset_name": "My Picture", 166 | "url": "https://tether.to/#arc3", 167 | "metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 168 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 169 | "reserve": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 170 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 171 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 172 | }, 173 | "metadata": { 174 | "arc": "arc3", 175 | "name": "My Picture", 176 | "description": "Lorem ipsum...", 177 | "image": "https://s3.amazonaws.com/your-bucket/images/{id}.png", 178 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 179 | "image_mimetype": "image/png", 180 | "external_url": "https://mysongs.com/song/{id}", 181 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 182 | }, 183 | } 184 | -------------------------------------------------------------------------------- /tests/test_models/test_arc19.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the ARC-19 Pydantic model.""" 2 | 3 | import pytest 4 | from pydantic import ValidationError 5 | 6 | from algobase.choices import Arc 7 | from algobase.models.arc3 import Arc3Metadata 8 | from algobase.models.arc19 import Arc19Metadata 9 | from tests.types import FixtureDict 10 | 11 | 12 | def test_no_arc3_metadata() -> None: 13 | """Test that validation succeeds when no ARC-3 metadata is present.""" 14 | metadata = Arc19Metadata() 15 | assert isinstance(metadata, Arc19Metadata) 16 | assert metadata.arc == Arc.ARC19 17 | assert metadata.arc3_metadata is None 18 | 19 | 20 | def test_arc3_metadata_valid(arc3_metadata_fixture: FixtureDict) -> None: 21 | """Test that validation succeeds when ARC-3 metadata is compliant with 
ARC-19.""" 22 | test_dict = arc3_metadata_fixture.copy() 23 | test_dict.pop("extra_metadata") 24 | arc3_metadata = Arc3Metadata.model_validate(test_dict) 25 | metadata = Arc19Metadata(arc3_metadata=arc3_metadata) 26 | assert isinstance(metadata, Arc19Metadata) 27 | assert metadata.arc == Arc.ARC19 28 | assert isinstance(metadata.arc3_metadata, Arc3Metadata) 29 | assert metadata.arc3_metadata.arc == Arc.ARC3 30 | 31 | 32 | def test_arc3_metadata_invalid(arc3_metadata_fixture: FixtureDict) -> None: 33 | """Test that validation fails when ARC-3 metadata is not compliant with ARC-19.""" 34 | arc3_metadata = Arc3Metadata.model_validate(arc3_metadata_fixture) 35 | with pytest.raises(ValidationError): 36 | Arc19Metadata(arc3_metadata=arc3_metadata) 37 | -------------------------------------------------------------------------------- /tests/test_models/test_arc3.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the ARC-3 Pydantic models.""" 2 | 3 | import pytest 4 | from pydantic import ValidationError 5 | 6 | from algobase.models.arc3 import Arc3Localization, Arc3Metadata, Arc3Properties 7 | from algobase.types.annotated import ( 8 | Arc3Color, 9 | Arc3LocalizedUrl, 10 | Arc3Sri, 11 | Arc3Url, 12 | Arc16Traits, 13 | AsaDecimals, 14 | Base64Str, 15 | ImageMimeType, 16 | MimeType, 17 | UnicodeLocale, 18 | ) 19 | 20 | 21 | class TestArc3Localization: 22 | """Tests the `Arc3Localization` Pydantic model.""" 23 | 24 | valid_dict = { 25 | "uri": "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 26 | "default": "en", 27 | "locales": ["en", "es", "fr"], 28 | "integrity": { 29 | "es": "sha256-T0UofLOqdamWQDLok4vy/OcetEFzD8dRLig4229138Y=", 30 | "fr": "sha256-UUM89QQlXRlerdzVfatUzvNrEI/gwsgsN/lGkR13CKw=", 31 | }, 32 | } 33 | 34 | def test_valid_dict(self) -> None: 35 | """Test that validation succeeds when passed a valid dictionary.""" 36 | assert Arc3Localization.model_validate(self.valid_dict) 37 | 38 | 
@pytest.mark.parametrize( 39 | "field, expected_type", 40 | [ 41 | ("uri", Arc3LocalizedUrl), 42 | ("default", UnicodeLocale), 43 | ("locales", list[UnicodeLocale]), 44 | ("integrity", dict[UnicodeLocale, Arc3Sri] | None), 45 | ], 46 | ) 47 | def test_annotated_types(self, field: str, expected_type: type) -> None: 48 | """Test that annotated types are correct.""" 49 | assert ( 50 | Arc3Localization.model_fields[field].rebuild_annotation() == expected_type 51 | ) 52 | 53 | @pytest.mark.parametrize("field", ["uri", "default", "locales"]) 54 | def test_mandatory_fields(self, field: str) -> None: 55 | """Test that validation fails if a mandatory field is missing.""" 56 | test_dict = self.valid_dict.copy() 57 | test_dict.pop(field) 58 | with pytest.raises(ValidationError): 59 | Arc3Localization.model_validate(test_dict) 60 | 61 | @pytest.mark.parametrize( 62 | "field, expected", 63 | [ 64 | ("integrity", None), 65 | ], 66 | ) 67 | def test_default_values(self, field: str, expected: int | bool | None) -> None: 68 | """Test that non-mandatory fields have the correct default values.""" 69 | test_dict = self.valid_dict.copy() 70 | test_dict.pop(field) 71 | assert getattr(Arc3Localization.model_validate(test_dict), field) == expected 72 | 73 | 74 | class TestArc3Properties: 75 | """Tests the `Arc3Properties` Pydantic model.""" 76 | 77 | valid_dict = { 78 | "creator": "Tim Smith", 79 | "created_at": "January 2, 2022", 80 | "rich_property": { 81 | "name": "Name", 82 | "value": "123", 83 | "display_value": "123 Example Value", 84 | "class": "emphasis", 85 | "css": { 86 | "color": "#ffffff", 87 | "font-weight": "bold", 88 | "text-decoration": "underline", 89 | }, 90 | }, 91 | "traits": { 92 | "background": "red", 93 | "shirt_color": "blue", 94 | "glasses": "none", 95 | "tattoos": 4, 96 | }, 97 | } 98 | 99 | def test_valid_dict(self) -> None: 100 | """Test that validation succeeds when passed a valid dictionary.""" 101 | assert Arc3Properties.model_validate(self.valid_dict) 102 | 
103 | @pytest.mark.parametrize( 104 | "field, expected_type", 105 | [ 106 | ("traits", Arc16Traits | None), 107 | ], 108 | ) 109 | def test_annotated_types(self, field: str, expected_type: type) -> None: 110 | """Test that annotated types are correct.""" 111 | assert Arc3Properties.model_fields[field].rebuild_annotation() == expected_type 112 | 113 | @pytest.mark.parametrize( 114 | "field, expected", 115 | [ 116 | ("traits", None), 117 | ], 118 | ) 119 | def test_default_values(self, field: str, expected: int | bool | None) -> None: 120 | """Test that non-mandatory fields have the correct default values.""" 121 | test_dict = self.valid_dict.copy() 122 | test_dict.pop(field) 123 | assert getattr(Arc3Properties.model_validate(test_dict), field) == expected 124 | 125 | 126 | class TestArc3Metadata: 127 | """Tests the `Arc3Metadata` Pydantic model.""" 128 | 129 | valid_dict = { 130 | "name": "My Song", 131 | "decimals": 1, 132 | "description": "My first and best song!", 133 | "image": "https://s3.amazonaws.com/your-bucket/song/cover/mysong.png", 134 | "image_integrity": "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=", 135 | "image_mimetype": "image/png", 136 | "background_color": "FFFFFF", 137 | "external_url": "https://mysongs.com/song/mysong", 138 | "external_url_integrity": "sha256-7IGatqxLhUYkruDsEva52Ku43up6774yAmf0k98MXnU=", 139 | "external_url_mimetype": "text/html", 140 | "animation_url": "https://s3.amazonaws.com/your-bucket/song/preview/mysong.ogg", 141 | "animation_url_integrity": "sha256-LwArA6xMdnFF3bvQjwODpeTG/RVn61weQSuoRyynA1I=", 142 | "animation_url_mimetype": "audio/ogg", 143 | "properties": { 144 | "simple_property": "example value", 145 | "rich_property": { 146 | "name": "Name", 147 | "value": "123", 148 | "display_value": "123 Example Value", 149 | "class": "emphasis", 150 | "css": { 151 | "color": "#ffffff", 152 | "font-weight": "bold", 153 | "text-decoration": "underline", 154 | }, 155 | }, 156 | "valid_types": { 157 | "string": "Name", 158 
| "int": 1, 159 | "float": 3.14, 160 | "list": ["a", "b", "c"], 161 | }, 162 | "array_property": { 163 | "name": "Name", 164 | "value": [1, 2, 3, 4], 165 | "class": "emphasis", 166 | }, 167 | "traits": { 168 | "background": "red", 169 | "shirt_color": "blue", 170 | "glasses": "none", 171 | "tattoos": 4, 172 | }, 173 | }, 174 | "extra_metadata": "iHcUslDaL/jEM/oTxqEX++4CS8o3+IZp7/V5Rgchqwc=", 175 | "localization": { 176 | "uri": "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 177 | "default": "en", 178 | "locales": ["en", "es", "fr"], 179 | "integrity": { 180 | "es": "sha256-T0UofLOqdamWQDLok4vy/OcetEFzD8dRLig4229138Y=", 181 | "fr": "sha256-UUM89QQlXRlerdzVfatUzvNrEI/gwsgsN/lGkR13CKw=", 182 | }, 183 | }, 184 | } 185 | 186 | def test_valid_dict(self): 187 | """Test that validation succeeds when passed a valid dictionary.""" 188 | assert Arc3Metadata.model_validate(self.valid_dict) 189 | 190 | @pytest.mark.parametrize( 191 | "field, expected_type", 192 | [ 193 | ("decimals", AsaDecimals | None), 194 | ("image", Arc3Url | None), 195 | ("image_integrity", Arc3Sri | None), 196 | ("image_mimetype", ImageMimeType | None), 197 | ("background_color", Arc3Color | None), 198 | ("external_url", Arc3Url | None), 199 | ("external_url_integrity", Arc3Sri | None), 200 | ("animation_url", Arc3Url | None), 201 | ("animation_url_integrity", Arc3Sri | None), 202 | ("animation_url_mimetype", MimeType | None), 203 | ("properties", Arc3Properties | None), 204 | ("extra_metadata", Base64Str | None), 205 | ("localization", Arc3Localization | None), 206 | ], 207 | ) 208 | def test_annotated_types(self, field: str, expected_type: type) -> None: 209 | """Test that annotated types are correct.""" 210 | assert Arc3Metadata.model_fields[field].rebuild_annotation() == expected_type 211 | 212 | @pytest.mark.parametrize( 213 | "field, expected", 214 | [ 215 | ("name", None), 216 | ("decimals", None), 217 | ("description", None), 218 | ("image", None), 219 | ("image_integrity", 
None), 220 | ("image_mimetype", None), 221 | ("background_color", None), 222 | ("external_url", None), 223 | ("external_url_integrity", None), 224 | ("external_url_mimetype", None), 225 | ("animation_url", None), 226 | ("animation_url_integrity", None), 227 | ("animation_url_mimetype", None), 228 | ("properties", None), 229 | ("extra_metadata", None), 230 | ("localization", None), 231 | ], 232 | ) 233 | def test_default_values(self, field: str, expected: int | bool | None) -> None: 234 | """Test that non-mandatory fields have the correct default values.""" 235 | test_dict = self.valid_dict.copy() 236 | test_dict.pop(field) 237 | assert getattr(Arc3Metadata.model_validate(test_dict), field) == expected 238 | -------------------------------------------------------------------------------- /tests/test_models/test_asset_params.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the AssetParams Pydantic model.""" 2 | 3 | 4 | from types import SimpleNamespace 5 | 6 | import pytest 7 | from pydantic import ValidationError 8 | 9 | from algobase.models.asset_params import AssetParams 10 | from algobase.types.annotated import ( 11 | AlgorandAddress, 12 | AlgorandHash, 13 | AsaAssetName, 14 | AsaDecimals, 15 | AsaUnitName, 16 | AsaUrl, 17 | Uint64, 18 | ) 19 | 20 | 21 | class TestAssetParams: 22 | """Tests the `AssetParams` Pydantic model.""" 23 | 24 | valid_dict = { 25 | "total": 1, 26 | "decimals": 0, 27 | "default_frozen": False, 28 | "unit_name": "USDT", 29 | "asset_name": "Tether", 30 | "url": "https://tether.to/", 31 | "metadata_hash": b"fACPO4nRgO55j1ndAK3W6Sgc4APkcyFh", 32 | "manager": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 33 | "reserve": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 34 | "freeze": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 35 | "clawback": "7ZUECA7HFLZTXENRV24SHLU4AVPUTMTTDUFUBNBD64C73F3UHRTHAIOF6Q", 36 | } 37 | 38 | def 
test_valid_dict(self) -> None: 39 | """Test that validation succeeds when passed a valid dictionary.""" 40 | assert AssetParams.model_validate(self.valid_dict) 41 | 42 | @pytest.mark.parametrize( 43 | "field, expected_type", 44 | [ 45 | ("total", Uint64), 46 | ("decimals", AsaDecimals), 47 | ("unit_name", AsaUnitName | None), 48 | ("asset_name", AsaAssetName | None), 49 | ("url", AsaUrl | None), 50 | ("metadata_hash", AlgorandHash | None), 51 | ("manager", AlgorandAddress | None), 52 | ("reserve", AlgorandAddress | None), 53 | ("freeze", AlgorandAddress | None), 54 | ("clawback", AlgorandAddress | None), 55 | ], 56 | ) 57 | def test_annotated_types(self, field: str, expected_type: type) -> None: 58 | """Test that annotated types are correct.""" 59 | assert AssetParams.model_fields[field].rebuild_annotation() == expected_type 60 | 61 | @pytest.mark.parametrize("field", ["total"]) 62 | def test_mandatory_fields(self, field: str) -> None: 63 | """Test that validation fails if a mandatory field is missing.""" 64 | test_dict = self.valid_dict.copy() 65 | test_dict.pop(field) 66 | with pytest.raises(ValidationError): 67 | AssetParams.model_validate(test_dict) 68 | 69 | @pytest.mark.parametrize( 70 | "field, expected", 71 | [ 72 | ("decimals", 0), 73 | ("default_frozen", False), 74 | ("unit_name", None), 75 | ("asset_name", None), 76 | ("url", None), 77 | ("metadata_hash", None), 78 | ("manager", None), 79 | ("reserve", None), 80 | ("freeze", None), 81 | ("clawback", None), 82 | ], 83 | ) 84 | def test_default_values(self, field: str, expected: int | bool | None) -> None: 85 | """Test that non-mandatory fields have the correct default values.""" 86 | test_dict = self.valid_dict.copy() 87 | test_dict.pop(field) 88 | assert getattr(AssetParams.model_validate(test_dict), field) == expected 89 | 90 | @pytest.mark.parametrize("x", [1.0, "1"]) 91 | def test_total_invalid_strict(self, x: float | str) -> None: 92 | """Test that `total` raises an error in strict mode if passed a 
float or a string.""" 93 | test_dict = self.valid_dict.copy() 94 | test_dict["total"] = x 95 | with pytest.raises(ValidationError): 96 | AssetParams.model_validate(test_dict, strict=True) 97 | 98 | @pytest.mark.parametrize("x, expected", [(1.0, 1), ("1", 1)]) 99 | def test_total_valid_non_strict_coerced( 100 | self, x: float | str, expected: int 101 | ) -> None: 102 | """Test that `total` does not raise an error in non-strict mode if passed a float or a string.""" 103 | test_dict = self.valid_dict.copy() 104 | test_dict["total"] = x 105 | assert AssetParams.model_validate(test_dict, strict=False).total == expected 106 | 107 | @pytest.mark.parametrize("x", [1, 1.0, "True", "true"]) 108 | def test_default_frozen_invalid_strict(self, x: int | float | str) -> None: 109 | """Test that `default_frozen` raises an error in strict mode if passed a non-boolean type.""" 110 | test_dict = self.valid_dict.copy() 111 | test_dict["default_frozen"] = x 112 | with pytest.raises(ValidationError): 113 | AssetParams.model_validate(test_dict, strict=True) 114 | 115 | @pytest.mark.parametrize( 116 | "x, expected", 117 | [ 118 | (1, True), 119 | (1.0, True), 120 | ("True", True), 121 | ("true", True), 122 | (0, False), 123 | (0.0, False), 124 | ("False", False), 125 | ("false", False), 126 | ], 127 | ) 128 | def test_default_frozen_non_strict( 129 | self, x: int | float | str, expected: bool 130 | ) -> None: 131 | """Test that `default_frozen` does not raise an error in non-strict mode if passed a valid non-boolean type value.""" 132 | test_dict = self.valid_dict.copy() 133 | test_dict["default_frozen"] = x 134 | assert ( 135 | AssetParams.model_validate(test_dict, strict=False).default_frozen 136 | == expected 137 | ) 138 | 139 | def test_from_algod(self) -> None: 140 | """Tests the `from_algod` class method.""" 141 | algod_client = SimpleNamespace() 142 | algod_client.asset_info = lambda _: { 143 | "asset": { 144 | "created-at-round": 8874561, 145 | "deleted": False, 146 | "index": 
31566704, 147 | "params": { 148 | "clawback": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAY5HFKQ", 149 | "creator": "2UEQTE5QDNXPI7M3TU44G6SYKLFWLPQO7EBZM7K7MHMQQMFI4QJPLHQFHM", 150 | "decimals": 6, 151 | "default-frozen": False, 152 | "freeze": "3ERES6JFBIJ7ZPNVQJNH2LETCBQWUPGTO4ROA6VFUR25WFSYKGX3WBO5GE", 153 | "manager": "37XL3M57AXBUJARWMT5R7M35OERXMH3Q22JMMEFLBYNDXXADGFN625HAL4", 154 | "metadata-hash": "MWQ3NWYwNGYwZmE5NDA3MDkxOWZkZDNlY2FhMmM1ZmQ=", 155 | "name": "USDC", 156 | "name-b64": "VVNEQw==", 157 | "reserve": "2UEQTE5QDNXPI7M3TU44G6SYKLFWLPQO7EBZM7K7MHMQQMFI4QJPLHQFHM", 158 | "total": 18446744073709551615, 159 | "unit-name": "USDC", 160 | "unit-name-b64": "VVNEQw==", 161 | "url": "https://www.centre.io/usdc", 162 | "url-b64": "aHR0cHM6Ly93d3cuY2VudHJlLmlvL3VzZGM=", 163 | }, 164 | }, 165 | "current-round": 41738357, 166 | } 167 | asset_params = AssetParams.from_algod(algod_client, 31566704) # type: ignore[arg-type] 168 | 169 | assert asset_params.unit_name == "USDC" 170 | assert asset_params.asset_name == "USDC" 171 | assert asset_params.decimals == 6 172 | 173 | algod_client.asset_info = lambda _: { 174 | "index": 31566704, 175 | "params": { 176 | "creator": "2UEQTE5QDNXPI7M3TU44G6SYKLFWLPQO7EBZM7K7MHMQQMFI4QJPLHQFHM", 177 | "decimals": 6, 178 | "default-frozen": False, 179 | "freeze": "3ERES6JFBIJ7ZPNVQJNH2LETCBQWUPGTO4ROA6VFUR25WFSYKGX3WBO5GE", 180 | "manager": "37XL3M57AXBUJARWMT5R7M35OERXMH3Q22JMMEFLBYNDXXADGFN625HAL4", 181 | "name": "USDC", 182 | "name-b64": "VVNEQw==", 183 | "reserve": "2UEQTE5QDNXPI7M3TU44G6SYKLFWLPQO7EBZM7K7MHMQQMFI4QJPLHQFHM", 184 | "total": 18446744073709551615, 185 | "unit-name": "USDC", 186 | "unit-name-b64": "VVNEQw==", 187 | "url": "https://www.centre.io/usdc", 188 | "url-b64": "aHR0cHM6Ly93d3cuY2VudHJlLmlvL3VzZGM=", 189 | }, 190 | } 191 | asset_params = AssetParams.from_algod(algod_client, 31566704) # type: ignore[arg-type] 192 | 193 | assert asset_params.unit_name == "USDC" 194 | assert asset_params.asset_name 
== "USDC" 195 | assert asset_params.decimals == 6 196 | 197 | asset_params = AssetParams.from_algod(algod_client, 0) # type: ignore[arg-type] 198 | 199 | assert asset_params.unit_name == "ALGO" 200 | assert asset_params.asset_name == "ALGO" 201 | assert asset_params.decimals == 6 202 | -------------------------------------------------------------------------------- /tests/test_settings.py: -------------------------------------------------------------------------------- 1 | """Tests for the `Settings` class.""" 2 | 3 | from algobase.settings import Settings 4 | 5 | 6 | class TestSettings: 7 | """Tests for the `Settings` class.""" 8 | 9 | def test_settings(self) -> None: 10 | """Test that the settings are loaded correctly.""" 11 | 12 | def callable(settings: Settings) -> bool: 13 | """Function to test that a settings object can be piped to a callable. 14 | 15 | Args: 16 | settings (Settings): The settings object. 17 | 18 | Returns: 19 | bool: True if the object passed is a settings object. 
20 | """ 21 | return isinstance(settings, Settings) 22 | 23 | settings = Settings() 24 | assert settings | callable 25 | -------------------------------------------------------------------------------- /tests/test_types/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for algobase.""" 2 | -------------------------------------------------------------------------------- /tests/test_utils/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for algobase.""" 2 | -------------------------------------------------------------------------------- /tests/test_utils/test_cid.py: -------------------------------------------------------------------------------- 1 | """Tests for the CID utility functions.""" 2 | 3 | from algobase.utils.cid import cid_to_algorand_address 4 | 5 | 6 | def test_cid_to_algorand_address() -> None: 7 | """Test the cid_to_algorand_address() function.""" 8 | # Test case comes from: https://github.com/algorandfoundation/ARCs/blob/main/ARCs/arc-0019.md 9 | assert ( 10 | cid_to_algorand_address("QmQZyq4b89RfaUw8GESPd2re4hJqB8bnm4kVHNtyQrHnnK") 11 | == "EEQYWGGBHRDAMTEVDPVOSDVX3HJQIG6K6IVNR3RXHYOHV64ZWAEISS4CTI" 12 | ) 13 | -------------------------------------------------------------------------------- /tests/test_utils/test_hash.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the hash functions.""" 2 | 3 | import pytest 4 | 5 | from algobase.utils.hash import sha256, sha512_256 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "data, expected_digest", 10 | [ 11 | ( 12 | b"hello", 13 | b",\xf2M\xba_\xb0\xa3\x0e&\xe8;*\xc5\xb9\xe2\x9e\x1b\x16\x1e\\\x1f\xa7B^s\x043b\x93\x8b\x98$", 14 | ), 15 | ( 16 | b"world", 17 | b"Hn\xa4b$\xd1\xbbO\xb6\x80\xf3O|\x9a\xd9j\x8f$\xec\x88\xbes\xea\x8eZle&\x0e\x9c\xb8\xa7", 18 | ), 19 | ], 20 | ) 21 | def test_sha256(data: bytes, expected_digest: bytes) -> None: 22 | 
"""Test that sha256() returns the correct hash digest.""" 23 | assert sha256(data) == expected_digest 24 | 25 | 26 | @pytest.mark.parametrize( 27 | "data, expected_digest", 28 | [ 29 | ( 30 | b"hello", 31 | b"\xe3\r\x87\xcf\xa2\xa7]\xb5E\xea\xc4\xd6\x1b\xaf\x97\x03f\xa85|\x7fr\xfa\x95\xb5-\n\xcc\xb6\x98\xf1:", 32 | ), 33 | ( 34 | b"world", 35 | b"\xb8\x00\x7f\xc6@\xbe\xf3\xe2\xf1\x0e\xa7\xad\x96\x81\xf6\xfd\xbd\x13(\x87@i`\xf3eE+\xa0\xa1^e\xe2", 36 | ), 37 | ], 38 | ) 39 | def test_sha512_256(data: bytes, expected_digest: bytes) -> None: 40 | """Test that sha512_256() returns the correct hash digest.""" 41 | assert sha512_256(data) == expected_digest 42 | -------------------------------------------------------------------------------- /tests/test_utils/test_read.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the algobase.utils.read functions.""" 2 | import pytest 3 | 4 | from algobase.utils.read import read_ipfs_gateways, read_mime_types 5 | 6 | 7 | def test_read_ipfs_gateways() -> None: 8 | """Test that read_ipfs_gateways() returns a list of IPFS gateways.""" 9 | gateways = read_ipfs_gateways() 10 | assert gateways and isinstance(gateways, list) 11 | assert all(isinstance(gateway, str) for gateway in gateways) 12 | assert "https://ipfs.io" in gateways 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "mime_type", 17 | [ 18 | "application/json", 19 | "text/html", 20 | "image/jpeg", 21 | "video/mp4", 22 | "audio/mpeg", 23 | ], 24 | ) 25 | def test_read_mime_types(mime_type: str) -> None: 26 | """Test that read_mime_types() returns a list of MIME types.""" 27 | mime_types = read_mime_types() 28 | assert mime_types and isinstance(mime_types, list) 29 | assert mime_type in mime_types 30 | -------------------------------------------------------------------------------- /tests/test_utils/test_url.py: -------------------------------------------------------------------------------- 1 | """Tests for the URL utility 
functions.""" 2 | 3 | import pytest 4 | 5 | from algobase.utils.url import decode_url_braces 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "x, expected", 10 | [ 11 | ("https://example.com/%7Bid%7D", "https://example.com/{id}"), 12 | ( 13 | "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/%7Blocale%7D.json", 14 | "ipfs://QmWS1VAdMD353A6SDk9wNyvkT14kyCiZrNDYAad4w1tKqT/{locale}.json", 15 | ), 16 | ("https://example.com/", "https://example.com/"), 17 | ], 18 | ) 19 | def test_decode_url_braces(x: str, expected: str) -> None: 20 | """Test that decode_url_braces() decodes braces in a URL.""" 21 | assert decode_url_braces(x) == expected 22 | -------------------------------------------------------------------------------- /tests/types.py: -------------------------------------------------------------------------------- 1 | """Type aliases for test parameters.""" 2 | 3 | from typing import Any, TypeAlias 4 | 5 | # FixtureValue: TypeAlias = str | int | bool | bytes | None 6 | # FixtureDict: TypeAlias = dict[ 7 | # str, FixtureValue | list[FixtureValue] | dict[str, FixtureValue] 8 | # ] 9 | 10 | FixtureDict: TypeAlias = dict[str, Any] 11 | --------------------------------------------------------------------------------