├── .gitattributes ├── .github ├── CODEOWNERS ├── gpg-keys │ ├── quaid.asc │ └── samj.asc ├── scripts │ └── verify-gpg-signatures.sh └── workflows │ ├── canary.yml │ └── verify-gpg-signatures.yml ├── .gitignore ├── .vscode ├── settings.json.sample ├── tasks.json.sample └── tasks │ ├── start_backend.ps1 │ └── start_backend.sh ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── __init__.py ├── __main__.py ├── abilities ├── .gitignore ├── README.md ├── airllm │ └── 0.1.0 │ │ ├── main.py │ │ └── metadata.json ├── ansible │ └── 0.1.0 │ │ └── metadata.json ├── aws │ └── 0.1.0 │ │ └── metadata.json ├── beautifulsoup │ ├── 0.1.0 │ │ └── metadata.json │ └── 0.1.1 │ │ └── metadata.json ├── chroma │ └── 0.1.0 │ │ ├── README.md │ │ └── metadata.json ├── langchain │ └── 0.1.0 │ │ └── metadata.json ├── llama_cpp │ └── 0.1.0 │ │ └── metadata.json ├── llamafile │ └── 0.1.0 │ │ ├── .gitignore │ │ └── metadata.json ├── requirements.txt ├── sample-metadata.json ├── schema-metadata.json └── validate-metadata.py ├── alembic.ini ├── apis ├── openai │ └── openapi.yaml └── paios │ ├── README.md │ ├── abilities.http │ ├── asset.http │ ├── config.http │ ├── downloads.http │ ├── openapi.yaml │ ├── options.http │ ├── personas.http │ ├── resource.http │ └── users.http ├── app.py ├── backend ├── __init__.py ├── __main__.py ├── api │ ├── AbilitiesView.py │ ├── AssetsView.py │ ├── AuthView.py │ ├── ConfigView.py │ ├── DownloadsView.py │ ├── PersonasView.py │ ├── ResourcesView.py │ ├── SharesView.py │ ├── UsersView.py │ └── __init__.py ├── app.py ├── db.py ├── dependencies │ ├── ContainerDependency.py │ ├── Dependency.py │ ├── DependencyState.py │ ├── LinuxDependency.py │ ├── PythonDependency.py │ └── ResourceDependency.py ├── encryption.py ├── env.py ├── managers │ ├── AbilitiesManager.py │ ├── AssetsManager.py │ ├── AuthManager.py │ ├── CasbinRoleManager.py │ ├── ConfigManager.py │ ├── DownloadsManager.py │ ├── Manager.py │ ├── PersonasManager.py │ ├── README.md │ ├── ResourcesManager.py │ ├── 
SharesManager.py │ ├── UsersManager.py │ └── __init__.py ├── models.py ├── pagination.py ├── rbac_model.conf ├── redirector.py ├── requirements.txt ├── schemas.py ├── templates │ └── email_verification_template.html ├── tests │ └── test_db.py └── utils.py ├── common ├── __init__.py ├── cert.py ├── config.py ├── log.py ├── mail.py ├── paths.py ├── requirements.txt └── utils.py ├── data └── README.md ├── frontend ├── .eslintrc.js ├── .gitignore ├── README.md ├── index.html ├── package-lock.json ├── package.json ├── prettier.config.js ├── public │ ├── favicon.png │ └── manifest.json ├── src │ ├── App.tsx │ ├── CustomLayout.tsx │ ├── CustomMenu.tsx │ ├── Dashboard.tsx │ ├── Login.css │ ├── Login.tsx │ ├── VerifyEmail.tsx │ ├── abilities.tsx │ ├── apiBackend.ts │ ├── apis │ │ └── auth.ts │ ├── assets.tsx │ ├── assets │ │ └── paios.png │ ├── authProvider.tsx │ ├── components │ │ ├── CheckedField.tsx │ │ ├── FormattedSizeField.tsx │ │ ├── FormattedTransferRateField.tsx │ │ └── ProgressField.tsx │ ├── dataProvider.ts │ ├── dependencies │ │ ├── DebianDependency.tsx │ │ ├── PythonDependency.tsx │ │ └── ResourceDependency.tsx │ ├── downloads.tsx │ ├── index.tsx │ ├── resources.tsx │ ├── shares.tsx │ ├── users.tsx │ ├── utils │ │ ├── authUtils.ts │ │ └── formatSize.ts │ └── vite-env.d.ts ├── tsconfig.json └── vite.config.ts ├── migrations ├── README.md ├── env.py ├── script.py.mako └── versions │ ├── 008645bff529_add_role_to_user_model.py │ ├── 0d66a93c6c1f_added_user_table.py │ ├── 1128b8cc9a3d_new_column_added_in_user_table.py │ ├── 187855982332_added_session_table.py │ ├── 4ce11e8569dc_added_share_table.py │ ├── 56a640fb45b2_added_config_table.py │ ├── 75aaaf2cd1a2_added_resource_table.py │ ├── 91d051f98616_added_cred_table.py │ ├── cb6e97a5186c_added_asset_table.py │ ├── e7cfcff87b8e_remove_role_from_user_table.py │ └── f5235ab5e888_added_persona_table.py └── scripts ├── remove_environment.py └── setup_environment.py /.gitattributes: 
-------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | 3 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # These owners will be the default owners for everything in 2 | # the repo. Unless a later match takes precedence, 3 | # @samj and @quaid will be requested for 4 | # review when someone opens a pull request. 5 | * @samj @quaid 6 | 7 | # Specific ownership for GPG verification scripts 8 | /.github/scripts/verify-gpg-signature.sh @samj @quaid 9 | 10 | # Ownership for workflow files 11 | /.github/workflows/ @samj @quaid 12 | 13 | # Ownership for GPG keys directory 14 | /.gpg-keys/ @samj @quaid 15 | 16 | # Add more specific rules as needed 17 | -------------------------------------------------------------------------------- /.github/gpg-keys/quaid.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP PUBLIC KEY BLOCK----- 2 | 3 | mDMEZpg4VBYJKwYBBAHaRw8BAQdAM1dMwf493lgKdlskA02KMYab7TmKLqxq2vWx 4 | 05KM0fe0TUthcnN0ZW4gR3JhaGFtIFdhZGUgKHBlcnNvbmFsIGZvciBjb21tdW5p 5 | dHkgY29udHJpYnV0aW9ucykgPHF1YWlkQGlxdWFpZC5vcmc+iJkEExYKAEEWIQRF 6 | 2ng90Vq9PPllUPamdFnReSMK3gUCZpg4VAIbAwUJCWYBgAULCQgHAgIiAgYVCgkI 7 | CwIEFgIDAQIeBwIXgAAKCRCmdFnReSMK3l2GAQCmLvXo3waGXnFUrv/K/7u2o0z6 8 | CpU+umN8Onm2sGBLBQD+IcobNvAXI2vwdBEoLFDjJnTrP5qOpKgrnM6F7XS9BAe4 9 | OARmmDhUEgorBgEEAZdVAQUBAQdALTGIQmmQ5FhUj7W/anDehg0SjLTrbF54VkQ3 10 | xevB6R4DAQgHiH4EGBYKACYWIQRF2ng90Vq9PPllUPamdFnReSMK3gUCZpg4VAIb 11 | DAUJCWYBgAAKCRCmdFnReSMK3qkIAQDxoeNPvyqn0NE/mZi8ri8yLi4MYHeJ9M2u 12 | 2E7F1tuLGgD/QJZokI5BhqNgABH/ZHXaopKVDoVnJ1rsQEMUflwotwM= 13 | =YbBk 14 | -----END PGP PUBLIC KEY BLOCK----- 15 | -------------------------------------------------------------------------------- /.github/gpg-keys/samj.asc: 
-------------------------------------------------------------------------------- 1 | -----BEGIN PGP PUBLIC KEY BLOCK----- 2 | 3 | mDMEYqXAMxYJKwYBBAHaRw8BAQdAsx4W9W94kIiBnxtUbSXpO2jyJCIU3CXColQy 4 | 35itZBW0J1NhbSBKb2huc3RvbiAoUGVyc29uYWwpIDxzYW1qQHNhbWoubmV0Poic 5 | BBMWCgBEAhsDBQsJCAcCAiICBhUKCQgLAgQWAgMBAh4HAheABQkFyv+mFiEEAoOj 6 | 66S6n5dKx1/pGI5dwnpU+iUFAmaRbuUCGQEACgkQGI5dwnpU+iXMewD/aIaL4uTh 7 | F+g5NWUru4SwxXmnHEKP9pMTqTI68tIqzYIA/2gOHsAt7XGhdvOSBCt1zpKTFssd 8 | N+V9XLSHrKom0uYDtCdTYW0gSm9obnN0b24gKERlYmlhbikgPHNhbWpAZGViaWFu 9 | Lm9yZz6ImQQTFgoAQQIbAwULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAUJBcr/ 10 | phYhBAKDo+ukup+XSsdf6RiOXcJ6VPolBQJmkW7dAAoJEBiOXcJ6VPolLUkA/iBy 11 | NuvkMb+rlUuGOVfy5rhVgc2RHjqXwyop1VjB2VfIAP4+1tDogUBMbuJAJgnoVfmJ 12 | 5fBlBLeHv7fmjv7/3Zs+CbQpU2FtIEpvaG5zdG9uIChBY3VtaW5vKSA8c2FtakBh 13 | Y3VtaW5vLmNvbT6ImQQTFgoAQQIbAwULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIX 14 | gAUJBcr/phYhBAKDo+ukup+XSsdf6RiOXcJ6VPolBQJmkW7lAAoJEBiOXcJ6VPol 15 | 50wBAMqGon76sRziv/2Uor3aI7RdeglcxzcCt37vnYjlnrHeAP0TUqzhulQY30ps 16 | BdP+tIkoXS9iAhVd2B55TPgaiGimCLQtU2FtIEpvaG5zdG9uIChHZW9yZ2lhIFRl 17 | Y2gpIDxzYW1qQGdhdGVjaC5lZHU+iJkEExYKAEEWIQQCg6PrpLqfl0rHX+kYjl3C 18 | elT6JQUCZpFuYAIbAwUJBcr/pgULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAK 19 | CRAYjl3CelT6JSMmAQDsS2IhZLQEmTwXLah2ZFl6fn6fsI/0l8SG/H/qIYGReAD/ 20 | cUyf3tkk69OmBWnvDwszwEfxPojvrM+//33mi/HCYw+4OARipcAzEgorBgEEAZdV 21 | AQUBAQdATaZcnkVMti6GOcdarI4oID3/hGNTVEPGRB0fdMTzkEkDAQgHiH4EGBYK 22 | ACYCGwwWIQQCg6PrpLqfl0rHX+kYjl3CelT6JQUCZo+MLgUJBcr/ewAKCRAYjl3C 23 | elT6Ja+AAP9syrqmcjVS9Dn/m857Oz+YMOsRM96AX6FdcO9m+rxgYAD/QhFU5c/c 24 | +t/avwNUirI4YGqMh4BTZCAwyLiLfuEdjQM= 25 | =DVU7 26 | -----END PGP PUBLIC KEY BLOCK----- 27 | -------------------------------------------------------------------------------- /.github/workflows/canary.yml: -------------------------------------------------------------------------------- 1 | name: Build and Commit to Canary 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | build: 10 | 
runs-on: ubuntu-latest 11 | 12 | environment: canary # Specify the environment 13 | 14 | permissions: 15 | contents: write # Grant write permission to the contents scope 16 | 17 | steps: 18 | - name: Checkout code 19 | uses: actions/checkout@v4 20 | 21 | - name: Set up Node.js 22 | uses: actions/setup-node@v4 23 | with: 24 | node-version: '20.x' 25 | 26 | - name: Install frontend dependencies 27 | working-directory: frontend 28 | run: npm ci 29 | 30 | - name: Build frontend 31 | working-directory: frontend 32 | run: npm run build 33 | 34 | - name: Import GPG key 35 | run: | 36 | echo "${{ secrets.GPG_PRIVATE_KEY }}" | gpg --batch --import 37 | env: 38 | GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} # F1E9CF7FAEE383E9 39 | 40 | - name: Commit and push to canary branch 41 | run: | 42 | export GPG_TTY=$(tty) 43 | git config --global user.name '${{ vars.GPG_USER_NAME }}' 44 | git config --global user.email '${{ vars.GPG_USER_EMAIL }}' 45 | git config --global commit.gpgSign true 46 | git config --global user.signingkey ${{ vars.GPG_KEY_ID }} 47 | git checkout main 48 | git checkout -B canary 49 | git add -f frontend/dist 50 | git commit -S -m "Update canary with latest build from main" 51 | git push origin canary --force 52 | env: 53 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 54 | -------------------------------------------------------------------------------- /.github/workflows/verify-gpg-signatures.yml: -------------------------------------------------------------------------------- 1 | name: Verify GPG Signatures on Commits 2 | 3 | on: 4 | push: 5 | branches: 6 | - '**' 7 | workflow_dispatch: 8 | inputs: 9 | check_all_commits: 10 | description: 'Check all commits in the repository' 11 | required: true 12 | default: false 13 | type: boolean 14 | 15 | jobs: 16 | verify-signatures: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Checkout code 20 | uses: actions/checkout@v4 21 | with: 22 | fetch-depth: 0 23 | - name: Set up GPG 24 | run: | 25 | mkdir -p ~/.gnupg 
26 | echo "use-agent" >> ~/.gnupg/gpg.conf 27 | echo "pinentry-mode loopback" >> ~/.gnupg/gpg.conf 28 | chmod 700 ~/.gnupg 29 | 30 | - name: Run GPG signature verification 31 | run: | 32 | chmod +x .github/scripts/verify-gpg-signatures.sh 33 | if ! .github/scripts/verify-gpg-signatures.sh; then 34 | echo "GPG signature verification failed or encountered an error." 35 | exit 1 36 | fi 37 | env: 38 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 39 | GITHUB_EVENT_NAME: ${{ github.event_name }} 40 | GITHUB_BASE_REF: ${{ github.base_ref }} 41 | GITHUB_HEAD_REF: ${{ github.head_ref }} 42 | GITHUB_EVENT_BEFORE: ${{ github.event.before }} 43 | GITHUB_SHA: ${{ github.sha }} 44 | CHECK_ALL_COMMITS: ${{ github.event.inputs.check_all_commits || 'false' }} 45 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | #dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | #.idea/ 161 | 162 | # Editor directories and files 163 | .vscode/* 164 | !.vscode/extensions.json 165 | !.vscode/*.sample 166 | !.vscode/tasks 167 | .idea 168 | .DS_Store 169 | *.suo 170 | *.ntvs* 171 | *.njsproj 172 | *.sln 173 | *.sw? 
174 | 175 | # data 176 | data/* 177 | !data/README.md 178 | *.sqlite3 179 | *.db 180 | 181 | -------------------------------------------------------------------------------- /.vscode/settings.json.sample: -------------------------------------------------------------------------------- 1 | { 2 | "python.analysis.extraPaths": ["./"], 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/tasks.json.sample: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Start Frontend Server", 6 | "type": "shell", 7 | "command": "npm run dev", 8 | "isBackground": true, 9 | "presentation": { 10 | "reveal": "always", 11 | "panel": "new", 12 | "group": "frontend" 13 | }, 14 | "options": { 15 | "cwd": "${workspaceFolder}/frontend" 16 | }, 17 | "problemMatcher": { 18 | "owner": "custom", 19 | "pattern": [ 20 | { 21 | "regexp": ".", 22 | "file": 1, 23 | "location": 2, 24 | "message": 3 25 | } 26 | ], 27 | "background": { 28 | "activeOnStart": true, 29 | "beginsPattern": ".", 30 | "endsPattern": "." 31 | } 32 | } 33 | }, 34 | { 35 | "label": "Start Backend Server", 36 | "type": "shell", 37 | "command": "./.vscode/tasks/start_backend.sh", 38 | "windows": { 39 | "command": "pwsh .vscode/tasks/start_backend.ps1", 40 | }, 41 | "presentation": { 42 | "reveal": "always", 43 | "panel": "new", 44 | "group": "backend" 45 | }, 46 | "problemMatcher": { 47 | "owner": "custom", 48 | "pattern": [ 49 | { 50 | "regexp": ".", 51 | "file": 1, 52 | "location": 2, 53 | "message": 3 54 | } 55 | ], 56 | "background": { 57 | "activeOnStart": true, 58 | "beginsPattern": ".", 59 | "endsPattern": "." 
60 | } 61 | } 62 | }, 63 | { 64 | "label": "Run Both Servers", 65 | "dependsOn": [ 66 | "Start Frontend Server", 67 | "Start Backend Server" 68 | ], 69 | "presentation": { 70 | "reveal": "always", 71 | "panel": "shared" 72 | } 73 | } 74 | ] 75 | } 76 | -------------------------------------------------------------------------------- /.vscode/tasks/start_backend.ps1: -------------------------------------------------------------------------------- 1 | & .\.venv\Scripts\Activate.ps1 2 | python __main__.py 3 | -------------------------------------------------------------------------------- /.vscode/tasks/start_backend.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | source .venv/bin/activate 3 | python -m paios 4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Personal AI Operating System (pAI-OS) 2 | 3 | Thank you for your interest in contributing to pAI-OS! We welcome contributions from the community to help make pAI-OS even better. 4 | 5 | _By making contributions, you are signing off on the Developer Certificate of Origin (DCO) below._ 6 | 7 | ## Getting Started 8 | 9 | To get started with contributing to pAI-OS, please follow these steps: 10 | 11 | 1. Fork the pAI-OS repository on GitHub. 12 | 2. Clone your forked repository to your local machine. 13 | 3. Create a new branch for your changes. 14 | 4. Make your desired changes to the codebase. 15 | 5. Test your changes thoroughly. 16 | 6. Commit your changes with descriptive commit messages. 17 | 7. Push your changes to your forked repository. 18 | 8. Submit a pull request to the main pAI-OS repository. 19 | 20 | ## Code Style 21 | 22 | We follow a specific code style in the pAI-OS project to ensure consistency and readability. 
Please make sure to adhere to the following guidelines: 23 | 24 | - Use consistent indentation (e.g., 4 spaces). 25 | - Follow the naming conventions for variables, functions, and classes. 26 | - Write clear and concise comments to explain your code. 27 | 28 | ## Reporting Issues 29 | 30 | If you encounter any bugs or issues while using pAI-OS, please report them on the GitHub issue tracker. When reporting an issue, please provide the following information: 31 | 32 | - A clear and concise description of the issue. 33 | - Steps to reproduce the issue. 34 | - Any relevant error messages or logs. 35 | 36 | ## Feature Requests 37 | 38 | If you have an idea for a new feature or improvement for pAI-OS, we would love to hear it! Please open a GitHub issue and provide a detailed description of your proposed feature. 39 | 40 | ## Code Reviews 41 | 42 | All contributions to pAI-OS go through a code review process. This helps ensure the quality and maintainability of the codebase. Please be open to feedback and make any necessary changes based on the code review comments. 43 | 44 | ## License 45 | By contributing to pAI-OS, you acknowledge that your contributions will be licensed under [permissive software license/s](https://en.wikipedia.org/wiki/Permissive_software_license) (currently the [MIT License](https://opensource.org/licenses/MIT)) and that you have read and signed-off on the Developer Certificate of Origin (DCO), whether or not you explicitly sign-off on each commit. 46 | 47 | ## Contact 48 | 49 | If you have any questions or need further assistance, please reach out to the project maintainers at [contact@paios.org](mailto:contact@paios.org). 50 | 51 | We appreciate your contributions to pAI-OS! 52 | 53 | ## Certification 54 | 55 | Developer Certificate of Origin 56 | Version 1.1 57 | 58 | Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 
59 | 60 | Everyone is permitted to copy and distribute verbatim copies of this 61 | license document, but changing it is not allowed. 62 | 63 | 64 | Developer's Certificate of Origin 1.1 65 | 66 | By making a contribution to this project, I certify that: 67 | 68 | (a) The contribution was created in whole or in part by me and I 69 | have the right to submit it under the open source license 70 | indicated in the file; or 71 | 72 | (b) The contribution is based upon previous work that, to the best 73 | of my knowledge, is covered under an appropriate open source 74 | license and I have the right under that license to submit that 75 | work with modifications, whether created in whole or in part 76 | by me, under the same open source license (unless I am 77 | permitted to submit under a different license), as indicated 78 | in the file; or 79 | 80 | (c) The contribution was provided directly to me by some other 81 | person who certified (a), (b) or (c) and I have not modified 82 | it. 83 | 84 | (d) I understand and agree that this project and the contribution 85 | are public and that a record of the contribution (including all 86 | personal information I submit with it, including my sign-off) is 87 | maintained indefinitely and may be redistributed consistent with 88 | this project or the open source license(s) involved. 
89 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright © 2024 pAI-OS Contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the “Software”), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Personal Artificial Intelligence Operating System (pAI-OS) 2 | 3 | ## Getting Started 4 | 5 | ### Users 6 | 7 | An easy to use release is coming soon, but for now you can follow the instructions below to get started. 8 | 9 | ### Early Adopters 10 | 11 | > 💡 **Tip:** Download and install [Python](https://www.python.org/downloads/) if you can't run it from the terminal. 12 | 13 | Open the Terminal application. 
14 | 15 | Clone the canary branch from the repository: 16 | 17 | ```sh 18 | git clone --branch canary --single-branch https://github.com/pAI-OS/paios.git 19 | ``` 20 | 21 | Setup and run the server: 22 | 23 | # Alembic 24 | 25 | Alembic is used to manage database versioning using migrations. 26 | 27 | ## Upgrade schema 28 | 29 | `alembic upgrade head` 30 | 31 | ## Downgrade schema 32 | 33 | `alembic downgrade -1` 34 | 35 | ## Update schema 36 | 37 | Update backend/models.py then run: 38 | 39 | `alembic revision --autogenerate -m "added asset table"` 40 | 41 | ** NOTE: If you get an error about an already existing table, you may want to drop the table and run 'alembic upgrade head' again. ** 42 | 43 | _POSIX (Linux/macOS/etc.)_ 44 | 45 | ```sh 46 | python3 paios/scripts/setup_environment.py (only on first run) 47 | source paios/.venv/bin/activate 48 | python3 -m paios 49 | ``` 50 | 51 | _Windows_ 52 | 53 | ```sh 54 | python .\paios\scripts\setup_environment.py (only on first run) 55 | .\paios\.venv\Scripts\Activate.ps1 56 | python -m paios 57 | ``` 58 | 59 | Visit [https://localhost:8443/](https://localhost:8443/) 60 | 61 | ### Developers 62 | 63 | Clone the entire repository 64 | 65 | ```sh 66 | git clone https://github.com/pAI-OS/paios.git 67 | ``` 68 | 69 | > 🤖 **Tip:** Early Adopters don't need to build the frontend on the canary branch because it's done automatically on every commit to main. 70 | 71 | Build the frontend: 72 | 73 | ```sh 74 | cd frontend 75 | npm run build 76 | cd .. 
77 | ``` 78 | 79 | Run the uvicorn server: 80 | 81 | ```sh 82 | python -m paios 83 | ``` 84 | 85 | Visit [https://localhost:8443/](https://localhost:8443/) 86 | 87 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /__main__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import sys 4 | import signal 5 | from pathlib import Path 6 | # Ensure the parent directory is in sys.path so relative imports work. 7 | base_dir = Path(__file__).parent 8 | if base_dir not in sys.path: 9 | sys.path.append(str(base_dir)) 10 | from common.paths import backend_dir, venv_dir, cert_dir 11 | from common.config import logging_config 12 | from backend.utils import get_env_key 13 | from common.mail import send 14 | 15 | # check environment 16 | from backend.env import check_env 17 | check_env() 18 | 19 | # set up logging 20 | from common.log import get_logger 21 | logger = get_logger(__name__) 22 | 23 | def handle_keyboard_interrupt(signum, frame): 24 | cleanup() 25 | asyncio.get_event_loop().stop() 26 | 27 | def cleanup(): 28 | # Perform any necessary cleanup here 29 | logger.info("Performing cleanup tasks.") 30 | 31 | if __name__ == "__main__": 32 | # Set up signal handlers 33 | #signal.signal(signal.SIGINT, handle_keyboard_interrupt) 34 | #signal.signal(signal.SIGTERM, handle_keyboard_interrupt) 35 | 36 | # Ensure certificates are generated 37 | from common.cert import check_cert 38 | check_cert() 39 | 40 | # Create the app 41 | logger.info("Creating the app.") 42 | from app import create_app 43 | app = create_app() 44 | 45 | # Define host and port 46 | host = get_env_key("PAIOS_HOST", "localhost") 47 | port = int(get_env_key("PAIOS_PORT", 8443)) 48 | 49 | # Log connection details 50 | 
logger.info(f"You can access pAI-OS at https://{host}:{port}.") 51 | #asyncio.run(send("samj@samj.net", "pAI-OS started up", f"You can access pAI-OS at https://{host}:{port}.")) 52 | logger.info("Bypass certificate warnings if using self-signed certificates.") 53 | 54 | # Run the app 55 | import uvicorn 56 | 57 | cert_path = cert_dir / "cert.pem" 58 | key_path = cert_dir / "key.pem" 59 | 60 | logger.info("Running the app with uvicorn.") 61 | try: 62 | uvicorn.run( 63 | "app:create_app", 64 | host=host, 65 | port=port, 66 | factory=True, 67 | workers=1, 68 | reload=True, 69 | reload_dirs=[backend_dir], 70 | reload_excludes=[venv_dir], 71 | log_config=logging_config, 72 | ssl_certfile=str(cert_path), 73 | ssl_keyfile=str(key_path), 74 | #ssl_keyfile_password=key_passphrase # Pass the passphrase if the key is encrypted 75 | ) 76 | except PermissionError as e: 77 | logger.error(f"Permission error: {e}. Ensure the application has access to the certificate and key files.") 78 | except KeyboardInterrupt: 79 | #handle_keyboard_interrupt(None, None) 80 | pass 81 | finally: 82 | cleanup() 83 | -------------------------------------------------------------------------------- /abilities/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore pAI-OS installed files 2 | installing 3 | installed 4 | upgrading 5 | uninstalling 6 | -------------------------------------------------------------------------------- /abilities/README.md: -------------------------------------------------------------------------------- 1 | # pAI-OS Abilities 2 | 3 | The Personal Artificial Intelligence Operating System (pAI-OS) abilities directory is a library of modular components, known as "abilities," which extend the core functionality of pAI-OS. Each ability is housed in its own directory and serves a specific purpose, from data storage solutions to protocol support and advanced AI integrations. 
4 | 5 | ## Overview 6 | 7 | Abilities in paios are akin to plugins in WordPress. They are designed to be easily integrated into the core system to provide additional features and capabilities. They range from foundational utilities like vector and relational storage handlers to cutting-edge functionalities such as Solid protocol integration, optical character recognition (OCR), Retrieve-Augment-Generate (RAG) orchestration, and large language model (LLM) interfaces. 8 | 9 | ## Structure 10 | 11 | In this `abilities` folder, you'll find a collection of subdirectories, each named after the ability it represents (for example, `pgsql` for PostgreSQL support or `openai` for OpenAI's API integration). Within each subdirectory, the ability is fully contained, including all necessary code, documentation, and configurations needed for pAI-OS to utilize it. 12 | 13 | ## Getting Started 14 | 15 | To use an ability, you should first navigate to its respective directory. There you will find a dedicated README.md file with detailed instructions on how to install, configure, and use the ability within your instance of pAI-OS. 16 | 17 | ### Installation 18 | 19 | Each ability typically includes an `install` script or an installation guide with instructions on integrating it with pAI-OS. The general steps to install an ability are as follows: 20 | 21 | 1. Clone the paios repository. 22 | 2. Navigate to the `abilities` directory: `cd abilities/` 23 | 3. Enter the directory of the desired ability: `cd /` 24 | 4. Follow the installation instructions specific to the ability. 25 | 26 | ### Configuration 27 | 28 | Many abilities will require some form of configuration to function correctly. This may include setting environment variables, editing configuration files, or providing necessary API keys. Specific configuration guidelines are provided within each ability's directory. 
29 | 30 | ### Usage 31 | 32 | After installation and configuration, the ability can be utilized as part of the pAI-OS. Detailed usage instructions will be provided in the ability-specific README.md. 33 | 34 | ## Available Abilities 35 | 36 | Here are some examples of abilities we expect to be made available within pAI-OS: 37 | 38 | - `pgsql` - Adds PostgreSQL relational storage capability. 39 | - `openai` - Integrates with OpenAI's API for LLM features. 40 | - `chromadb` - Provides efficient vector data storage solutions. 41 | - `solid` - Supports interoperability with Solid protocol. 42 | - `ocr` - Implements optical character recognition tools. 43 | - `rag_orchestration` - Manages RAG orchestration for efficient task management. 44 | 45 | _Note: This list will grow as more abilities are developed and added to the repository._ 46 | 47 | ## Contributing 48 | 49 | pAI-OS thrives on community contributions. If you have created an ability that you believe would benefit the pAI-OS community, please see the CONTRIBUTING.md file for guidelines on how to submit your ability for inclusion in this directory. 50 | 51 | For questions, suggestions, or contributions, please open an issue or submit a pull request. 52 | 53 | Happy building! 54 | -------------------------------------------------------------------------------- /abilities/airllm/0.1.0/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from airllm import AutoModel 3 | 4 | MAX_LENGTH = 128 5 | # could use hugging face model repo id: 6 | model = AutoModel.from_pretrained("garage-bAInd/Platypus2-70B-instruct") 7 | 8 | # or use model's local path... 
9 | #model = AutoModel.from_pretrained("/home/ubuntu/.cache/huggingface/hub/models--garage-bAInd--Platypus2-70B-instruct/snapshots/b585e74bcaae02e52665d9ac6d23f4d0dbc81a0f") 10 | 11 | input_text = [ 12 | 'What is the capital of United States?', 13 | #'I like', 14 | ] 15 | 16 | input_tokens = model.tokenizer(input_text, 17 | return_tensors="pt", 18 | return_attention_mask=False, 19 | truncation=True, 20 | max_length=MAX_LENGTH, 21 | padding=False) 22 | 23 | generation_output = model.generate( 24 | input_tokens['input_ids'].cuda(), 25 | max_new_tokens=20, 26 | use_cache=True, 27 | return_dict_in_generate=True) 28 | 29 | output = model.tokenizer.decode(generation_output.sequences[0]) 30 | 31 | print(output) 32 | -------------------------------------------------------------------------------- /abilities/airllm/0.1.0/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "airllm", 3 | "name": "AirLLM", 4 | "versions": { 5 | "package": "0.1.0", 6 | "product": "2.8" 7 | }, 8 | "description": "AirLLM optimizes inference memory usage, allowing 70B large language models to run inference on a single 4GB GPU card.", 9 | "author": { 10 | "name": "Gavin Li", 11 | "url": "https://github.com/lyogavin/Anima/tree/main/air_llm" 12 | }, 13 | "maintainer": { 14 | "name": "Sam Johnston", 15 | "email": "samj@samj.net", 16 | "url": "http://samjohnston.org" 17 | }, 18 | "language": "python", 19 | "scripts": { 20 | "start": "main.py" 21 | }, 22 | "license": "Apache-2.0", 23 | "dependencies": [ 24 | { 25 | "type": "python", 26 | "id": "airllm", 27 | "name": "AirLLM", 28 | "versions": 29 | { 30 | "required": ">=2.8" 31 | } 32 | }, 33 | { 34 | "type": "python", 35 | "id": "mlx", 36 | "name": "MLX", 37 | "versions": 38 | { 39 | "required": ">=0.12" 40 | } 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /abilities/ansible/0.1.0/metadata.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "ansible", 3 | "name": "Ansible", 4 | "versions": { 5 | "package": "0.1.0", 6 | "product": "9" 7 | }, 8 | "description": "IT automation engine that automates provisioning, configuration management, application deployment, orchestration, and many other IT processes.", 9 | "author": { 10 | "name": "Ansible Project", 11 | "url": "https://ansible.com/" 12 | }, 13 | "maintainer": { 14 | "name": "Sam Johnston", 15 | "email": "samj@samj.net", 16 | "url": "http://samjohnston.org" 17 | }, 18 | "language": "python", 19 | "license": "GPL-3.0-or-later", 20 | "dependencies": [ 21 | { 22 | "type": "python", 23 | "id": "ansible", 24 | "name": "Ansible", 25 | "versions": { 26 | "required": ">=9.0.0" 27 | } 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /abilities/aws/0.1.0/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "aws", 3 | "name": "Amazon Web Services", 4 | "versions": { 5 | "package": "0.1.0", 6 | "product": "1.35" 7 | }, 8 | "description": "Amazon Web Services (AWS) integration", 9 | "author": { 10 | "name": "Amazon Web Services (AWS)", 11 | "url": "https://aws.amazon.com/sdk-for-python/" 12 | }, 13 | "maintainer": { 14 | "name": "Sam Johnston", 15 | "email": "samj@samj.net", 16 | "url": "https://samjohnston.org" 17 | }, 18 | "language": "python", 19 | "license": "MIT", 20 | "dependencies": [ 21 | { 22 | "type": "python", 23 | "id": "boto3", 24 | "name": "Boto3", 25 | "versions": { 26 | "required": ">=1.35.0" 27 | } 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /abilities/beautifulsoup/0.1.0/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "beautifulsoup", 3 | "name": "Beautiful Soup", 4 | "versions": { 5 | "package": "0.1.0", 6 | 
"product": "4" 7 | }, 8 | "description": "Screen-scraping library", 9 | "author": { 10 | "name": "Leonard Richardson", 11 | "url": "https://www.crummy.com/software/BeautifulSoup/" 12 | }, 13 | "maintainer": { 14 | "name": "Sam Johnston", 15 | "email": "samj@samj.net", 16 | "url": "http://samjohnston.org" 17 | }, 18 | "repository": { 19 | "type": "git", 20 | "url": "https://git.launchpad.net/beautifulsoup" 21 | }, 22 | "language": "python", 23 | "license": "MIT", 24 | "dependencies": [ 25 | { 26 | "type": "python", 27 | "id": "beautifulsoup4", 28 | "name": "Beautiful Soup", 29 | "versions": { 30 | "required": ">=4" 31 | } 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /abilities/beautifulsoup/0.1.1/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "beautifulsoup", 3 | "name": "Beautiful Soup", 4 | "versions": { 5 | "package": "0.1.1", 6 | "product": "4" 7 | }, 8 | "description": "Python library for pulling data out of HTML and XML files.", 9 | "author": { 10 | "name": "Leonard Richardson", 11 | "url": "https://www.crummy.com/software/BeautifulSoup/" 12 | }, 13 | "maintainer": { 14 | "name": "Sam Johnston", 15 | "email": "samj@samj.net", 16 | "url": "http://samjohnston.org" 17 | }, 18 | "repository": { 19 | "type": "git", 20 | "url": "https://git.launchpad.net/beautifulsoup" 21 | }, 22 | "language": "python", 23 | "license": "MIT", 24 | "dependencies": [ 25 | { 26 | "type": "python", 27 | "id": "beautifulsoup4", 28 | "name": "Beautiful Soup", 29 | "versions": { 30 | "required": ">=4" 31 | } 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /abilities/chroma/0.1.0/README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /abilities/chroma/0.1.0/metadata.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "chroma", 3 | "name": "Chroma embedding database", 4 | "versions": { 5 | "package": "0.1.0", 6 | "product": "0.5" 7 | }, 8 | "description": "The open-source embedding database. The fastest way to build Python or JavaScript LLM apps with memory!", 9 | "author": { 10 | "name": "Chroma", 11 | "url": "https://www.trychroma.com/" 12 | }, 13 | "maintainer": { 14 | "name": "Sam Johnston", 15 | "email": "samj@samj.net", 16 | "url": "http://samjohnston.org" 17 | }, 18 | "repository": { 19 | "type": "git", 20 | "url": "https://github.com/chroma-core/chroma.git" 21 | }, 22 | "language": "python", 23 | "license": "Apache-2.0", 24 | "scripts": { 25 | "start": "chroma run" 26 | }, 27 | "dependencies": [ 28 | { 29 | "id": "chromadb", 30 | "type": "python", 31 | "name": "ChromaDB", 32 | "versions": { 33 | "required": ">=0.5.0" 34 | } 35 | } 36 | ] 37 | } 38 | -------------------------------------------------------------------------------- /abilities/llama_cpp/0.1.0/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "llama_cpp", 3 | "name": "llama.cpp", 4 | "versions": { 5 | "package": "0.1.0", 6 | "product": "3091" 7 | }, 8 | "description": "LLM inference in C/C++", 9 | "repository": { 10 | "type": "git", 11 | "url": "https://github.com/ggerganov/llama.cpp.git" 12 | }, 13 | "author": { 14 | "name": "ggml authors", 15 | "url": "https://github.com/ggerganov/llama.cpp" 16 | }, 17 | "maintainer": { 18 | "name": "Sam Johnston", 19 | "email": "samj@samj.net", 20 | "url": "http://samjohnston.org" 21 | }, 22 | "language": "c++", 23 | "license": "MIT", 24 | "dependencies": [ 25 | { 26 | "type": "python", 27 | "id": "fetch_llama_cpp", 28 | "name": "fetch_llama_cpp", 29 | "description": "Fetches the latest and best version of llama.cpp for your system.", 30 | "versions": { 31 | "required": ">=0.1.0" 32 | } 33 | } 34 | ] 35 | } 36 | 
-------------------------------------------------------------------------------- /abilities/llamafile/0.1.0/.gitignore: -------------------------------------------------------------------------------- 1 | # ignore models 2 | *.zip 3 | *.llamafile 4 | 5 | # ignore logs 6 | llama.log 7 | -------------------------------------------------------------------------------- /abilities/requirements.txt: -------------------------------------------------------------------------------- 1 | jsonschema 2 | 3 | -------------------------------------------------------------------------------- /abilities/sample-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "my_ability", 3 | "versions": { 4 | "package": "0.1.0", 5 | "product": "0.2" 6 | }, 7 | "description": "A sample ability", 8 | "main": "main.py", 9 | "repository": { 10 | "type": "git", 11 | "url": "git+https://github.com/username/repository.git" 12 | }, 13 | "author": { 14 | "name": "Your Name", 15 | "email": "your.email@example.com", 16 | "url": "http://your-website.com" 17 | }, 18 | "maintainer": { 19 | "name": "Your Name", 20 | "email": "your.email@example.com", 21 | "url": "http://your-website.com" 22 | }, 23 | "license": "MIT", 24 | "scripts": { 25 | "start": "python3 main.py arg1 arg2", 26 | "test": "pytest" 27 | }, 28 | "dependencies": { 29 | "abilities": [ 30 | { 31 | "name": "chroma", 32 | "version": "1.0.0" 33 | } 34 | ], 35 | "container": [ 36 | { 37 | "name": "tensorflow/tensorflow", 38 | "version": "2.5.0", 39 | "priority": "recommended" 40 | } 41 | ], 42 | "linux": [ 43 | { 44 | "name": "ssl-development", 45 | "version": "1.1.1", 46 | "priority": "required", 47 | "packages": [ 48 | { 49 | "debian": "libssl-dev", 50 | "fedora": "openssl-devel" 51 | } 52 | ] 53 | } 54 | ], 55 | "python": [ 56 | { 57 | "name": "numpy", 58 | "version": "1.21.0", 59 | "priority": "recommended" 60 | } 61 | ], 62 | "resource": [ 63 | { 64 | "name": "llava.llamafile", 65 | 
"url": "https://example.com/models/llava.llamafile", 66 | "filename": "models/llava.llamafile" 67 | } 68 | ] 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /abilities/validate-metadata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import json 3 | from jsonschema import validate 4 | import sys 5 | import os.path 6 | 7 | if len(sys.argv) != 2: 8 | print("Usage: validate-metadata.py ") 9 | sys.exit(1) 10 | 11 | ability = sys.argv[1] 12 | metadata_file = os.path.join(ability, "metadata.json") 13 | 14 | # Load the JSON Schema 15 | with open("schema-metadata.json", "r") as file: 16 | schema = json.load(file) 17 | 18 | # Load the metadata.json file to validate 19 | with open(metadata_file, "r") as file: 20 | metadata = json.load(file) 21 | 22 | # Validate the metadata against the schema 23 | validate(instance=metadata, schema=schema) 24 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = migrations 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 
19 | # If specified, requires the python>=3.9 or backports.zoneinfo library. 20 | # Any required deps can installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to ZoneInfo() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to migrations/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
53 | 54 | # set to 'true' to search source files recursively 55 | # in each "version_locations" directory 56 | # new in Alembic version 1.10 57 | # recursive_version_locations = false 58 | 59 | # the output encoding used when revision files 60 | # are written from script.py.mako 61 | # output_encoding = utf-8 62 | 63 | sqlalchemy.url = sqlite:///./data/paios.db 64 | 65 | 66 | [post_write_hooks] 67 | # post_write_hooks defines scripts or Python functions that are run 68 | # on newly generated revision scripts. See the documentation for further 69 | # detail and examples 70 | 71 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 72 | # hooks = black 73 | # black.type = console_scripts 74 | # black.entrypoint = black 75 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 76 | 77 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 78 | # hooks = ruff 79 | # ruff.type = exec 80 | # ruff.executable = %(here)s/.venv/bin/ruff 81 | # ruff.options = --fix REVISION_SCRIPT_FILENAME 82 | 83 | # Logging configuration 84 | [loggers] 85 | keys = root,sqlalchemy,alembic 86 | 87 | [handlers] 88 | keys = console 89 | 90 | [formatters] 91 | keys = generic 92 | 93 | [logger_root] 94 | level = WARN 95 | handlers = console 96 | qualname = 97 | 98 | [logger_sqlalchemy] 99 | level = WARN 100 | handlers = 101 | qualname = sqlalchemy.engine 102 | 103 | [logger_alembic] 104 | level = INFO 105 | handlers = 106 | qualname = alembic 107 | 108 | [handler_console] 109 | class = StreamHandler 110 | args = (sys.stderr,) 111 | level = NOTSET 112 | formatter = generic 113 | 114 | [formatter_generic] 115 | format = %(levelname)-5.5s [%(name)s] %(message)s 116 | datefmt = %H:%M:%S 117 | -------------------------------------------------------------------------------- /apis/paios/README.md: -------------------------------------------------------------------------------- 1 | # pAI-OS API 2 | 3 | ## Overview 4 | 5 | The pAI-OS API allows 
management interfaces to communicate with the backend services for functionality like enumerating, configuring, and activiating plugins ("abilities"). 6 | 7 | ## Editing 8 | 9 | The pAI-OS API is specified in OpenAPI format and was created in Stoplight. 10 | 11 | ## Mocking 12 | 13 | APIs can be mocked locally using tools like Stoplight's [Prism](https://github.com/stoplightio/prism), which can be installed as follows: 14 | 15 | npm install -g @stoplight/prism-cli 16 | 17 | To start the local prism server: 18 | 19 | prism proxy apis/paios/openapi.yaml 20 | 21 | prism can also load the API directly from the repo: 22 | 23 | prism proxy 'https://raw.githubusercontent.com/pAI-OS/Paios/main/apis/paios/openapi.yaml' 24 | -------------------------------------------------------------------------------- /apis/paios/abilities.http: -------------------------------------------------------------------------------- 1 | # API testing with Visual Studio Code - REST Client Extention 2 | 3 | POST https://localhost:8443/api/v1/abilities/chroma/start 4 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 5 | Content-Type: application/json 6 | 7 | ### 8 | 9 | POST https://localhost:8443/api/v1/abilities/chroma/stop 10 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 11 | Content-Type: application/json 12 | -------------------------------------------------------------------------------- /apis/paios/asset.http: -------------------------------------------------------------------------------- 1 | # API testing with Visual Studio Code - REST Client Extention 2 | 3 | POST https://localhost:8443/api/v1/assets 4 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 5 | Content-Type: application/json 6 | 7 | { 8 | "title": "Attention Is All You Need", 9 | "creator": "Ashish Vaswani et al", 10 | "subject": "Artificial Intelligence", 11 | "description": "We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely." 
12 | } 13 | 14 | ### 15 | 16 | PUT https://localhost:8443/api/v1/assets/1cbb0bc5-bae2-4b9d-9555-f2282f767047 17 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 18 | Content-Type: application/json 19 | 20 | { 21 | "title": "Generative Adversarial Networks", 22 | "creator": "Goodfellow et al", 23 | "subject": "Artificial Intelligence", 24 | "description": "We propose a new framework for estimating generative models via an adversarial process, in which we simultaneously train two models: a generative model G that captures the data distribution, and a discriminative model D that estimates the probability that a sample came from the training data rather than G." 25 | } 26 | 27 | ### 28 | 29 | GET https://localhost:8443/api/v1/assets/1cbb0bc5-bae2-4b9d-9555-f2282f767047 30 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 31 | Content-Type: application/json 32 | 33 | ### 34 | 35 | DELETE https://localhost:8443/api/v1/assets/1cbb0bc5-bae2-4b9d-9555-f2282f767047 36 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 37 | Content-Type: application/json 38 | -------------------------------------------------------------------------------- /apis/paios/config.http: -------------------------------------------------------------------------------- 1 | # API testing with Visual Studio Code - REST Client Extention 2 | 3 | PUT https://localhost:8443/api/v1/config/openapi-key 4 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 5 | Content-Type: application/json 6 | 7 | "abc123" 8 | 9 | ### 10 | 11 | GET https://localhost:8443/api/v1/config/openapi-key 12 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 13 | Content-Type: application/json 14 | -------------------------------------------------------------------------------- /apis/paios/downloads.http: -------------------------------------------------------------------------------- 1 | ### Test single large download 2 | 3 | POST https://localhost:8443/api/v1/downloads 4 | Content-Type: application/json 5 | 6 | [ 7 | { 8 | "source_url": 
"https://releases.ubuntu.com/24.04/ubuntu-24.04-desktop-amd64.iso", 9 | "file_name": "ubuntu-24.04-desktop-amd64.iso" 10 | } 11 | ] 12 | 13 | ### Test single small download 14 | 15 | POST https://localhost:8443/api/v1/downloads 16 | Content-Type: application/json 17 | 18 | [ 19 | { 20 | "source_url": "https://cdimage.debian.org/debian-cd/current/amd64/iso-cd/debian-12.5.0-amd64-netinst.iso", 21 | "file_name": "debian-12.5.0-amd64-netinst.iso" 22 | } 23 | ] 24 | 25 | ### Test download with target directory specified 26 | 27 | POST https://localhost:8443/api/v1/downloads 28 | Content-Type: application/json 29 | 30 | [ 31 | { 32 | "source_url": "https://cdimage.debian.org/debian-cd/current/amd64/iso-cd/debian-12.5.0-amd64-netinst.iso", 33 | "file_name": "debian-12.5.0-amd64-netinst.iso", 34 | "target_directory": "downloads_test_target" 35 | } 36 | ] 37 | 38 | ### Test download without filename specified 39 | 40 | POST https://localhost:8443/api/v1/downloads 41 | Content-Type: application/json 42 | 43 | [ 44 | { 45 | "source_url": "https://cdimage.debian.org/debian-cd/current/amd64/iso-cd/debian-12.5.0-amd64-netinst.iso" 46 | } 47 | ] 48 | 49 | ### Test multiple downloads 50 | 51 | POST https://localhost:8443/api/v1/downloads 52 | Content-Type: application/json 53 | 54 | [ 55 | { "source_url": "https://releases.ubuntu.com/24.04/ubuntu-24.04-desktop-amd64.iso", "file_name": "ubuntu-24.04-desktop-amd64.iso" }, 56 | { "source_url": "https://cdimage.debian.org/debian-cd/current/amd64/iso-cd/debian-12.5.0-amd64-netinst.iso", "file_name": "debian-12.5.0-amd64-netinst.iso" } 57 | ] 58 | 59 | ### 60 | 61 | POST https://localhost:8443/api/v1/downloads 62 | Content-Type: application/json 63 | 64 | [ 65 | { 66 | "source_url": "https://releases.ubuntu.com/24.04/ubuntu-24.04-desktop-amd64.iso", 67 | "file_name": "ubuntu-24.04-desktop-amd64.iso" 68 | } 69 | ] 70 | 71 | ### 72 | 73 | POST https://localhost:8443/api/v1/downloads 74 | Content-Type: application/json 75 | 76 | [ 77 | { 
78 | "source_url": "https://cdimage.debian.org/debian-cd/current/amd64/iso-cd/debian-12.5.0-amd64-netinst.iso", 79 | "file_name": "ubuntu-24.04-desktop-amd64.iso", 80 | "file_hash": "sha512:33c08e56c83d13007e4a5511b9bf2c4926c4aa12fd5dd56d493c0653aecbab380988c5bf1671dbaea75c582827797d98c4a611f7fb2b131fbde2c677d5258ec9" 81 | } 82 | ] 83 | 84 | ### Path traversal attack 85 | 86 | POST https://localhost:8443/api/v1/downloads 87 | Content-Type: application/json 88 | 89 | [ 90 | { 91 | "source_url": "https://example.com/passwd", 92 | "file_name": "../../etc" 93 | } 94 | ] 95 | 96 | ### 97 | 98 | POST https://localhost:8443/api/v1/downloads/ac56536c-96a1-4dda-a664-d2262cbf5c7e/pause 99 | Content-Type: application/json 100 | 101 | ### 102 | 103 | POST https://localhost:8443/api/v1/downloads/ac56536c-96a1-4dda-a664-d2262cbf5c7e/resume 104 | Content-Type: application/json 105 | 106 | ### 107 | 108 | GET https://localhost:8443/api/v1/downloads/4fd98231-02ab-4321-ac57-d9dc96fa6d01 109 | Content-Type: application/json 110 | 111 | ### 112 | 113 | DELETE https://localhost:8443/api/v1/downloads/83bdac3a-e5de-4783-a9b9-dabdcae2a1d2 114 | Content-Type: application/json 115 | -------------------------------------------------------------------------------- /apis/paios/options.http: -------------------------------------------------------------------------------- 1 | OPTIONS https://localhost:8443/api/v1/users 2 | Origin: http://localhost 3 | Access-Control-Request-Method: GET 4 | -------------------------------------------------------------------------------- /apis/paios/personas.http: -------------------------------------------------------------------------------- 1 | # API testing with Visual Studio Code - REST Client Extention 2 | 3 | POST https://localhost:8443/api/v1/personas 4 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 5 | Content-Type: application/json 6 | 7 | { 8 | "name": "Attention Is All You Need", 9 | "description": "Ashish Vaswani et al", 10 | "voice_id": "Artificial 
Intelligence", 11 | "face_id": "We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely." 12 | } 13 | 14 | ### 15 | 16 | PUT https://localhost:8443/api/v1/personas/1cbb0bc5-bae2-4b9d-9555-f2282f767047 17 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 18 | Content-Type: application/json 19 | 20 | { 21 | "name": "Generative Adversarial Networks", 22 | "description": "Goodfellow et al", 23 | "voice_id": "Artificial Intelligence", 24 | "face_id": "We propose a new framework for estimating generative models via an adversarial process, in which we simultaneously train two models: a generative model G that captures the data distribution, and a discriminative model D that estimates the probability that a sample came from the training data rather than G." 25 | } 26 | 27 | ### 28 | 29 | GET https://localhost:8443/api/v1/personas/1cbb0bc5-bae2-4b9d-9555-f2282f767047 30 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 31 | Content-Type: application/json 32 | 33 | ### 34 | 35 | DELETE https://localhost:8443/api/v1/personas/1cbb0bc5-bae2-4b9d-9555-f2282f767047 36 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 37 | Content-Type: application/json -------------------------------------------------------------------------------- /apis/paios/resource.http: -------------------------------------------------------------------------------- 1 | # API testing with Visual Studio Code - REST Client Extention 2 | 3 | POST https://localhost:8443/api/v1/resources 4 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 5 | Content-Type: application/json 6 | 7 | { 8 | "name": "My Resource", 9 | "uri": "https://localhost:8443/resources/my-resource" 10 | } 11 | 12 | ### 13 | PUT https://localhost:8443/api/v1/resources/llm-api 14 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 15 | Content-Type: application/json 16 | 17 | { 18 | "id": "llm-api", 19 | "name": "LLM API", 20 | "uri": 
"https://localhost:8443/resources/llm-api" 21 | } 22 | 23 | ### 24 | PUT https://localhost:8443/api/v1/resources/imap-sync 25 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 26 | Content-Type: application/json 27 | 28 | { 29 | "id": "imap-sync", 30 | "name": "E-mail Sync (IMAP)", 31 | "uri": "imap://imap.gmail.com:993" 32 | } 33 | 34 | ### 35 | 36 | DELETE https://localhost:8443/api/v1/resources/llm-api 37 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 38 | Content-Type: application/json 39 | 40 | ### 41 | 42 | GET https://localhost:8443/api/v1/resources/llm-api 43 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 44 | Content-Type: application/json 45 | 46 | ### 47 | 48 | GET https://localhost:8443/api/v1/resources 49 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 50 | Content-Type: application/json 51 | -------------------------------------------------------------------------------- /apis/paios/users.http: -------------------------------------------------------------------------------- 1 | # API testing with Visual Studio Code - REST Client Extention 2 | 3 | POST https://localhost:8443/api/v1/users 4 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 5 | Content-Type: application/json 6 | 7 | { 8 | "name": "Tony Rogerson", 9 | "email": "tony.rogerson@gmail.com" 10 | } 11 | 12 | ### 13 | PUT https://localhost:8443/api/v1/users/5bae4a90-ce69-4483-86ef-d723258f21e5 14 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 15 | Content-Type: application/json 16 | 17 | { 18 | "id": "5bae4a90-ce69-4483-86ef-d723258f21e5", 19 | "name": "Alice Smith", 20 | "email": "alice.smith@gmail.com" 21 | } 22 | 23 | ### 24 | PUT https://localhost:8443/api/v1/users/1cbb0bc5-bae2-4b9d-9555-f2282f767047 25 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 26 | Content-Type: application/json 27 | 28 | { 29 | "id": "1cbb0bc5-bae2-4b9d-9555-f2282f767047", 30 | "name": "Bob Jones", 31 | "email": "bob.jones@gmail.com" 32 | } 33 | 34 | ### 35 | 36 | DELETE 
https://localhost:8443/api/v1/users/5bae4a90-ce69-4483-86ef-d723258f21e5 37 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 38 | Content-Type: application/json 39 | 40 | ### 41 | 42 | DELETE https://localhost:8443/api/v1/users/1cbb0bc5-bae2-4b9d-9555-f2282f767047 43 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 44 | Content-Type: application/json 45 | 46 | ### 47 | 48 | GET https://localhost:8443/api/v1/users/5bae4a90-ce69-4483-86ef-d723258f21e5 49 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 50 | Content-Type: application/json 51 | 52 | ### 53 | 54 | GET https://localhost:8443/api/v1/users/1cbb0bc5-bae2-4b9d-9555-f2282f767047 55 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 56 | Content-Type: application/json 57 | 58 | ### 59 | 60 | GET https://localhost:8443/api/v1/users 61 | Authorization: Bearer {{PAIOS_BEARER_TOKEN}} 62 | Content-Type: application/json 63 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from backend.app import create_backend_app 3 | from starlette.staticfiles import StaticFiles 4 | from backend.redirector import redirector 5 | 6 | # set up logging 7 | from common.log import get_logger 8 | logger = get_logger(__name__) 9 | 10 | def create_app(): 11 | app = create_backend_app() 12 | add_redirector_app(app) 13 | add_frontend_app(app) 14 | return app 15 | 16 | def add_frontend_app(app): 17 | # Add a route for serving static files if the frontend dist directory exists 18 | static_dir = Path(__file__).parent / 'frontend' / 'dist' 19 | if static_dir.is_dir(): 20 | # Add a route for serving static files only if the directory exists 21 | app.add_url_rule( 22 | '/{path:path}', 23 | endpoint='frontend', 24 | view_func=StaticFiles(directory=static_dir, check_dir=False, html=True) 25 | ) 26 | else: 27 | logger.info(f"Skipping serving frontend: {static_dir} directory not found. 
Options:") 28 | logger.info("- Use 'npm run build' to generate the frontend") 29 | logger.info("- Use 'npm run dev' to run it separately") 30 | logger.info("- Use the API only") 31 | 32 | def add_redirector_app(app): 33 | # Add a route for handling URL redirection for bot access 34 | app.add_url_rule( 35 | '/r/{url_key:str}', 36 | endpoint='redirector', 37 | view_func=redirector 38 | ) 39 | 40 | -------------------------------------------------------------------------------- /backend/__init__.py: -------------------------------------------------------------------------------- 1 | # backend package 2 | import os 3 | from common import paths 4 | from dotenv import load_dotenv 5 | 6 | # Load environment variables 7 | load_dotenv(paths.base_dir / '.env') 8 | 9 | # List of directories to ensure exist 10 | required_directories = [ 11 | paths.data_dir 12 | ] 13 | 14 | # Create directories if they do not exist 15 | for directory in required_directories: 16 | if not os.path.exists(directory): 17 | os.makedirs(directory) 18 | -------------------------------------------------------------------------------- /backend/__main__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import signal 4 | import asyncio 5 | from pathlib import Path 6 | 7 | # Ensure the parent directory is in sys.path so relative imports work. 8 | base_dir = Path(__file__).parent 9 | if base_dir not in sys.path: 10 | sys.path.append(str(base_dir)) 11 | from common.paths import backend_dir, venv_dir, cert_dir 12 | from common.config import logging_config 13 | from backend.utils import get_env_key 14 | 15 | # check environment 16 | from backend.env import check_env 17 | check_env() 18 | 19 | # set up logging 20 | from common.log import get_logger 21 | logger = get_logger(__name__) 22 | 23 | def handle_keyboard_interrupt(signum, frame): 24 | print(f"KeyboardInterrupt (ID: {signum}) has been caught. 
Cleaning up...") 25 | cleanup() 26 | asyncio.get_event_loop().stop() 27 | 28 | def cleanup(): 29 | # Perform any necessary cleanup here 30 | logger.info("Performing cleanup tasks.") 31 | 32 | if __name__ == "__main__": 33 | # Set up signal handlers 34 | signal.signal(signal.SIGINT, handle_keyboard_interrupt) 35 | signal.signal(signal.SIGTERM, handle_keyboard_interrupt) 36 | 37 | # Ensure certificates are generated 38 | from common.cert import check_cert 39 | check_cert() 40 | 41 | # Create the app 42 | logger.info("Creating the app.") 43 | from app import create_app 44 | app = create_app() 45 | 46 | # Define host and port 47 | host = get_env_key("PAIOS_HOST", "localhost") 48 | port = int(get_env_key("PAIOS_PORT", 8443)) 49 | 50 | # Log connection details 51 | logger.info(f"You can access pAI-OS at https://{host}:{port}.") 52 | logger.info("Bypass certificate warnings if using self-signed certificates.") 53 | 54 | # Run the app 55 | import uvicorn 56 | 57 | cert_path = cert_dir / "cert.pem" 58 | key_path = cert_dir / "key.pem" 59 | 60 | logger.info("Running the app with uvicorn.") 61 | try: 62 | uvicorn.run( 63 | "app:create_app", 64 | host=host, 65 | port=port, 66 | factory=True, 67 | workers=1, 68 | reload=True, 69 | reload_dirs=[backend_dir], 70 | reload_excludes=[venv_dir], 71 | log_config=logging_config, 72 | ssl_certfile=str(cert_path), 73 | ssl_keyfile=str(key_path), 74 | #ssl_keyfile_password=key_passphrase # Pass the passphrase if the key is encrypted 75 | ) 76 | except PermissionError as e: 77 | logger.error(f"Permission error: {e}. 
Ensure the application has access to the certificate and key files.") 78 | except KeyboardInterrupt: 79 | pass 80 | finally: 81 | cleanup() 82 | -------------------------------------------------------------------------------- /backend/api/AssetsView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse, Response 2 | from backend.managers.AssetsManager import AssetsManager 3 | from common.paths import api_base_url 4 | from backend.pagination import parse_pagination_params 5 | from backend.schemas import AssetCreateSchema, AssetSchema 6 | from typing import List 7 | 8 | class AssetsView: 9 | def __init__(self): 10 | self.am = AssetsManager() 11 | 12 | async def get(self, id: str): 13 | asset = await self.am.retrieve_asset(id) 14 | if asset is None: 15 | return JSONResponse({"error": "Asset not found"}, status_code=404) 16 | return JSONResponse(asset.model_dump(), status_code=200) 17 | 18 | async def post(self, body: AssetCreateSchema): 19 | new_asset = await self.am.create_asset(body) 20 | return JSONResponse(new_asset.model_dump(), status_code=201, headers={'Location': f'{api_base_url}/assets/{new_asset.id}'}) 21 | 22 | async def put(self, id: str, body: AssetCreateSchema): 23 | updated_asset = await self.am.update_asset(id, body) 24 | if updated_asset is None: 25 | return JSONResponse({"error": "Asset not found"}, status_code=404) 26 | return JSONResponse(updated_asset.model_dump(), status_code=200) 27 | 28 | async def delete(self, id: str): 29 | success = await self.am.delete_asset(id) 30 | if not success: 31 | return JSONResponse({"error": "Asset not found"}, status_code=404) 32 | return Response(status_code=204) 33 | 34 | async def search(self, filter: str = None, range: str = None, sort: str = None): 35 | result = parse_pagination_params(filter, range, sort) 36 | if isinstance(result, JSONResponse): 37 | return result 38 | 39 | offset, limit, sort_by, sort_order, filters = result 40 
| 41 | # Extract the free text search query 42 | query = filters.pop('q', None) 43 | 44 | assets, total_count = await self.am.retrieve_assets( 45 | limit=limit, 46 | offset=offset, 47 | sort_by=sort_by, 48 | sort_order=sort_order, 49 | filters=filters, 50 | query=query 51 | ) 52 | headers = { 53 | 'X-Total-Count': str(total_count), 54 | 'Content-Range': f'assets {offset}-{offset + len(assets) - 1}/{total_count}' 55 | } 56 | return JSONResponse([asset.model_dump() for asset in assets], status_code=200, headers=headers) 57 | -------------------------------------------------------------------------------- /backend/api/AuthView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse 2 | from backend.managers.AuthManager import AuthManager 3 | from backend.managers.CasbinRoleManager import CasbinRoleManager 4 | from backend.schemas import AuthOptionsRequest, RegistrationOptions, VerifyAuthentication, AuthenticationOptions, VerifyRegistration 5 | from connexion import request 6 | from uuid import uuid4 7 | from backend.models import Session 8 | from sqlalchemy import delete 9 | 10 | GENERIC_AUTH_ERROR = "Something went wrong" 11 | 12 | 13 | class AuthView: 14 | def __init__(self): 15 | self.am = AuthManager() 16 | self.cb = CasbinRoleManager() 17 | 18 | async def auth_options(self, body: AuthOptionsRequest): 19 | challenge, options, type = await self.am.auth_options(body["email"]) 20 | 21 | if not options: 22 | return JSONResponse({"error": GENERIC_AUTH_ERROR}, status_code=500) 23 | 24 | response = JSONResponse({"options": options, "flow": type}, status_code=200) 25 | response.set_cookie(key="challenge",value=challenge, secure=True, httponly=True, samesite='strict') 26 | return response 27 | 28 | async def webauthn_register_options(self, body: RegistrationOptions): 29 | challenge, options = await self.am.webauthn_register_options(body["email"]) 30 | 31 | if not options: 32 | return 
JSONResponse({"error": GENERIC_AUTH_ERROR}, status_code=500) 33 | 34 | response = JSONResponse({"options": options}, status_code=200) 35 | response.set_cookie(key="challenge",value=challenge, secure=True, httponly=True, samesite='strict') 36 | return response 37 | 38 | async def webauthn_register(self, body: VerifyRegistration): 39 | challenge = request.cookies.get("challenge") 40 | res = await self.am.webauthn_register(challenge, body["email"], body["user_id"], body["att_resp"]) 41 | if not res: 42 | return JSONResponse({"message": GENERIC_AUTH_ERROR}, status_code=401) 43 | 44 | response = JSONResponse({"message": "Success"}, status_code=200) 45 | response.set_cookie(key="challenge",value="", expires=0,secure=True, httponly=True, samesite='strict') 46 | 47 | return response 48 | 49 | async def webauthn_login_options(self, body: AuthenticationOptions): 50 | challenge, options = await self.am.webauthn_login_options(body["email"]) 51 | 52 | if not options: 53 | return JSONResponse({"error": GENERIC_AUTH_ERROR}, status_code=500) 54 | 55 | response = JSONResponse({"options": options}, status_code=200) 56 | response.set_cookie(key="challenge", value=challenge, secure=True, httponly=True, samesite='strict') 57 | return response 58 | 59 | async def webauthn_login(self, body: VerifyAuthentication): 60 | challenge = request.cookies.get("challenge") 61 | token, role = await self.am.webauthn_login(challenge, body["email"], body["auth_resp"]) 62 | if not token: 63 | return JSONResponse({"message": "Failed"}, status_code=401) 64 | 65 | permissions = self.cb.get_resource_access(role,"ADMIN_PORTAL") 66 | response = JSONResponse({"message": "Success", "token": token, "permissions": permissions}, status_code=200) 67 | response.set_cookie(key="challenge",value="", expires=0,secure=True, httponly=True, samesite='strict') 68 | 69 | return response 70 | 71 | async def verify_email(self, body): 72 | isValid = await self.am.verify_email(body["token"]) 73 | 74 | if not isValid: 75 | 
return JSONResponse({"message": "Email validation failed."}, status_code=400) 76 | 77 | return JSONResponse({"message": "Success"}, status_code=200) 78 | 79 | async def logout(self): 80 | session_token = request.cookies.get("session_token") 81 | if session_token: 82 | await self.am.delete_session(session_token) 83 | 84 | response = JSONResponse({"message": "Logged out successfully"}, status_code=200) 85 | response.delete_cookie(key="session_token", secure=True, httponly=True, samesite='strict') 86 | return response 87 | -------------------------------------------------------------------------------- /backend/api/ConfigView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse, Response 2 | from backend.managers.ConfigManager import ConfigManager 3 | from backend.schemas import ConfigSchema 4 | 5 | class ConfigView: 6 | def __init__(self): 7 | self.cm = ConfigManager() 8 | 9 | async def get(self, key: str): 10 | config_item = await self.cm.retrieve_config_item(key) 11 | if config_item is None: 12 | return JSONResponse(status_code=404, content={"error": "Config item not found"}) 13 | return JSONResponse(config_item.model_dump(), status_code=200) 14 | 15 | async def put(self, key: str, body: ConfigSchema): 16 | print(f"ConfigView: PUT {key}->{body}") 17 | updated_config = await self.cm.update_config_item(key, body.value) 18 | if updated_config: 19 | return JSONResponse(updated_config.model_dump(), status_code=200) 20 | return JSONResponse({"error": "Failed to update config item"}, status_code=400) 21 | 22 | async def delete(self, key: str): 23 | success = await self.cm.delete_config_item(key) 24 | if success: 25 | return Response(status_code=204) 26 | return JSONResponse({"error": "Config item not found"}, status_code=404) 27 | 28 | async def list(self): 29 | config_items = await self.cm.retrieve_all_config_items() 30 | return JSONResponse([item.model_dump() for item in config_items], 
status_code=200) 31 | 32 | async def create(self, body: ConfigSchema): 33 | new_config = await self.cm.create_config_item(body.value) 34 | return JSONResponse(new_config.model_dump(), status_code=201) 35 | -------------------------------------------------------------------------------- /backend/api/DownloadsView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import Response, JSONResponse 2 | from backend.managers.DownloadsManager import DownloadsManager 3 | from backend.pagination import parse_pagination_params 4 | 5 | class DownloadsView: 6 | def __init__(self): 7 | self.manager = DownloadsManager() 8 | 9 | async def get(self): 10 | downloads = await self.manager.retrieve_downloads() 11 | return JSONResponse(status_code=200, content=downloads) 12 | 13 | # TODO: Downloads that already exist should be rejected straight away 14 | async def post(self, body: list): 15 | if not body or not isinstance(body, list): 16 | return JSONResponse(status_code=400, content={"message": "Invalid request: body must be a list of download details"}) 17 | 18 | try: 19 | download_ids = await self.manager.queue_downloads(body) 20 | return JSONResponse(status_code=200, content=[{"id": id} for id in download_ids]) 21 | except ValueError as e: 22 | return JSONResponse(status_code=400, content={"message": str(e)}) 23 | except Exception as e: 24 | print(f"Unexpected error occurred: {str(e)}") 25 | return JSONResponse(status_code=500, content={"message": "An unexpected error occurred queuing download"}) 26 | 27 | async def put(self): 28 | return JSONResponse(status_code=501, content={"message": "Not Implemented"}) 29 | 30 | async def delete(self, id: str): 31 | await self.manager.delete_download(id) 32 | return Response(status_code=204) 33 | 34 | async def search(self, filter: str = None, range: str = None, sort: str = None): 35 | result = parse_pagination_params(filter, range, sort) 36 | if isinstance(result, JSONResponse): 37 
| return result 38 | 39 | offset, limit, sort_by, sort_order, filters = result 40 | 41 | downloads, total_count = await self.manager.retrieve_downloads(limit=limit, offset=offset) 42 | headers = { 43 | 'X-Total-Count': str(total_count), 44 | 'Content-Range': f'downloads {offset}-{offset + len(downloads) - 1}/{total_count}' 45 | } 46 | return JSONResponse(downloads, status_code=200, headers=headers) 47 | 48 | # custom functions 49 | 50 | async def pause(self, id: str): 51 | await self.manager.pause_download(id) 52 | return JSONResponse(status_code=200, content={"message": "Download paused"}) 53 | 54 | async def resume(self, id: str): 55 | await self.manager.resume_download(id) 56 | return JSONResponse(status_code=200, content={"message": "Download resumed"}) 57 | -------------------------------------------------------------------------------- /backend/api/PersonasView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse, Response 2 | from backend.managers.PersonasManager import PersonasManager 3 | from common.paths import api_base_url 4 | from backend.pagination import parse_pagination_params 5 | from backend.schemas import PersonaCreateSchema 6 | 7 | 8 | class PersonasView: 9 | def __init__(self): 10 | self.pm = PersonasManager() 11 | 12 | async def get(self, id: str): 13 | persona = await self.pm.retrieve_persona(id) 14 | if persona is None: 15 | return JSONResponse({"error": "Persona not found"}, status_code=404) 16 | return JSONResponse(persona.dict(), status_code=200) 17 | 18 | async def post(self, body: PersonaCreateSchema): 19 | id = await self.pm.create_persona(body) 20 | persona = await self.pm.retrieve_persona(id) 21 | return JSONResponse(persona.dict(), status_code=201, headers={'Location': f'{api_base_url}/personas/{id}'}) 22 | 23 | async def put(self, id: str, body: PersonaCreateSchema): 24 | await self.pm.update_persona(id, body) 25 | persona = await 
self.pm.retrieve_persona(id) 26 | if persona is None: 27 | return JSONResponse({"error": "Persona not found"}, status_code=404) 28 | return JSONResponse(persona.dict(), status_code=200) 29 | 30 | async def delete(self, id: str): 31 | success = await self.pm.delete_persona(id) 32 | if not success: 33 | return JSONResponse({"error": "Persona not found"}, status_code=404) 34 | return Response(status_code=204) 35 | 36 | async def search(self, filter: str = None, range: str = None, sort: str = None): 37 | result = parse_pagination_params(filter, range, sort) 38 | if isinstance(result, JSONResponse): 39 | return result 40 | 41 | offset, limit, sort_by, sort_order, filters = result 42 | 43 | personas, total_count = await self.pm.retrieve_personas( 44 | limit=limit, 45 | offset=offset, 46 | sort_by=sort_by, 47 | sort_order=sort_order, 48 | filters=filters 49 | ) 50 | headers = { 51 | 'X-Total-Count': str(total_count), 52 | 'Content-Range': f'personas {offset}-{offset + len(personas) - 1}/{total_count}' 53 | } 54 | return JSONResponse([persona.dict() for persona in personas], status_code=200, headers=headers) -------------------------------------------------------------------------------- /backend/api/ResourcesView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse, Response 2 | from common.paths import api_base_url 3 | from backend.managers.ResourcesManager import ResourcesManager 4 | from backend.pagination import parse_pagination_params 5 | from backend.schemas import ChannelCreateSchema 6 | from typing import List 7 | 8 | class ResourcesView: 9 | def __init__(self): 10 | self.cm = ResourcesManager() 11 | 12 | async def get(self, resource_id: str): 13 | resource = await self.cm.retrieve_resource(resource_id) 14 | if resource is None: 15 | return JSONResponse({"error": "Resource not found"}, status_code=404) 16 | return JSONResponse(resource.model_dump(), status_code=200) 17 | 18 | async 
def post(self, body: ChannelCreateSchema): 19 | new_resource = await self.cm.create_resource(body) 20 | return JSONResponse(new_resource.model_dump(), status_code=201, headers={'Location': f'{api_base_url}/resources/{new_resource.id}'}) 21 | 22 | async def put(self, resource_id: str, body: ChannelCreateSchema): 23 | updated_resource = await self.cm.update_resource(resource_id, body) 24 | if updated_resource is None: 25 | return JSONResponse({"error": "Resource not found"}, status_code=404) 26 | return JSONResponse(updated_resource.model_dump(), status_code=200) 27 | 28 | async def delete(self, resource_id: str): 29 | success = await self.cm.delete_resource(resource_id) 30 | if not success: 31 | return JSONResponse({"error": "Resource not found"}, status_code=404) 32 | return Response(status_code=204) 33 | 34 | async def search(self, filter: str = None, range: str = None, sort: str = None): 35 | result = parse_pagination_params(filter, range, sort) 36 | if isinstance(result, JSONResponse): 37 | return result 38 | 39 | offset, limit, sort_by, sort_order, filters = result 40 | 41 | resources, total_count = await self.cm.retrieve_resources(limit=limit, offset=offset, sort_by=sort_by, sort_order=sort_order, filters=filters) 42 | headers = { 43 | 'X-Total-Count': str(total_count), 44 | 'Content-Range': f'resources {offset}-{offset + len(resources) - 1}/{total_count}' 45 | } 46 | return JSONResponse([resource.model_dump() for resource in resources], status_code=200, headers=headers) 47 | -------------------------------------------------------------------------------- /backend/api/SharesView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse, Response 2 | from common.paths import api_base_url 3 | from backend.managers.SharesManager import SharesManager 4 | from backend.pagination import parse_pagination_params 5 | from datetime import datetime, timezone 6 | 7 | class SharesView: 8 | def 
__init__(self): 9 | self.slm = SharesManager() 10 | 11 | async def get(self, id: str): 12 | share = await self.slm.retrieve_share(id) 13 | if share is None: 14 | return JSONResponse(headers={"error": "Share not found"}, status_code=404) 15 | return JSONResponse(share.model_dump(), status_code=200) 16 | 17 | async def post(self, body: dict): 18 | expiration_dt = None 19 | if 'expiration_dt' in body and body['expiration_dt'] is not None: 20 | expiration_dt = datetime.fromisoformat(body['expiration_dt']).astimezone(tz=timezone.utc) 21 | user_id = None 22 | if 'user_id' in body and body['user_id']: 23 | user_id = body['user_id'] 24 | new_share = await self.slm.create_share(resource_id=body['resource_id'], 25 | user_id=user_id, 26 | expiration_dt=expiration_dt, 27 | is_revoked=False) 28 | return JSONResponse(new_share.model_dump(), status_code=201, headers={'Location': f'{api_base_url}/shares/{new_share.id}'}) 29 | 30 | async def put(self, id: str, body: dict): 31 | expiration_dt = None 32 | if 'expiration_dt' in body and body['expiration_dt'] is not None: 33 | expiration_dt = datetime.fromisoformat(body['expiration_dt']).astimezone(tz=timezone.utc) 34 | user_id = None 35 | if 'user_id' in body and body['user_id']: 36 | user_id = body['user_id'] 37 | updated_share = await self.slm.update_share(id, 38 | resource_id=body['resource_id'], 39 | user_id=user_id, 40 | expiration_dt=expiration_dt, 41 | is_revoked=body['is_revoked']) 42 | if updated_share is None: 43 | return JSONResponse({"error": "Share not found"}, status_code=404) 44 | return JSONResponse(updated_share.model_dump(), status_code=200) 45 | 46 | async def delete(self, id: str): 47 | success = await self.slm.delete_share(id) 48 | if not success: 49 | return JSONResponse({"error": "Share not found"}, status_code=404) 50 | return Response(status_code=204) 51 | 52 | async def search(self, filter: str = None, range: str = None, sort: str = None): 53 | result = parse_pagination_params(filter, range, sort) 54 | if 
isinstance(result, JSONResponse): 55 | return result 56 | 57 | offset, limit, sort_by, sort_order, filters = result 58 | 59 | shares, total_count = await self.slm.retrieve_shares(limit=limit, offset=offset, sort_by=sort_by, sort_order=sort_order, filters=filters) 60 | headers = { 61 | 'X-Total-Count': str(total_count), 62 | 'Content-Range': f'shares {offset}-{offset + len(shares) - 1}/{total_count}' 63 | } 64 | return JSONResponse([share.model_dump() for share in shares], status_code=200, headers=headers) 65 | -------------------------------------------------------------------------------- /backend/api/UsersView.py: -------------------------------------------------------------------------------- 1 | from starlette.responses import JSONResponse, Response 2 | from common.paths import api_base_url 3 | from backend.managers.UsersManager import UsersManager 4 | from backend.managers.CasbinRoleManager import CasbinRoleManager 5 | from backend.pagination import parse_pagination_params 6 | from aiosqlite import IntegrityError 7 | from functools import wraps 8 | from connexion.exceptions import Forbidden 9 | from connexion import context 10 | 11 | def check_permission(action, resourceId="user"): 12 | def decorator(f): 13 | @wraps(f) 14 | async def decorated_function(*args, **kwargs): 15 | cb = CasbinRoleManager() 16 | token_info = context.context['token_info'] 17 | if not cb.check_permissions(token_info["uid"], action, resourceId, "ADMIN_PORTAL"): 18 | raise Forbidden(detail="Insufficient permissions") 19 | 20 | return await f(*args, **kwargs) 21 | 22 | return decorated_function 23 | return decorator 24 | 25 | class UsersView: 26 | def __init__(self): 27 | self.um = UsersManager() 28 | 29 | @check_permission("show") 30 | async def get(self, id: str): 31 | user = await self.um.retrieve_user(id) 32 | if user is None: 33 | return JSONResponse(status_code=404, headers={"error": "User not found"}) 34 | return JSONResponse(user.model_dump(), status_code=200) 35 | 36 | 
@check_permission("create") 37 | async def post(self, body: dict): 38 | try: 39 | id = await self.um.create_user(body['name'], body['email']) 40 | return JSONResponse({"id": id}, status_code=201, headers={'Location': f'{api_base_url}/users/{id}'}) 41 | except IntegrityError: 42 | return JSONResponse({"message": "A user with the provided details already exists."}, status_code=400) 43 | 44 | @check_permission("edit") 45 | async def put(self, id: str, body: dict): 46 | await self.um.update_user(id, body['name'], body['email']) 47 | return JSONResponse({"message": "User updated successfully"}, status_code=200) 48 | 49 | @check_permission("delete") 50 | async def delete(self, id: str): 51 | await self.um.delete_user(id) 52 | return Response(status_code=204) 53 | 54 | @check_permission("list") 55 | async def search(self, filter: str = None, range: str = None, sort: str = None): 56 | result = parse_pagination_params(filter, range, sort) 57 | if isinstance(result, JSONResponse): 58 | return result 59 | 60 | offset, limit, sort_by, sort_order, filters = result 61 | 62 | users, total_count = await self.um.retrieve_users(limit=limit, offset=offset, sort_by=sort_by, sort_order=sort_order, filters=filters) 63 | 64 | # Convert Pydantic models to dictionaries 65 | users_dict = [user.model_dump() for user in users] 66 | 67 | headers = { 68 | 'X-Total-Count': str(total_count), 69 | 'Content-Range': f'users {offset}-{offset+len(users)}/{total_count}', 70 | 'Access-Control-Expose-Headers': 'Content-Range' 71 | } 72 | return JSONResponse(users_dict, status_code=200, headers=headers) 73 | -------------------------------------------------------------------------------- /backend/api/__init__.py: -------------------------------------------------------------------------------- 1 | # import all the views to satisfy Connexion's MethodView resolver 2 | # otherwise connexion will throw "TypeError: 'module' object is not callable" 3 | from .AbilitiesView import AbilitiesView 4 | from 
.AssetsView import AssetsView 5 | from .ResourcesView import ResourcesView 6 | from .ConfigView import ConfigView 7 | from .DownloadsView import DownloadsView 8 | from .UsersView import UsersView 9 | from .PersonasView import PersonasView 10 | from .SharesView import SharesView 11 | from .AuthView import AuthView 12 | -------------------------------------------------------------------------------- /backend/app.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | from connexion import AsyncApp 4 | from connexion.resolver import MethodResolver 5 | from connexion.middleware import MiddlewarePosition 6 | from starlette.middleware.cors import CORSMiddleware 7 | from backend.db import init_db 8 | from backend.utils import get_env_key 9 | 10 | def create_backend_app(): 11 | # Initialize the database 12 | init_db() 13 | 14 | apis_dir = Path(__file__).parent.parent / 'apis' / 'paios' 15 | connexion_app = AsyncApp(__name__, specification_dir=apis_dir) 16 | 17 | allow_origins = [ 18 | 'http://localhost:5173', # Default Vite dev server 19 | 'https://localhost:8443', # Secure port for local development 20 | ] 21 | 22 | # Add PAIOS server URL if environment variables are set 23 | paios_scheme = get_env_key('PAIOS_SCHEME', 'https') 24 | paios_host = get_env_key('PAIOS_HOST', 'localhost') 25 | paios_port = get_env_key('PAIOS_PORT', '8443') 26 | 27 | if paios_host: 28 | paios_url = f"{paios_scheme}://{paios_host}" 29 | if paios_port: 30 | paios_url += f":{paios_port}" 31 | allow_origins.append(paios_url) 32 | 33 | # Allow overriding origins from environment variables 34 | additional_origins = os.environ.get('PAIOS_ALLOW_ORIGINS') 35 | if additional_origins: 36 | allow_origins.extend(additional_origins.split(',')) 37 | 38 | # Add CORS middleware 39 | connexion_app.add_middleware( 40 | CORSMiddleware, 41 | position=MiddlewarePosition.BEFORE_EXCEPTION, 42 | allow_origins=allow_origins, 43 | 
allow_credentials=True, 44 | allow_methods=["GET","POST","PUT","DELETE","PATCH","HEAD","OPTIONS"], 45 | allow_headers=["Content-Range", "X-Total-Count"], 46 | expose_headers=["Content-Range", "X-Total-Count"], 47 | ) 48 | 49 | # Add API with validation 50 | connexion_app.add_api( 51 | 'openapi.yaml', 52 | resolver=MethodResolver('backend.api'), 53 | resolver_error=501, 54 | # TODO: Validation has a performance impact and may want to be disabled in production 55 | validate_responses=True, # Validate responses against the OpenAPI spec 56 | strict_validation=True # Validate requests strictly against the OpenAPI spec 57 | ) 58 | return connexion_app -------------------------------------------------------------------------------- /backend/db.py: -------------------------------------------------------------------------------- 1 | # database helper functions 2 | import os 3 | import logging 4 | from sqlmodel import SQLModel 5 | from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession 6 | from sqlalchemy.orm import sessionmaker 7 | from alembic import command 8 | from alembic.config import Config as AlembicConfig 9 | from common.paths import base_dir, db_path, db_url 10 | from contextlib import asynccontextmanager 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | # Define SQLModelBase class 15 | class SQLModelBase(SQLModel): 16 | pass 17 | 18 | # Create async engine 19 | engine = create_async_engine(db_url, echo=False) 20 | 21 | # Create async session factory 22 | AsyncSessionLocal = sessionmaker( 23 | bind=engine, 24 | class_=AsyncSession, 25 | expire_on_commit=False, 26 | ) 27 | 28 | # use alembic to create the database or migrate to the latest schema 29 | def init_db(): 30 | alembic_cfg = AlembicConfig() 31 | os.makedirs(db_path.parent, exist_ok=True) 32 | alembic_cfg.set_main_option("script_location", str(base_dir / "migrations")) 33 | alembic_cfg.set_main_option("sqlalchemy.url", db_url.replace("+aiosqlite", "")) # because Alembic doesn't like async 
apparently 34 | command.upgrade(alembic_cfg, "head") 35 | 36 | @asynccontextmanager 37 | async def db_session_context(): 38 | session = AsyncSessionLocal() 39 | try: 40 | yield session 41 | await session.commit() 42 | except Exception: 43 | await session.rollback() 44 | raise 45 | finally: 46 | await session.close() 47 | -------------------------------------------------------------------------------- /backend/dependencies/ContainerDependency.py: -------------------------------------------------------------------------------- 1 | from .Dependency import Dependency 2 | 3 | class ContainerDependency(Dependency): 4 | # TODO: Container dependencies need to be implemented when required 5 | 6 | def handle_exception(self, exception): 7 | super().handle_exception(exception) 8 | # Implementation for handling exception 9 | 10 | def refresh_status(self): 11 | raise NotImplementedError 12 | #container_name = self.dependency.get('name') 13 | #import docker 14 | #client = docker.from_env() 15 | #try: 16 | # container = client.containers.get(container_name) 17 | # self.dependency['status'] = container.status == 'running' 18 | #except docker.errors.NotFound: 19 | # self.dependency['status'] = False 20 | 21 | def start(self, ability, dependency): 22 | # Implementation for starting the dependency 23 | pass 24 | 25 | def stop(self, ability, dependency): 26 | # Implementation for stopping the dependency 27 | pass 28 | 29 | async def _install(self, ability, dependency): 30 | # Placeholder implementation for installing the dependency 31 | return {"message": "Dependency installation not yet implemented"} 32 | -------------------------------------------------------------------------------- /backend/dependencies/Dependency.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from backend.dependencies.DependencyState import DependencyState 3 | import threading 4 | import asyncio 5 | import logging 6 | 7 | logger = 
logging.getLogger(__name__) 8 | 9 | class Dependency(ABC): 10 | def __init__(self): 11 | pass 12 | 13 | @abstractmethod 14 | def handle_exception(self, exception): 15 | logger.error(f"Unexpected error: {exception}") 16 | return {"error": "An unexpected error occurred during dependency installation."} 17 | 18 | @abstractmethod 19 | def refresh_status(self, ability, dependency): 20 | pass 21 | 22 | @abstractmethod 23 | def start(self, ability, dependency, background=False): 24 | pass 25 | 26 | @abstractmethod 27 | def stop(self, ability, dependency, background=False): 28 | pass 29 | 30 | @abstractmethod 31 | async def _install(self, ability, dependency, background=False): 32 | pass 33 | 34 | async def install(self, ability, dependency, background=False): 35 | 36 | async def install_task(ability, dependency, background): 37 | try: 38 | logger.info(f"Started installation of dependency {dependency['id']}") 39 | await self._install(ability, dependency, background) 40 | logger.info(f"Completed installation of dependency {dependency['id']}") 41 | except Exception as e: 42 | self.handle_exception(e) 43 | 44 | if background: 45 | logger.info(f"Installation of dependency {dependency['id']} started in background") 46 | self._run_in_background(install_task, ability, dependency, background) 47 | else: 48 | logger.info(f"Installation of dependency {dependency['id']} started") 49 | return await install_task(ability, dependency, background) 50 | 51 | def _default_callback(self, result): 52 | try: 53 | if result is None: 54 | logger.info("Task completed successfully.") 55 | elif isinstance(result, dict) and 'message' in result: 56 | logger.info(result['message']) 57 | else: 58 | logger.error(f"Unexpected result: {result}") 59 | except Exception as e: 60 | logger.error(f"Error in default callback: {e}") 61 | 62 | def _run_in_background(self, task_function, *args, callback_function=None): 63 | def task_callback(loop): 64 | try: 65 | asyncio.set_event_loop(loop) 66 | result = 
loop.run_until_complete(task_function(*args)) 67 | if callback_function: 68 | callback_function(result) 69 | else: 70 | self._default_callback(result) 71 | except Exception as e: 72 | logger.error(f"Unexpected error during the background task: {e}", exc_info=True) 73 | if callback_function: 74 | callback_function({"message": f"An unexpected error occurred: {str(e)}"}) 75 | else: 76 | self._default_callback({"message": f"An unexpected error occurred: {str(e)}"}) 77 | 78 | loop = asyncio.new_event_loop() 79 | task_thread = threading.Thread(target=task_callback, args=(loop,)) 80 | task_thread.start() 81 | -------------------------------------------------------------------------------- /backend/dependencies/DependencyState.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | class DependencyState(Enum): 4 | AVAILABLE = "available" 5 | INSTALLING = "installing" 6 | INSTALLED = "installed" 7 | FAILED = "failed" 8 | -------------------------------------------------------------------------------- /backend/dependencies/LinuxDependency.py: -------------------------------------------------------------------------------- 1 | from .Dependency import Dependency 2 | 3 | class LinuxDependency(Dependency): 4 | # TODO: Linux dependencies need to be implemented when required 5 | def handle_exception(self, exception): 6 | super().handle_exception(exception) 7 | # Implementation for handling exception 8 | 9 | def refresh_status(self): 10 | raise NotImplementedError 11 | #package_name = self.dependency.get('name') 12 | #import subprocess 13 | #result = subprocess.run(['dpkg', '-s', package_name], stdout=subprocess.PIPE, stderr=subprocess.PIPE) 14 | #self.dependency['status'] = result.returncode == 0 15 | 16 | def start(self, ability, dependency): 17 | # Implementation for starting the dependency 18 | pass 19 | 20 | def stop(self, ability, dependency): 21 | # Implementation for stopping the dependency 22 | pass 23 | 24 | 
class ResourceDependency(Dependency):
    """Dependency backed by a downloadable resource (placeholder implementation)."""

    def handle_exception(self, exception):
        # Delegate to the base class, which logs and returns a generic error dict.
        return super().handle_exception(exception)

    def refresh_status(self, ability, dependency):
        # Implementation for refreshing status
        pass

    # NOTE: the base class Dependency.install() invokes the hooks below with a
    # `background` argument, so the signatures must accept it — the previous
    # two-argument forms raised TypeError when called through the base class.
    def start(self, ability, dependency, background=False):
        # Implementation for starting the dependency
        pass

    def stop(self, ability, dependency, background=False):
        # Implementation for stopping the dependency
        pass

    async def _install(self, ability, dependency, background=False):
        # Placeholder implementation for installing the dependency
        return {"message": "Dependency installation not yet implemented"}
# Check if the environment is set up correctly
def check_env():
    """Verify the process runs inside a virtualenv with required modules installed.

    Prints actionable setup instructions and exits with status 1 on any
    failure; returns None silently when the environment looks healthy.
    """
    import os
    import sys
    from pathlib import Path

    # Detect a virtualenv: legacy virtualenv sets sys.real_prefix, while the
    # stdlib venv makes base_prefix differ from prefix.
    if not (hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix)):
        print("Error: Running under the system python ({})\n".format(sys.prefix))
        # Expected venv location: <repo root>/.venv (this file lives in backend/).
        venv_path = Path(__file__).resolve().parent.parent / '.venv'
        if not venv_path.exists():
            print("No virtual environment found at {} so you will need to create one.".format(venv_path))
            if os.name == "posix":  # Linux/Mac
                print("\nYou can use the scripts/setup_environment.sh script to do this, or do it manually:")
                print(" python3 -m venv .venv")
                print(" source .venv/bin/activate")
                print(" pip install -r backend/requirements.txt")
            elif os.name == "nt":  # Windows
                print("\nYou can use the scripts\\setup_environment.ps1 script to do this, or do it manually from the root directory:\n")
                print(" python -m venv .venv")
                print(" .venv\\Scripts\\activate")
                print(" pip install -r backend\\requirements.txt\n")
            sys.exit(1)
        else:
            # A venv exists but is not active: tell the user how to activate it.
            print(f"Virtual environment found at {venv_path}. You can activate it with:\n")
            if os.name == "posix":  # Linux/Mac
                print(f" source {venv_path}/bin/activate")
            elif os.name == "nt":  # Windows
                print(f" {venv_path}\\Scripts\\activate.ps1")
            print(f"\nOnce you have activated the virtual environment, run this again.")
            sys.exit(1)

    # Import-probe the core runtime dependencies; bail out on the first miss.
    required_modules = ['connexion', 'uvicorn', 'sqlalchemy', 'alembic', 'aiosqlite']
    for module in required_modules:
        try:
            __import__(module)
        except ImportError:
            print(f"Required module {module} is not installed.")
            sys.exit(1)
("admin", "ADMIN_PORTAL", "ALL", "delete") 35 | ] 36 | 37 | for rule in default_rules: 38 | self.enforcer.add_policy(*rule) 39 | 40 | def get_enforcer(self): 41 | return self.enforcer 42 | 43 | def check_permissions(self, user_id, res_act, res_id, domain): 44 | return self.enforcer.enforce(user_id, domain, res_id, res_act) 45 | 46 | def get_permissions(self, role, domain): 47 | return self.enforcer.get_permissions_for_user_in_domain(role, domain) 48 | 49 | def get_resource_access(self, role, domain): 50 | permissions = self.get_permissions(role, domain) 51 | output = {} 52 | for _,_, resource, action in permissions: 53 | if resource not in output: 54 | output[resource] = [] 55 | 56 | if action not in output[resource]: 57 | output[resource].append(action) 58 | 59 | return output 60 | 61 | def assign_user_role(self, user_id, domain, role): 62 | self.enforcer.add_role_for_user_in_domain(user_id, role, domain) 63 | 64 | def get_admin_users(self, domain): 65 | return self.enforcer.get_users_for_role_in_domain("admin",domain) 66 | 67 | def get_roles_for_user_in_domain(self, user_id, domain): 68 | return self.enforcer.get_roles_for_user_in_domain(user_id, domain) 69 | 70 | def get_user_roles(self, user_id, domain): 71 | return ",".join(self.enforcer.get_roles_for_user_in_domain(user_id, domain)) 72 | 73 | -------------------------------------------------------------------------------- /backend/managers/ConfigManager.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | from threading import Lock 3 | from sqlalchemy import select, insert, update, delete 4 | from backend.models import Config 5 | from backend.db import db_session_context, init_db 6 | from backend.encryption import Encryption 7 | from backend.schemas import ConfigSchema 8 | 9 | class ConfigManager: 10 | _instance = None 11 | _lock = Lock() 12 | 13 | def __new__(cls, *args, **kwargs): 14 | if not cls._instance: 15 | with cls._lock: 16 | if not 
cls._instance: 17 | cls._instance = super(ConfigManager, cls).__new__(cls, *args, **kwargs) 18 | return cls._instance 19 | 20 | def __init__(self, tenant=None): 21 | if not hasattr(self, '_initialized'): 22 | with self._lock: 23 | if not hasattr(self, '_initialized'): 24 | self.encryption = Encryption() 25 | self.tenant = tenant 26 | # db.init_db() 27 | self._initialized = True 28 | 29 | async def create_config_item(self, value): 30 | key = str(uuid4()) 31 | encrypted_value = self.encryption.encrypt_value(value) 32 | async with db_session_context() as session: 33 | new_config = Config(key=key, value=encrypted_value) 34 | session.add(new_config) 35 | await session.commit() 36 | return ConfigSchema(key=key, value=value) 37 | 38 | async def retrieve_config_item(self, key): 39 | async with db_session_context() as session: 40 | result = await session.execute(select(Config).filter(Config.key == key)) 41 | config = result.scalar_one_or_none() 42 | if config: 43 | decrypted_value = self.encryption.decrypt_value(config.value) 44 | return ConfigSchema(key=config.key, value=decrypted_value) 45 | return None 46 | 47 | async def update_config_item(self, key, value): 48 | encrypted_value = self.encryption.encrypt_value(value) 49 | async with db_session_context() as session: 50 | stmt = update(Config).where(Config.key == key).values(value=encrypted_value) 51 | result = await session.execute(stmt) 52 | if result.rowcount == 0: 53 | new_config = Config(key=key, value=encrypted_value) 54 | session.add(new_config) 55 | await session.commit() 56 | return ConfigSchema(key=key, value=value) 57 | 58 | async def delete_config_item(self, key): 59 | async with db_session_context() as session: 60 | stmt = delete(Config).where(Config.key == key) 61 | result = await session.execute(stmt) 62 | await session.commit() 63 | return result.rowcount > 0 64 | 65 | async def retrieve_all_config_items(self): 66 | async with db_session_context() as session: 67 | result = await 
session.execute(select(Config)) 68 | configs = result.scalars().all() 69 | return [ConfigSchema(key=config.key, value=self.encryption.decrypt_value(config.value)) 70 | for config in configs] -------------------------------------------------------------------------------- /backend/managers/Manager.py: -------------------------------------------------------------------------------- 1 | # TODO: Consider creating a common interface like Dependency.py for functions like notfiying of new apps 2 | -------------------------------------------------------------------------------- /backend/managers/README.md: -------------------------------------------------------------------------------- 1 | # Managers 2 | 3 | To encapsulate related operations in a class that can be reused across the project, we use the Manager pattern. 4 | 5 | The managers are singletons so expensive startup operations like creating directories, initialising databases, setting up environments, etc. are not performed on each instantiation. 
6 | -------------------------------------------------------------------------------- /backend/managers/ResourcesManager.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | from threading import Lock 3 | from sqlalchemy import select, insert, update, delete, func 4 | from backend.models import Resource 5 | from backend.db import db_session_context 6 | from backend.schemas import ChannelCreateSchema, ChannelSchema 7 | from typing import List, Tuple, Optional, Dict, Any 8 | 9 | class ResourcesManager: 10 | _instance = None 11 | _lock = Lock() 12 | 13 | def __new__(cls, *args, **kwargs): 14 | if not cls._instance: 15 | with cls._lock: 16 | if not cls._instance: 17 | cls._instance = super(ResourcesManager, cls).__new__(cls, *args, **kwargs) 18 | return cls._instance 19 | 20 | def __init__(self): 21 | if not hasattr(self, '_initialized'): 22 | with self._lock: 23 | if not hasattr(self, '_initialized'): 24 | # db.init_db() 25 | self._initialized = True 26 | 27 | async def create_resource(self, resource_data: ChannelCreateSchema) -> ChannelSchema: 28 | async with db_session_context() as session: 29 | new_resource = Resource(id=str(uuid4()), **resource_data.model_dump()) 30 | session.add(new_resource) 31 | await session.commit() 32 | await session.refresh(new_resource) 33 | return ChannelSchema(id=new_resource.id, **resource_data.model_dump()) 34 | 35 | async def update_resource(self, id: str, resource_data: ChannelCreateSchema) -> Optional[ChannelSchema]: 36 | async with db_session_context() as session: 37 | stmt = update(Resource).where(Resource.id == id).values(**resource_data.dict()) 38 | result = await session.execute(stmt) 39 | if result.rowcount > 0: 40 | await session.commit() 41 | updated_resource = await session.get(Resource, id) 42 | return ChannelSchema(id=updated_resource.id, **resource_data.model_dump()) 43 | return None 44 | 45 | async def delete_resource(self, id: str) -> bool: 46 | async with 
db_session_context() as session: 47 | stmt = delete(Resource).where(Resource.id == id) 48 | result = await session.execute(stmt) 49 | await session.commit() 50 | return result.rowcount > 0 51 | 52 | async def retrieve_resource(self, id: str) -> Optional[ChannelSchema]: 53 | async with db_session_context() as session: 54 | result = await session.execute(select(Resource).filter(Resource.id == id)) 55 | resource = result.scalar_one_or_none() 56 | if resource: 57 | return ChannelSchema(id=resource.id, name=resource.name, uri=resource.uri) 58 | return None 59 | 60 | async def retrieve_resources(self, offset: int = 0, limit: int = 100, sort_by: Optional[str] = None, 61 | sort_order: str = 'asc', filters: Optional[Dict[str, Any]] = None) -> Tuple[List[ChannelSchema], int]: 62 | async with db_session_context() as session: 63 | query = select(Resource) 64 | 65 | if filters: 66 | for key, value in filters.items(): 67 | if isinstance(value, list): 68 | query = query.filter(getattr(Resource, key).in_(value)) 69 | else: 70 | query = query.filter(getattr(Resource, key) == value) 71 | 72 | if sort_by and sort_by in ['id', 'name', 'uri']: 73 | order_column = getattr(Resource, sort_by) 74 | query = query.order_by(order_column.desc() if sort_order.lower() == 'desc' else order_column) 75 | 76 | query = query.offset(offset).limit(limit) 77 | 78 | result = await session.execute(query) 79 | resources = [ChannelSchema(id=resource.id, name=resource.name, uri=resource.uri) 80 | for resource in result.scalars().all()] 81 | 82 | # Get total count 83 | count_query = select(func.count()).select_from(Resource) 84 | if filters: 85 | for key, value in filters.items(): 86 | if isinstance(value, list): 87 | count_query = count_query.filter(getattr(Resource, key).in_(value)) 88 | else: 89 | count_query = count_query.filter(getattr(Resource, key) == value) 90 | 91 | total_count = await session.execute(count_query) 92 | total_count = total_count.scalar() 93 | 94 | return resources, total_count 95 | 
-------------------------------------------------------------------------------- /backend/managers/UsersManager.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | from threading import Lock 3 | from sqlalchemy import select, insert, update, delete, func 4 | from backend.models import User 5 | from backend.db import db_session_context 6 | from backend.schemas import UserSchema 7 | from backend.managers.CasbinRoleManager import CasbinRoleManager 8 | 9 | class UsersManager: 10 | _instance = None 11 | _lock = Lock() 12 | 13 | def __new__(cls, *args, **kwargs): 14 | if not cls._instance: 15 | with cls._lock: 16 | if not cls._instance: 17 | cls._instance = super(UsersManager, cls).__new__(cls, *args, **kwargs) 18 | return cls._instance 19 | 20 | def __init__(self): 21 | if not hasattr(self, '_initialized'): 22 | with self._lock: 23 | if not hasattr(self, '_initialized'): 24 | # db.init_db() 25 | self._initialized = True 26 | 27 | async def create_user(self, name, email): 28 | async with db_session_context() as session: 29 | new_user = User(id=str(uuid4()), name=name, email=email) 30 | session.add(new_user) 31 | await session.commit() 32 | return new_user.id 33 | 34 | async def update_user(self, id, name, email): 35 | async with db_session_context() as session: 36 | stmt = update(User).where(User.id == id).values(name=name, email=email) 37 | await session.execute(stmt) 38 | await session.commit() 39 | 40 | async def delete_user(self, id): 41 | async with db_session_context() as session: 42 | stmt = delete(User).where(User.id == id) 43 | await session.execute(stmt) 44 | await session.commit() 45 | 46 | async def retrieve_user(self, id): 47 | async with db_session_context() as session: 48 | result = await session.execute(select(User).filter(User.id == id)) 49 | user = result.scalar_one_or_none() 50 | cb = CasbinRoleManager() 51 | return UserSchema(id=user.id, name=user.name, email=user.email, 
role=cb.get_user_roles(user.id, "ADMIN_PORTAL")) if user else None 52 | 53 | async def retrieve_users(self, offset=0, limit=100, sort_by=None, sort_order='asc', filters=None): 54 | async with db_session_context() as session: 55 | query = select(User) 56 | 57 | if filters: 58 | for key, value in filters.items(): 59 | if isinstance(value, list): 60 | query = query.filter(getattr(User, key).in_(value)) 61 | else: 62 | query = query.filter(getattr(User, key) == value) 63 | 64 | if sort_by and sort_by in ['id', 'name', 'email']: 65 | order_column = getattr(User, sort_by) 66 | query = query.order_by(order_column.desc() if sort_order.lower() == 'desc' else order_column) 67 | 68 | query = query.offset(offset).limit(limit) 69 | 70 | result = await session.execute(query) 71 | cb = CasbinRoleManager() 72 | users = [UserSchema( 73 | id=user.id, 74 | name=user.name, 75 | email=user.email, 76 | role=cb.get_user_roles(user.id, "ADMIN_PORTAL") 77 | ) for user in result.scalars().all()] 78 | 79 | # Get total count 80 | count_query = select(func.count()).select_from(User) 81 | if filters: 82 | for key, value in filters.items(): 83 | if isinstance(value, list): 84 | count_query = count_query.filter(getattr(User, key).in_(value)) 85 | else: 86 | count_query = count_query.filter(getattr(User, key) == value) 87 | 88 | total_count = await session.execute(count_query) 89 | total_count = total_count.scalar() 90 | 91 | return users, total_count -------------------------------------------------------------------------------- /backend/managers/__init__.py: -------------------------------------------------------------------------------- 1 | from .AbilitiesManager import AbilitiesManager 2 | from .AssetsManager import AssetsManager 3 | from .ResourcesManager import ResourcesManager 4 | from .ConfigManager import ConfigManager 5 | from .DownloadsManager import DownloadsManager 6 | from .UsersManager import UsersManager 7 | from .PersonasManager import PersonasManager 8 | from .AuthManager import 
class User(SQLModelBase, table=True):
    # Primary identity record; Cred rows reference webauthn_user_id, Session rows reference id.
    id: str = Field(primary_key=True, default_factory=lambda: str(uuid4()))
    # FIX: this field was declared twice; the later bare `Field()` silently
    # overrode this definition, dropping the unique constraint and the
    # auto-generated default that Cred's foreign key relies on.
    webauthn_user_id: str = Field(unique=True, default_factory=lambda: str(uuid4()))
    name: Optional[str] = Field(default=None)
    email: str = Field()
    creds: List["Cred"] = Relationship(back_populates="user")
    sessions: List["Session"] = Relationship(back_populates="user")
    emailVerified: bool = Field(default=False)
# Short-URL share link granting (optionally per-user, optionally expiring) access to a resource.
class Share(SQLModelBase, table=True):
    id: str = Field(primary_key=True) # the short URL tag, eg abcd-efgh-ijkl
    resource_id: str = Field(foreign_key="resource.id") # the bot ID
    user_id: str | None = Field(default=None) # the user granted access (optional)
    expiration_dt: datetime | None = Field(default=None) # the link expiration date/time (optional)
    is_revoked: bool = Field() # required; the redirector rejects revoked links with a 404
def parse_pagination_params(filter=None, range=None, sort=None):
    """Decode react-admin style query params into (offset, limit, sort_by, sort_order, filters).

    Each argument is a JSON-encoded string. On malformed input a 400
    JSONResponse is returned instead of the tuple, so callers must check
    the return type before unpacking.
    """
    try:
        # Range: "[first, last]" with both ends inclusive.
        offset, limit = 0, 100
        if range:
            bounds = json.loads(range)
            if not (isinstance(bounds, list) and len(bounds) == 2):
                return JSONResponse({"error": "Invalid range format"}, status_code=400)
            offset, end = bounds
            limit = end - offset + 1

        # Sort: '["column", "asc"|"desc"]'.
        sort_by, sort_order = None, 'asc'
        if sort:
            ordering = json.loads(sort)
            if not (isinstance(ordering, list) and len(ordering) == 2):
                return JSONResponse({"error": "Invalid sort format"}, status_code=400)
            sort_by, sort_order = ordering

        # Filter: arbitrary JSON object, passed through as a dict.
        filters = json.loads(filter) if filter else None

        return offset, limit, sort_by, sort_order, filters

    except (ValueError, TypeError):
        return JSONResponse({"error": "Invalid query parameter format"}, status_code=400)
from backend.managers.SharesManager import SharesManager 4 | from backend.managers.ResourcesManager import ResourcesManager 5 | 6 | # set up logging 7 | from common.log import get_logger 8 | logger = get_logger(__name__) 9 | 10 | async def redirector(request): 11 | response = await Redirector().handle_get(request) 12 | return response 13 | 14 | class Redirector: 15 | _instance = None 16 | _lock = Lock() 17 | 18 | def __new__(cls, *args, **kwargs): 19 | if not cls._instance: 20 | with cls._lock: 21 | if not cls._instance: 22 | logger.info("Creating Redirector instance.") 23 | cls._instance = super(Redirector, cls).__new__(cls, *args, **kwargs) 24 | return cls._instance 25 | 26 | def __init__(self): 27 | self.slm = SharesManager() 28 | self.rm = ResourcesManager() 29 | 30 | async def handle_get(self, request): 31 | logger.debug("redirection request for url_key = {}".format(request.path_params['url_key'])) 32 | url_key = request.path_params['url_key'] 33 | share = await self.slm.retrieve_share(url_key) 34 | logger.debug("share: {}".format(share)) 35 | if share is None or share.is_revoked: 36 | logger.error("Redirection failed: share link {} doesn't exist or has been revoked.".format(url_key)) 37 | return PlainTextResponse(f"Invalid Share Link.", status_code=404) 38 | resource = await self.rm.retrieve_resource(share.resource_id) 39 | logger.debug("resource: {}".format(resource)) 40 | if resource is None: 41 | logger.error("Redirection failed: share link {} resource not found.".format(url_key)) 42 | return PlainTextResponse(f"Unknown Resource.", status_code=500) # should not happen 43 | redirect_url = resource.uri 44 | logger.info("Share link {} ({}) redirected to {}.".format(url_key, resource.name, redirect_url)) 45 | return RedirectResponse(url=redirect_url) 46 | -------------------------------------------------------------------------------- /backend/requirements.txt: -------------------------------------------------------------------------------- 1 | -r 
class PersonaBaseSchema(BaseModel):
    # Shared persona fields; PersonaSchema adds the server-assigned `id`.
    name: str
    description: Optional[str] = None
    # voice_id/face_id are optional references: a bare `str = None` default
    # contradicted the declared type, so they are annotated Optional to
    # match the None default (consistent with `description` above).
    voice_id: Optional[str] = None
    face_id: Optional[str] = None
class ShareBaseSchema(BaseModel):
    # Fields shared by share-link create requests and responses.
    resource_id: str                          # id of the shared resource
    user_id: Optional[str] = None             # user granted access; serialized as "" when unset
    expiration_dt: Optional[datetime] = None  # link expiry; serialized as ISO-8601 UTC ("Z" suffix)
    is_revoked: Optional[bool] = False

    @field_serializer('user_id')
    def serialize_user_id(self, user_id: Optional[str], _info):
        # Emit an empty string rather than null for absent users.
        if user_id:
            return user_id
        else:
            return ""

    @field_serializer('expiration_dt', when_used='unless-none')
    def serialize_expiration_dt(self, dt: datetime, _info):
        # when_used='unless-none' skips this serializer for None values,
        # so dt here is a datetime; format it as UTC "Zulu" time.
        if dt:
            return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
61 |

pAI-OS Email Verification

62 |

Hello,

63 |

64 | Thank you for registering with pAI-OS. Please verify your email address by 65 | clicking the button below: 66 |

67 |

68 | Verify Email 69 |

70 |

71 | If the button doesn't work, please copy and paste the following link 72 | into your browser: 73 |

74 |

{{ verification_url }}

75 | 76 |
77 | 78 | 79 | -------------------------------------------------------------------------------- /backend/tests/test_db.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from backend.managers.ConfigManager import ConfigManager 3 | import asyncio 4 | 5 | class TestConfigManager(unittest.TestCase): 6 | def setUp(self): 7 | self.config_manager = ConfigManager() 8 | 9 | def asyncTest(func): 10 | def wrapper(*args, **kwargs): 11 | return asyncio.run(func(*args, **kwargs)) 12 | return wrapper 13 | 14 | @asyncTest 15 | async def test_create_config_item(self): 16 | value = 'test_value' 17 | key = await self.config_manager.create_config_item(value) 18 | result = await self.config_manager.retrieve_config_item(key) 19 | await self.config_manager.delete_config_item(key) 20 | self.assertEqual(result, value) 21 | 22 | @asyncTest 23 | async def test_read_config_item(self): 24 | value = 'test_value' 25 | key = await self.config_manager.create_config_item(value) 26 | result = await self.config_manager.retrieve_config_item(key) 27 | await self.config_manager.delete_config_item(key) 28 | self.assertEqual(result, value) 29 | 30 | @asyncTest 31 | async def test_update_config_item(self): 32 | value = 'test_value' 33 | key = await self.config_manager.create_config_item(value) 34 | new_value = 'new_test_value' 35 | await self.config_manager.update_config_item(key, new_value) 36 | result = await self.config_manager.retrieve_config_item(key) 37 | await self.config_manager.delete_config_item(key) 38 | self.assertEqual(result, new_value) 39 | 40 | @asyncTest 41 | async def test_delete_config_item(self): 42 | value = 'test_value' 43 | key = await self.config_manager.create_config_item(value) 44 | await self.config_manager.delete_config_item(key) 45 | result = await self.config_manager.retrieve_config_item(key) 46 | self.assertIsNone(result) 47 | 48 | def tearDown(self): 49 | pass 50 | 51 | if __name__ == '__main__': 52 | unittest.main() 
import os
from dotenv import set_key
from common.paths import base_dir

# set up logging
from common.log import get_logger
logger = get_logger(__name__)


def get_env_key(key_name, default=None):
    """Return the value of environment variable *key_name*.

    If the variable is unset (or set to an empty string), fall back to
    *default*: a callable default is invoked to produce the value (e.g. a
    secret generator), anything else is used directly. The resolved default
    is persisted to the project's .env file so later runs reuse it.

    Raises:
        ValueError: if the variable is unset and no default was supplied.
    """
    value = os.environ.get(key_name)
    if not value:
        if default is None:
            raise ValueError(f"{key_name} is not set in the environment variables")
        # Coerce to str in both branches: os.environ and set_key() only deal
        # in strings, and a callable default (e.g. secrets.token_urlsafe)
        # previously escaped the str() conversion.
        value = str(default()) if callable(default) else str(default)
        set_key(base_dir / '.env', key_name, value)
    return value


def remove_null_fields(data):
    """Recursively strip None values from dicts/lists.

    Used for OpenAPI-spec-compliant responses without having to declare
    nullable: true on every field. Non-container values pass through as-is.
    """
    if isinstance(data, dict):
        return {k: remove_null_fields(v) for k, v in data.items() if v is not None}
    elif isinstance(data, list):
        return [remove_null_fields(item) for item in data if item is not None]
    else:
        return data


def filter_dict(data, keys_to_include):
    """Return a copy of *data* containing only *keys_to_include*.

    Missing keys are silently skipped (useful for OpenAPI-spec-compliant
    responses without unexpected fields present).
    """
    return {k: data[k] for k in keys_to_include if k in data}


def zip_fields(fields, result):
    """Convert a positional db row into a dict with named fields.

    Example: zip_fields(["x", "y"], [1, 2]) -> {"x": 1, "y": 2}
    """
    return {field: result[i] for i, field in enumerate(fields)}
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from pathlib import Path
import os
import stat
import datetime
from common.paths import cert_dir

# Set up logging
from common.log import get_logger
logger = get_logger(__name__)


def set_file_permissions(file_path):
    """Restrict *file_path* so only the current user can read and write it."""
    if os.name == 'posix':
        # Unix-based systems: owner read/write only (0600).
        os.chmod(file_path, stat.S_IRUSR | stat.S_IWUSR)
    elif os.name == 'nt':
        # Windows: drop inherited ACLs, grant the current user full control.
        # NOTE(review): file_path is interpolated into a shell command; a path
        # containing a double quote would break this. Acceptable for our
        # fixed cert/key paths, but do not reuse with untrusted paths.
        os.system(f'icacls "{file_path}" /inheritance:r /grant:r %username%:F')


def check_cert(key_passphrase: str | None = None):
    """Ensure a self-signed TLS certificate and key exist under cert_dir.

    On first run, generates a 2048-bit RSA key and a one-year self-signed
    certificate for ``localhost`` (with a matching SubjectAlternativeName);
    subsequent runs reuse the existing files. Generation failures are logged
    and swallowed (best-effort, matching previous behavior).

    Args:
        key_passphrase: optional passphrase used to encrypt the private key
            on disk; the key is stored unencrypted when omitted.
    """
    cert_dir.mkdir(parents=True, exist_ok=True)

    cert_path = cert_dir / "cert.pem"
    key_path = cert_dir / "key.pem"

    if cert_path.exists() and key_path.exists():
        logger.info("Using existing certificate and key.")
        return

    try:
        # Generate key
        key = rsa.generate_private_key(
            public_exponent=65537,
            key_size=2048,
        )

        # Generate certificate (self-signed, so subject == issuer)
        subject = issuer = x509.Name([
            x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
            x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, u"California"),
            x509.NameAttribute(NameOID.LOCALITY_NAME, u"San Francisco"),
            x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"pAI-OS Local Installation"),
            x509.NameAttribute(NameOID.COMMON_NAME, u"localhost"),
        ])

        # Use timezone-aware UTC timestamps: datetime.utcnow() is deprecated
        # and naive validity datetimes are deprecated in recent cryptography.
        now = datetime.datetime.now(datetime.timezone.utc)
        cert = x509.CertificateBuilder().subject_name(
            subject
        ).issuer_name(
            issuer
        ).public_key(
            key.public_key()
        ).serial_number(
            x509.random_serial_number()
        ).not_valid_before(
            now
        ).not_valid_after(
            # Certificate is valid for 1 year
            now + datetime.timedelta(days=365)
        ).add_extension(
            x509.SubjectAlternativeName([
                x509.DNSName(u"localhost")
            ]),
            critical=False,
        ).sign(key, hashes.SHA256())

        # Write key to file with encryption if passphrase is set
        encryption_algorithm = serialization.BestAvailableEncryption(key_passphrase.encode()) if key_passphrase else serialization.NoEncryption()
        with open(key_path, "wb") as f:
            f.write(key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.TraditionalOpenSSL,
                encryption_algorithm=encryption_algorithm,
            ))

        # Set restrictive permissions on the key file
        set_file_permissions(key_path)

        # Write certificate to file
        with open(cert_path, "wb") as f:
            f.write(cert.public_bytes(serialization.Encoding.PEM))

        logger.info(f"Generated new certificate and key at {cert_dir}")
    except Exception as e:
        logger.error(f"Error generating certificate and key: {e}")


# Example usage
if __name__ == "__main__":
    check_cert()
"%(levelprefix)s %(message)s", 14 | "use_colors": True, 15 | }, 16 | "access": { 17 | "()": "uvicorn.logging.AccessFormatter", 18 | "fmt": '%(client_addr)s - - [%(asctime)s] "%(request_line)s" %(status_code).3s -', 19 | "datefmt": "%d/%b/%Y:%H:%M:%S %z", 20 | "use_colors": False 21 | }, 22 | 23 | }, 24 | "handlers": { 25 | # TODO: standardise formatting; for now use uvicorn_default 26 | "default": { 27 | #"formatter": "standard", 28 | "formatter": "uvicorn_default", 29 | "class": "logging.StreamHandler", 30 | "stream": "ext://sys.stderr", 31 | }, 32 | "standard": { 33 | #"formatter": "standard", 34 | "formatter": "uvicorn_default", 35 | "class": "logging.StreamHandler", 36 | "stream": "ext://sys.stderr", 37 | }, 38 | "uvicorn_default": { 39 | "formatter": "uvicorn_default", 40 | #"formatter": "standard", 41 | "class": "logging.StreamHandler", 42 | "stream": "ext://sys.stderr", 43 | }, 44 | "access": { 45 | "formatter": "access", 46 | "class": "logging.handlers.RotatingFileHandler", 47 | "filename": log_dir / "access.log", 48 | "maxBytes": 52428800, 49 | "backupCount": 9, 50 | "encoding": "utf8" 51 | }, 52 | "connexion": { 53 | "formatter": "standard", 54 | "class": "logging.handlers.RotatingFileHandler", 55 | "filename": log_dir / "connexion.log", 56 | "maxBytes": 52428800, 57 | "backupCount": 9, 58 | "encoding": "utf8" 59 | }, 60 | "backend.redirector": { 61 | "formatter": "standard", 62 | "class": "logging.handlers.RotatingFileHandler", 63 | "filename": log_dir / "redirector.log", 64 | "maxBytes": 52428800, 65 | "backupCount": 9, 66 | "encoding": "utf8" 67 | }, 68 | }, 69 | "loggers": { 70 | "": {"handlers": ["default"], "level": "INFO"}, # root logger 71 | "connexion": {"handlers": ["connexion"], "level": "INFO", "propagate": False}, 72 | "backend.redirector": {"handlers": ["backend.redirector"], "level": "INFO", "propagate": False}, 73 | "uvicorn": {"handlers": ["uvicorn_default"], "level": "INFO", "propagate": False}, 74 | "uvicorn.error": {"level": "INFO"}, 
75 | "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False}, 76 | "watchfiles.main": {"level": "ERROR"}, # filter watchfiles noise 77 | "sqlalchemy.engine": {"level": "WARNING", "propagate": False}, # filter sqlalchemy noise 78 | }, 79 | } 80 | -------------------------------------------------------------------------------- /common/log.py: -------------------------------------------------------------------------------- 1 | import logging.config 2 | import os 3 | from common.paths import log_dir 4 | from common.config import logging_config 5 | 6 | def setup_logging(): 7 | os.makedirs(log_dir, exist_ok=True) 8 | logging.config.dictConfig(logging_config) # could fallback to uvicorn.config.LOGGING_CONFIG but requires import that may not be available pre-setup/check_env 9 | 10 | # Set up logging configuration once on first import only 11 | setup_logging() 12 | 13 | # Define a function to get a logger 14 | def get_logger(name): 15 | return logging.getLogger(name) 16 | -------------------------------------------------------------------------------- /common/mail.py: -------------------------------------------------------------------------------- 1 | import aiosmtplib 2 | import secrets 3 | from email.mime.multipart import MIMEMultipart 4 | from email.mime.text import MIMEText 5 | from .utils import get_env_key # Assuming get_env_key is in utils module 6 | 7 | async def send(to, subject, body_text, body_html = None): 8 | # sends mail via SMTP in text and/or html format 9 | # asyncio.run(send("samj@samj.net", "pAI-OS started up", f"You can access pAI-OS at https://{host}:{port}.")) 10 | 11 | # Retrieve SMTP server details from environment variables 12 | smtp_host = get_env_key('PAIOS_SMTP_HOST', 'localhost') 13 | smtp_port = get_env_key('PAIOS_SMTP_PORT', '1025') # Default SMTP port for Mailhog 14 | smtp_from = get_env_key('PAIOS_SMTP_FROM', 'paios@localhost') 15 | smtp_user = get_env_key('PAIOS_SMTP_USER', 'paios@localhost') 16 | smtp_pass = 
get_env_key('PAIOS_SMTP_PASS', secrets.token_urlsafe(32)) 17 | 18 | # Create a MIME message 19 | msg = MIMEMultipart('alternative') 20 | msg['Subject'] = subject 21 | msg['From'] = smtp_from 22 | msg['To'] = to 23 | 24 | # Ensure body_html is not None 25 | if body_html is None: 26 | body_html = body_text # Fallback to plain text if HTML is not provided 27 | 28 | # Attach both plain text and HTML parts 29 | part1 = MIMEText(body_text, 'plain') 30 | part2 = MIMEText(body_html, 'html') 31 | msg.attach(part1) 32 | msg.attach(part2) 33 | 34 | # Connect to the SMTP server and send the email 35 | try: 36 | await aiosmtplib.send( 37 | msg, 38 | hostname=smtp_host, 39 | port=smtp_port, 40 | start_tls=False, 41 | username=None, 42 | password=None, 43 | ) 44 | except Exception as e: 45 | print(f"Failed to send email: {e}") 46 | -------------------------------------------------------------------------------- /common/paths.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | # api 4 | api_base_url = '/api/v1' 5 | 6 | # paios 7 | base_dir = Path(__file__).resolve().parent.parent 8 | common_dir = base_dir / 'common' 9 | backend_dir = base_dir / 'backend' 10 | frontend_dir = base_dir / 'frontend' 11 | env_file = backend_dir / '.env' 12 | 13 | # python venv 14 | venv_dir = base_dir / '.venv' 15 | venv_bin_dir = venv_dir / 'bin' 16 | 17 | # data 18 | data_dir = base_dir / 'data' 19 | cert_dir = data_dir / 'cert' 20 | apps_dir = data_dir / 'apps' 21 | envs_dir = data_dir / 'envs' 22 | log_dir = data_dir / 'log' 23 | 24 | # logs 25 | log_db_path = 'file:log?mode=memory&cache=shared' 26 | 27 | # abilities 28 | abilities_subdir = 'abilities' 29 | abilities_dir = base_dir / abilities_subdir 30 | abilities_data_dir = data_dir / abilities_subdir 31 | 32 | # paths 33 | db_name = 'paios.db' 34 | db_path = data_dir / db_name 35 | db_url = f"sqlite+aiosqlite:///{db_path}" 36 | downloads_dir = data_dir / 'downloads' 37 | 38 | 
import os
from dotenv import set_key
from common.paths import base_dir

# set up logging
from common.log import get_logger
logger = get_logger(__name__)


def get_env_key(key_name, default=None):
    """Return the value of environment variable *key_name*.

    If the variable is unset (or set to an empty string), fall back to
    *default*: a callable default is invoked to produce the value (e.g. a
    secret generator), anything else is used directly. The resolved default
    is persisted to the project's .env file so later runs reuse it.

    Raises:
        ValueError: if the variable is unset and no default was supplied.
    """
    value = os.environ.get(key_name)
    if not value:
        if default is None:
            raise ValueError(f"{key_name} is not set in the environment variables")
        # Coerce to str in both branches: os.environ and set_key() only deal
        # in strings, and a callable default (e.g. secrets.token_urlsafe)
        # previously escaped the str() conversion.
        value = str(default()) if callable(default) else str(default)
        set_key(base_dir / '.env', key_name, value)
    return value


def remove_null_fields(data):
    """Recursively strip None values from dicts/lists.

    Used for OpenAPI-spec-compliant responses without having to declare
    nullable: true on every field. Non-container values pass through as-is.
    """
    if isinstance(data, dict):
        return {k: remove_null_fields(v) for k, v in data.items() if v is not None}
    elif isinstance(data, list):
        return [remove_null_fields(item) for item in data if item is not None]
    else:
        return data


def filter_dict(data, keys_to_include):
    """Return a copy of *data* containing only *keys_to_include*.

    Missing keys are silently skipped (useful for OpenAPI-spec-compliant
    responses without unexpected fields present).
    """
    return {k: data[k] for k in keys_to_include if k in data}


def zip_fields(fields, result):
    """Convert a positional db row into a dict with named fields.

    Example: zip_fields(["x", "y"], [1, 2]) -> {"x": 1, "y": 2}
    """
    return {field: result[i] for i, field in enumerate(fields)}
-------------------------------------------------------------------------------- /data/README.md: -------------------------------------------------------------------------------- 1 | # pAI-OS Data 2 | 3 | The data directory contains information required by paios (config, assets, artifacts, etc.) 4 | -------------------------------------------------------------------------------- /frontend/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "extends": [ 3 | "eslint:recommended", 4 | "plugin:react/recommended", 5 | "plugin:react/jsx-runtime", 6 | "plugin:react-hooks/recommended", 7 | "prettier" 8 | ], 9 | "parser": "@typescript-eslint/parser", 10 | "plugins": ["@typescript-eslint"], 11 | "env": { 12 | "browser": true, 13 | "es2021": true 14 | }, 15 | "settings": { 16 | "react": { 17 | "version": "detect" 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | .idea 18 | .DS_Store 19 | *.suo 20 | *.ntvs* 21 | *.njsproj 22 | *.sln 23 | *.sw? 24 | 25 | # local config 26 | .env 27 | 28 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # pAI-OS Frontend 2 | 3 | ## Users 4 | 5 | The frontend is served from the same server as the backend so there's nothing to do here. 
6 | 7 | ## Developers 8 | 9 | Enter the pAI-OS frontend source directory: 10 | 11 | cd frontend 12 | 13 | Install the application dependencies by running: 14 | 15 | ```sh 16 | npm install 17 | ``` 18 | 19 | Start the application in development mode by running: 20 | 21 | ```sh 22 | npm run dev 23 | ``` 24 | 25 | Access the development server at https://localhost:5173 26 | 27 | Note: This has the benefit of automatically reloading the page when files are modified without having to `npm run build`. 28 | 29 | ## Production 30 | 31 | Build the application in production mode by running: 32 | 33 | ```sh 34 | npm run build 35 | ``` 36 | 37 | ## DataProvider 38 | 39 | The included data provider use [ra-data-json-server](https://github.com/marmelab/react-admin/tree/master/packages/ra-data-json-server). It fits REST APIs powered by [JSON Server](https://github.com/typicode/json-server), such as [JSONPlaceholder](https://jsonplaceholder.typicode.com/). 40 | 41 | You'll find an `.env` file at the project root that includes a `VITE_JSON_SERVER_URL` variable. Set it to the URL of your backend. 42 | -------------------------------------------------------------------------------- /frontend/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 9 | 10 | 11 | 12 | pAI-OS 13 | 109 | 110 | 114 | 115 | 116 | 117 | 118 |
119 |
120 |
Loading...
121 |
122 |
123 | 124 | 125 | 126 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "paios-frontend", 3 | "private": true, 4 | "scripts": { 5 | "dev": "vite", 6 | "build": "vite build", 7 | "serve": "vite preview", 8 | "type-check": "tsc --noEmit", 9 | "lint": "eslint --fix --ext .js,.jsx,.ts,.tsx ./src", 10 | "format": "prettier --write ./src" 11 | }, 12 | "dependencies": { 13 | "@simplewebauthn/browser": "^10.0.0", 14 | "jwt-decode": "^4.0.0", 15 | "ra-data-json-server": "^4.16.0", 16 | "ra-data-simple-rest": "^4.16.17", 17 | "react": "^18.2.0", 18 | "react-admin": "^4.16.0", 19 | "react-dom": "^18.2.0" 20 | }, 21 | "devDependencies": { 22 | "@types/js-cookie": "^3.0.6", 23 | "@types/node": "^18.16.1", 24 | "@types/react": "^18.0.22", 25 | "@types/react-dom": "^18.0.7", 26 | "@typescript-eslint/eslint-plugin": "^5.60.1", 27 | "@typescript-eslint/parser": "^5.60.1", 28 | "@vitejs/plugin-react": "^4.0.1", 29 | "eslint": "^8.43.0", 30 | "eslint-config-prettier": "^8.8.0", 31 | "eslint-plugin-react": "^7.32.2", 32 | "eslint-plugin-react-hooks": "^4.6.0", 33 | "prettier": "^2.8.8", 34 | "typescript": "^5.1.6", 35 | "vite": "^4.5.5" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /frontend/prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = {} 2 | 3 | -------------------------------------------------------------------------------- /frontend/public/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pAI-OS/paios/b65d858f12c1da472176829af4b533a9e7c246e2/frontend/public/favicon.png -------------------------------------------------------------------------------- /frontend/public/manifest.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "short_name": "paios-frontend", 3 | "name": "{{name}}", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | } 10 | ], 11 | "start_url": "./index.html", 12 | "display": "standalone", 13 | "theme_color": "#000000", 14 | "background_color": "#ffffff" 15 | } 16 | -------------------------------------------------------------------------------- /frontend/src/App.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Admin, CustomRoutes, RenderResourcesFunction, Resource } from 'react-admin'; 3 | import { Route } from 'react-router-dom'; 4 | import { UserList, UserCreate, UserEdit, UserShow } from "./users"; 5 | import { AbilityList, AbilityShow } from "./abilities"; 6 | import { AssetList, AssetCreate, AssetEdit, AssetShow } from "./assets"; 7 | import { ChannelList, ChannelShow } from "./resources"; 8 | import { ShareList, ShareCreate, ShareEdit, ShareShow } from "./shares"; 9 | import { DownloadsList } from "./downloads"; 10 | import { dataProvider } from "./dataProvider"; 11 | import DocIcon from "@mui/icons-material/Book"; 12 | import UserIcon from "@mui/icons-material/Group"; 13 | import ExtensionIcon from '@mui/icons-material/Extension'; 14 | import SyncAltIcon from '@mui/icons-material/SyncAlt'; 15 | import LinkIcon from '@mui/icons-material/Link'; 16 | import { Dashboard } from "./Dashboard"; 17 | import { authProvider } from "./authProvider"; 18 | import { CustomLayout } from './CustomLayout'; 19 | import Login from './Login'; 20 | import { VerifyEmail } from './VerifyEmail'; 21 | import { hasAccess, ResourcePermissions } from './utils/authUtils'; 22 | 23 | 24 | // Resource configuration 25 | const resourceConfig = [ 26 | { 27 | name: "assets", 28 | list: AssetList, 29 | create: AssetCreate, 30 | edit: AssetEdit, 31 | show: AssetShow, 
32 | icon: DocIcon, 33 | recordRepresentation: 'name', 34 | }, 35 | { 36 | name: "users", 37 | list: UserList, 38 | create: UserCreate, 39 | edit: UserEdit, 40 | show: UserShow, 41 | icon: UserIcon, 42 | recordRepresentation: 'name', 43 | }, 44 | { 45 | name: "abilities", 46 | list: AbilityList, 47 | show: AbilityShow, 48 | icon: ExtensionIcon, 49 | recordRepresentation: 'id', 50 | }, 51 | { 52 | name: "resources", 53 | list: ChannelList, 54 | show: ChannelShow, 55 | icon: SyncAltIcon, 56 | recordRepresentation: 'id', 57 | }, 58 | { 59 | name: "downloads", 60 | list: DownloadsList, 61 | }, 62 | { 63 | name: "shares", 64 | list: ShareList, 65 | create: ShareCreate, 66 | edit: ShareEdit, 67 | show: ShareShow, 68 | icon: LinkIcon, 69 | recordRepresentation: 'id', 70 | }, 71 | ]; 72 | 73 | const renderResources: RenderResourcesFunction = (permissions: ResourcePermissions) => ( 74 | <> 75 | {resourceConfig.map(resource => { 76 | if (!hasAccess(resource.name, "list", permissions)) return null; 77 | 78 | return ( 79 | 89 | ); 90 | })} 91 | 92 | } /> 93 | 94 | 95 | ); 96 | 97 | export const App = () => ( 98 | 105 | {renderResources} 106 | 107 | ); 108 | -------------------------------------------------------------------------------- /frontend/src/CustomLayout.tsx: -------------------------------------------------------------------------------- 1 | import { Layout, LayoutProps } from 'react-admin'; 2 | 3 | import { CustomMenu } from './CustomMenu'; 4 | 5 | export const CustomLayout = (props: LayoutProps) => ; 6 | -------------------------------------------------------------------------------- /frontend/src/CustomMenu.tsx: -------------------------------------------------------------------------------- 1 | import { Menu } from "react-admin"; 2 | import MenuItem from '@mui/material/MenuItem'; 3 | import ListItemIcon from '@mui/material/ListItemIcon'; 4 | import ListItemText from '@mui/material/ListItemText'; 5 | import ApiIcon from '@mui/icons-material/Api'; 6 | import { 
apiBase } from './apiBackend'; 7 | 8 | export const CustomMenu = () => ( 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | ); 25 | 26 | export default CustomMenu; 27 | -------------------------------------------------------------------------------- /frontend/src/Dashboard.tsx: -------------------------------------------------------------------------------- 1 | import { Card, CardContent, CardHeader } from "@mui/material"; 2 | import { Title } from "react-admin"; 3 | 4 | export const Dashboard = () => ( 5 | 6 | 7 | <CardHeader title="Welcome to the Personal Artificial Intelligence Operating System (pAI-OS)" /> 8 | <CardContent>Take control of your digital life with the revolutionary pAI-OS Dashboard. This intuitive interface puts you in the driver's seat of your own AI-powered operating system. 9 | With the pAI-OS Dashboard, you can: 10 | <ul> 11 | <li>Customize your AI assistant's personality, knowledge, and capabilities to fit your unique needs and preferences</li> 12 | <li>Seamlessly connect and manage all your smart devices, online accounts, and digital services in one unified hub</li> 13 | <li>Monitor system performance, data usage, privacy settings, and more with detailed analytics and visualizations</li> 14 | <li>Automate complex tasks and workflows across apps with simple natural language commands</li> 15 | <li>Get personalized insights, recommendations and intelligent assistance to optimize your productivity and digital wellbeing</li> 16 | </ul> 17 | The pAI-OS Dashboard makes it easy to unleash the full potential of artificial intelligence to streamline and enhance every aspect of your digital experience. 
Take charge of your AI-driven future with pAI-OS.</CardContent> 18 | </Card> 19 | ); 20 | -------------------------------------------------------------------------------- /frontend/src/Login.css: -------------------------------------------------------------------------------- 1 | .paios-heading { 2 | font-size: 2.5em; 3 | color: #333; 4 | margin-bottom: 30px; 5 | } 6 | 7 | .auth-container { 8 | display: flex; 9 | flex-direction: column; 10 | align-items: center; 11 | justify-content: center; 12 | height: 100vh; 13 | background-color: #f5f5f5; 14 | } 15 | 16 | .logo { 17 | width: 150px; 18 | margin-bottom: 10px; 19 | } 20 | 21 | h1 { 22 | font-size: 2em; 23 | margin-bottom: 20px; 24 | } 25 | 26 | h2 { 27 | font-size: 1.8em; 28 | margin-bottom: 20px; 29 | } 30 | 31 | form { 32 | display: flex; 33 | flex-direction: column; 34 | align-items: center; 35 | width: 100%; 36 | max-width: 300px; 37 | } 38 | 39 | .input-field { 40 | width: 100%; 41 | padding: 10px; 42 | margin-bottom: 20px; 43 | border: 1px solid #ccc; 44 | border-radius: 5px; 45 | } 46 | 47 | .auth-button { 48 | width: 100%; 49 | padding: 10px; 50 | background-color: #007bff; 51 | color: white; 52 | border: none; 53 | border-radius: 5px; 54 | cursor: pointer; 55 | } 56 | 57 | .auth-link { 58 | margin-top: 20px; 59 | color: #007bff; 60 | text-decoration: none; 61 | } 62 | 63 | .auth-link button { 64 | color: #007bff; 65 | outline: none; 66 | border: none; 67 | cursor: pointer; 68 | } 69 | 70 | /* EmailVerification.css */ 71 | .verification-container { 72 | display: flex; 73 | flex-direction: column; 74 | align-items: center; 75 | justify-content: center; 76 | min-height: 100vh; 77 | background-color: #f0f4f8; 78 | padding: 20px; 79 | } 80 | 81 | .loading { 82 | font-size: 18px; 83 | color: #666; 84 | } 85 | 86 | .verification-message { 87 | text-align: center; 88 | background-color: white; 89 | border-radius: 8px; 90 | box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1); 91 | padding: 40px; 92 | max-width: 400px; 93 | 
width: 100%; 94 | } 95 | 96 | h1 { 97 | color: #333; 98 | margin-bottom: 20px; 99 | } 100 | 101 | p { 102 | font-size: 16px; 103 | color: #555; 104 | margin-bottom: 30px; 105 | } 106 | 107 | .login-button { 108 | padding: 10px 20px; 109 | font-size: 16px; 110 | color: white; 111 | background-color: #007bff; 112 | border: none; 113 | border-radius: 5px; 114 | cursor: pointer; 115 | transition: background-color 0.3s; 116 | } 117 | 118 | .login-button:hover { 119 | background-color: #0056b3; 120 | } 121 | -------------------------------------------------------------------------------- /frontend/src/Login.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import logo from './assets/paios.png'; 3 | import './Login.css'; 4 | import { useLogin, useNotify } from "react-admin" 5 | 6 | const Login: React.FC = () => { 7 | const [email, setEmail] = useState(''); 8 | 9 | const login = useLogin() 10 | const notify = useNotify() 11 | 12 | const handleUser = async (event: React.FormEvent<HTMLFormElement>) => { 13 | event.preventDefault() 14 | if (!email.trim()) { 15 | notify('Email field cannot be empty', { type: 'error' }); 16 | return; 17 | } 18 | 19 | try { 20 | const result = await login({ email }); 21 | if (result?.stayOnLogin) { 22 | notify('Email verification sent! Please check your inbox.', { type: 'info' }); 23 | } 24 | 25 | } catch (e) { 26 | console.error('Debug: Error in handleUser:', e); 27 | if (e instanceof Error) { 28 | if (e.name === 'InvalidStateError' || e.message.includes('The authenticator was previously registered')) { 29 | notify('User already exists. Please login instead.', { type: 'error' }); 30 | } else { 31 | notify('An error occurred. Please try again.', { type: 'error' }); 32 | } 33 | } else { 34 | notify('An unexpected error occurred. 
Please try again.', { type: 'error' }); 35 | } 36 | } 37 | } 38 | 39 | const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => { 40 | setEmail(e.target.value) 41 | } 42 | 43 | return ( 44 | <div className="auth-container"> 45 | <h1 className="paios-heading">pAI-OS</h1> 46 | <img src={logo} alt="pAI-OS Logo" className="logo" /> 47 | <form onSubmit={handleUser}> 48 | <input type="email" id="email" placeholder="Email" className="input-field" onChange={handleChange} value={email} /> 49 | <button type="submit" className="auth-button">Enter pAI-OS</button> 50 | </form> 51 | </div> 52 | ); 53 | }; 54 | 55 | export default Login; -------------------------------------------------------------------------------- /frontend/src/VerifyEmail.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react'; 2 | import { useParams, useNavigate } from 'react-router-dom'; 3 | import { verifyEmail } from './apis/auth'; 4 | import './Login.css'; 5 | 6 | export const VerifyEmail = () => { 7 | const { token } = useParams(); 8 | const [verificationStatus, setVerificationStatus] = useState(""); 9 | const [loading, setLoading] = useState(false); 10 | const navigate = useNavigate(); 11 | 12 | useEffect(() => { 13 | const checkEmail = async () => { 14 | if (!token) { 15 | setVerificationStatus("Not a valid token") 16 | return 17 | } 18 | setLoading(true) 19 | try { 20 | const res = await verifyEmail(token!); 21 | setVerificationStatus(res.message); 22 | } catch (error) { 23 | setVerificationStatus(error.message); 24 | } finally { 25 | setLoading(false); 26 | } 27 | }; 28 | checkEmail(); 29 | }, [token]); 30 | 31 | return ( 32 | <div> 33 | <div className="verification-container"> 34 | {loading ? 
( 35 | <div className="loading">Loading...</div> 36 | ) : ( 37 | <div className="verification-message"> 38 | <h1>pAI-OS Email Verification Status:</h1> 39 | <h1>{verificationStatus}</h1> 40 | <button className="login-button" onClick={() => navigate("/login")}> 41 | Click here to login 42 | </button> 43 | </div> 44 | )} 45 | </div> 46 | </div> 47 | ) 48 | } -------------------------------------------------------------------------------- /frontend/src/apiBackend.ts: -------------------------------------------------------------------------------- 1 | import { fetchUtils } from 'react-admin'; 2 | 3 | export const apiBase = import.meta.env.VITE_JSON_SERVER_URL || 'https://localhost:8443/api/v1'; 4 | 5 | export const httpClient = (url: string, options: any = {}) => { 6 | if (!options.headers) { 7 | options.headers = new Headers({ Accept: 'application/json' }); 8 | } 9 | 10 | const token = localStorage.getItem("token"); 11 | options.headers.set('Authorization', `Bearer ${token}`); 12 | return fetchUtils.fetchJson(url, options); 13 | } 14 | -------------------------------------------------------------------------------- /frontend/src/apis/auth.ts: -------------------------------------------------------------------------------- 1 | import { 2 | startAuthentication, 3 | startRegistration, 4 | } from "@simplewebauthn/browser"; 5 | import { apiBase } from "../apiBackend"; 6 | 7 | export const authentication = async (email: string) => { 8 | try { 9 | const response = await fetch(`${apiBase}/auth/webauthn/options`, { 10 | method: "POST", 11 | headers: { 12 | "Content-Type": "application/json", 13 | }, 14 | body: JSON.stringify({ email }), 15 | }); 16 | 17 | if (response.status !== 200) { 18 | throw new Error("Something went wrong"); 19 | } 20 | 21 | const res = await response.json(); 22 | const options = JSON.parse(res.options); 23 | if (res.flow === "REGISTER") return await register(email, options); 24 | else if (res.flow === "LOGIN") return await login(email, options); 25 | } 
catch (error) { 26 | throw new Error("Failed to register user"); 27 | } 28 | }; 29 | 30 | export const verifyEmail = async (token: string) => { 31 | try { 32 | const isValidRes = await fetch(`${apiBase}/auth/verify-email`, { 33 | method: "POST", 34 | headers: { 35 | "Content-Type": "application/json", 36 | }, 37 | body: JSON.stringify({ 38 | token, 39 | }), 40 | }); 41 | 42 | if (isValidRes.status !== 200) { 43 | throw new Error("Email validation failed."); 44 | } 45 | 46 | return await isValidRes.json(); 47 | } catch (error) { 48 | throw new Error("Email validation failed."); 49 | } 50 | }; 51 | 52 | export const login = async (email: string, options: any) => { 53 | try { 54 | const authResp = await startAuthentication(options); 55 | 56 | const verifyResponse = await fetch(`${apiBase}/auth/webauthn/login`, { 57 | method: "POST", 58 | headers: { 59 | "Content-Type": "application/json", 60 | }, 61 | body: JSON.stringify({ 62 | email, 63 | auth_resp: authResp, 64 | challenge: options.challenge, 65 | }), 66 | }); 67 | 68 | if (verifyResponse.status !== 200) { 69 | throw new Error("Failed to register user."); 70 | } 71 | 72 | return await verifyResponse.json(); 73 | } catch (error) { 74 | throw new Error("Failed to register user"); 75 | } 76 | }; 77 | 78 | export const register = async (email: string, options: any) => { 79 | try { 80 | const attResp = await startRegistration(options); 81 | 82 | const verifyResponse = await fetch(`${apiBase}/auth/webauthn/register`, { 83 | method: "POST", 84 | headers: { 85 | "Content-Type": "application/json", 86 | }, 87 | body: JSON.stringify({ 88 | email, 89 | att_resp: attResp, 90 | challenge: options.challenge, 91 | user_id: options.user.id, 92 | }), 93 | }); 94 | 95 | if (verifyResponse.status !== 200) { 96 | throw new Error("Failed to register user."); 97 | } 98 | const tokenRes = await verifyResponse.json(); 99 | return tokenRes; 100 | } catch (error) { 101 | if (error instanceof Error) { 102 | throw error; 103 | } 104 | throw 
// Filters for the asset list: free-text search plus a user reference.
// Each element of a react-admin filter array is rendered from an array,
// so it needs a unique `key` prop to avoid React's missing-key warning.
const assetFilters = [
  <TextInput key="q" source="q" label="Search" alwaysOn />,
  <ReferenceInput key="user_id" source="user_id" label="User" reference="users" />,
];
<SimpleForm> 49 | <ReferenceInput source="user_id" reference="users" optionText="name" /> 50 | <TextInput source="title" /> 51 | <TextInput source="description" multiline rows={5} /> 52 | </SimpleForm> 53 | </Create> 54 | ); 55 | -------------------------------------------------------------------------------- /frontend/src/assets/paios.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pAI-OS/paios/b65d858f12c1da472176829af4b533a9e7c246e2/frontend/src/assets/paios.png -------------------------------------------------------------------------------- /frontend/src/authProvider.tsx: -------------------------------------------------------------------------------- 1 | import { AuthProvider, useNotify } from "react-admin"; 2 | import { authentication, logout } from "./apis/auth"; 3 | import { jwtDecode } from "jwt-decode"; 4 | 5 | interface CustomJwtPayload { 6 | roles: string[]; 7 | exp?: number; 8 | } 9 | 10 | export const authProvider: AuthProvider = { 11 | // called when the user attempts to log in 12 | login: async ({ email }: { email: string }) => { 13 | try { 14 | const res = await authentication(email) 15 | if (res.token) { 16 | localStorage.setItem("token", res.token) 17 | localStorage.setItem("permissions", JSON.stringify(res.permissions)) 18 | return Promise.resolve() 19 | } else { 20 | return { redirectTo: false, stayOnLogin: true }; 21 | } 22 | 23 | } catch (e) { 24 | return Promise.reject(e) 25 | } 26 | }, 27 | // called when the user clicks on the logout button 28 | logout: () => { 29 | logout() 30 | localStorage.clear() 31 | return Promise.resolve(); 32 | }, 33 | // called when the API returns an error 34 | checkError: ({ status }: { status: number }) => { 35 | if (status === 401 || status === 403) { 36 | logout() 37 | localStorage.removeItem("token") 38 | return Promise.reject(); 39 | } 40 | return Promise.resolve(); 41 | }, 42 | // called when the user navigates to a new location, to 
check for authentication 43 | checkAuth: () => { 44 | const token = localStorage.getItem("token") 45 | if (!token) return Promise.reject() 46 | 47 | const decodeToken = jwtDecode(token!); 48 | const currentTime = Math.floor(Date.now() / 1000); 49 | 50 | if (decodeToken.exp && decodeToken.exp > currentTime) { 51 | return Promise.resolve() 52 | } 53 | return Promise.reject() 54 | }, 55 | // called when the user navigates to a new location, to check for permissions / roles 56 | getPermissions: () => { 57 | const token = localStorage.getItem("token"); 58 | if (!token) return Promise.resolve([]) 59 | 60 | const permissions = localStorage.getItem("permissions") ? JSON.parse(localStorage.getItem("permissions")!) : [] 61 | return Promise.resolve(permissions); 62 | }, 63 | }; 64 | -------------------------------------------------------------------------------- /frontend/src/components/CheckedField.tsx: -------------------------------------------------------------------------------- 1 | import CheckIcon from '@mui/icons-material/Check'; 2 | import CrossIcon from '@mui/icons-material/Clear'; 3 | import { useRecordContext, FieldProps } from 'react-admin'; 4 | 5 | const getNestedValue = (obj: any, path: string) => { 6 | return path.split('.').reduce((acc, part) => acc && acc[part], obj); 7 | }; 8 | 9 | interface CheckedFieldProps extends FieldProps { 10 | source: string; 11 | label?: string; 12 | } 13 | 14 | export const CheckedField = ({ source }: CheckedFieldProps) => { 15 | const record = useRecordContext(); 16 | const value = getNestedValue(record, source); 17 | return value ? 
<CheckIcon /> : <CrossIcon />; 18 | }; 19 | -------------------------------------------------------------------------------- /frontend/src/components/FormattedSizeField.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { useRecordContext } from 'react-admin'; 3 | 4 | const FormattedSizeField = ({ source }: { source: string }) => { 5 | const record = useRecordContext(); 6 | if (!record) return null; 7 | 8 | const value = record[source]; 9 | if (value == null) return null; // Handle undefined or null values 10 | 11 | const formattedValue = (value / (1024 * 1024)).toFixed(2); // Assuming value is in bytes and converting to MB 12 | 13 | return <span>{formattedValue} MB</span>; 14 | }; 15 | 16 | export default FormattedSizeField; 17 | -------------------------------------------------------------------------------- /frontend/src/components/FormattedTransferRateField.tsx: -------------------------------------------------------------------------------- 1 | import { useRecordContext } from 'react-admin'; 2 | import { formatTransferRate } from '../utils/formatSize'; 3 | 4 | const FormattedTransferRateField = ({ source }: { source: string }) => { 5 | const record = useRecordContext(); 6 | if (!record) return null; 7 | const rate = record[source]; 8 | if (rate === 0) return null; 9 | return <span>{formatTransferRate(rate)}</span>; 10 | }; 11 | 12 | export default FormattedTransferRateField; 13 | -------------------------------------------------------------------------------- /frontend/src/components/ProgressField.tsx: -------------------------------------------------------------------------------- 1 | import { useRecordContext } from 'react-admin'; 2 | import LinearProgress from '@mui/material/LinearProgress'; 3 | import Typography from '@mui/material/Typography'; 4 | 5 | const LinearProgressWithLabel = ({ value }: { value: number }) => { 6 | return ( 7 | <div style={{ display: 'flex', alignItems: 
'center' }}> 8 | <LinearProgress variant="determinate" value={value} style={{ width: '80%' }} /> 9 | <Typography variant="body2" style={{ marginLeft: 10 }}>{`${Math.round(value)}%`}</Typography> 10 | </div> 11 | ); 12 | }; 13 | 14 | const ProgressField = ({ source }: { source: string }) => { 15 | const record = useRecordContext(); 16 | if (!record) return null; 17 | const progress = record[source]; 18 | 19 | return ( 20 | <div style={{ width: '100%' }}> 21 | <LinearProgressWithLabel value={progress} /> 22 | </div> 23 | ); 24 | }; 25 | 26 | export default ProgressField; 27 | -------------------------------------------------------------------------------- /frontend/src/dataProvider.ts: -------------------------------------------------------------------------------- 1 | import { DataProvider } from 'react-admin'; 2 | //import jsonServerProvider from 'ra-data-json-server'; 3 | import simpleRestProvider from 'ra-data-simple-rest'; 4 | import { httpClient, apiBase } from './apiBackend'; 5 | 6 | export const dataProvider: DataProvider = simpleRestProvider(apiBase, httpClient); 7 | 8 | export default dataProvider; 9 | -------------------------------------------------------------------------------- /frontend/src/dependencies/DebianDependency.tsx: -------------------------------------------------------------------------------- 1 | // DebianDependency.tsx 2 | import { Datagrid, TextField } from 'react-admin'; 3 | 4 | export const DebianDependency = (props: { dependencies: any }) => { 5 | return ( 6 | <Datagrid data={props.dependencies} sort={{ field: 'name', order: 'ASC' }}> 7 | <TextField source="name" /> 8 | </Datagrid> 9 | ); 10 | }; 11 | -------------------------------------------------------------------------------- /frontend/src/dependencies/PythonDependency.tsx: -------------------------------------------------------------------------------- 1 | import { Button, Datagrid, TextField } from 'react-admin'; 2 | import { useState, useEffect, useRef } from 'react'; 3 | 
import { CheckedField } from '../components/CheckedField'; 4 | import GetAppIcon from '@mui/icons-material/GetApp'; 5 | import { useRecordContext, useNotify, useRefresh } from 'react-admin'; 6 | import { apiBase, httpClient } from '../apiBackend'; 7 | 8 | export const PythonDependency = (props: { dependencies: any, ability_id: string }) => { 9 | return ( 10 | <Datagrid data={props.dependencies} sort={{ field: 'name', order: 'ASC' }}> 11 | <TextField source="id" /> 12 | <TextField source="name" /> 13 | <CheckedField source="versions.satisfied" label="Satisfied" /> 14 | <TextField source="versions.required" label="Required" /> 15 | <TextField source="versions.installed" label="Installed" /> 16 | <TextField source="versions.latest" label="Latest" /> 17 | <InstallButton ability_id={props.ability_id} /> 18 | </Datagrid> 19 | ); 20 | }; 21 | 22 | const InstallButton = ({ ability_id }: { ability_id: string }) => { 23 | const record = useRecordContext(); 24 | const notify = useNotify(); 25 | const refresh = useRefresh(); 26 | const [isInstalling, setIsInstalling] = useState(false); 27 | const intervalId = useRef<NodeJS.Timeout | null>(null); 28 | 29 | useEffect(() => { 30 | if (intervalId.current) { 31 | clearInterval(intervalId.current); 32 | intervalId.current = null; 33 | } 34 | 35 | if (isInstalling) { 36 | intervalId.current = setInterval(() => { 37 | refresh(); 38 | }, 5000); 39 | } 40 | 41 | return () => { 42 | if (intervalId.current) { 43 | clearInterval(intervalId.current); 44 | } 45 | }; 46 | }, [isInstalling, refresh]); 47 | 48 | const handleInstallClick = (event: React.MouseEvent) => { 49 | event.stopPropagation(); 50 | setIsInstalling(true); 51 | 52 | httpClient(`${apiBase}/abilities/${ability_id}/dependencies/${record.id}/install`, { method: 'POST' }) 53 | .then(() => { 54 | notify('Python dependency installation requested'); 55 | //refresh(); 56 | }) 57 | .catch((e: any) => { 58 | notify('Error: Python dependency not installed', { type: 'warning' }); 59 | }) 
60 | //.finally(() => { 61 | // setIsInstalling(false); 62 | //}); 63 | }; 64 | 65 | const isLatestVersion = record.versions.installed === record.versions.latest; 66 | const buttonLabel = isInstalling ? "Installing" : (record.versions.installed ? (record.versions.satisfied ? "Install" : "Upgrade") : "Install"); 67 | 68 | if (isLatestVersion) { 69 | return null; // Hide the button if the installed version is the latest version 70 | } 71 | 72 | return ( 73 | <Button label={buttonLabel} onClick={handleInstallClick} disabled={isInstalling || record.state === 'installing'}> 74 | <GetAppIcon /> 75 | </Button> 76 | ); 77 | }; 78 | -------------------------------------------------------------------------------- /frontend/src/dependencies/ResourceDependency.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Button, Datagrid, TextField, useRecordContext, useNotify, useRefresh } from 'react-admin'; 3 | import DownloadIcon from '@mui/icons-material/Download'; 4 | import DeleteIcon from '@mui/icons-material/Delete'; 5 | import { apiBase, httpClient } from "../apiBackend"; 6 | import { useNavigate } from 'react-router-dom'; 7 | 8 | const DownloadButton = ({ ability_id }: { ability_id: string }) => { 9 | const record = useRecordContext(); 10 | const notify = useNotify(); 11 | const refresh = useRefresh(); 12 | const navigate = useNavigate(); 13 | const [isDownloading, setIsDownloading] = useState(false); 14 | 15 | const handleDownloadClick = (event: React.MouseEvent) => { 16 | // prevent the click event propagating to the row and calling show 17 | event.stopPropagation(); 18 | setIsDownloading(true); 19 | 20 | const downloadData = [ 21 | { 22 | source_url: record.source_url, 23 | file_name: record.file_name, 24 | file_hash: record.file_hash, 25 | target_directory: `abilities/${ability_id}/resource` 26 | } 27 | ]; 28 | 29 | httpClient(`${apiBase}/downloads`, { 30 | method: 'POST', 31 | 
body: JSON.stringify(downloadData) 32 | }) 33 | .then(() => { 34 | notify('Download started'); 35 | refresh(); 36 | navigate('/downloads'); // Redirect to the downloads page 37 | }) 38 | .catch((e) => { 39 | notify(e.body.message, { type: 'warning' }); 40 | setIsDownloading(false); 41 | }); 42 | }; 43 | 44 | const handleDeleteClick = (event: React.MouseEvent) => { 45 | // prevent the click event propagating to the row and calling show 46 | event.stopPropagation(); 47 | 48 | httpClient(`${apiBase}/abilities/${ability_id}/dependencies/${record.id}/download/delete`, { method: 'POST' }) 49 | .then(() => { 50 | notify('Download deletion requested'); 51 | refresh(); 52 | }) 53 | .catch((e) => { 54 | notify(e.body.message, { type: 'warning' }); 55 | }); 56 | }; 57 | 58 | // file is downloaded, show delete button 59 | if ((record.localSize || 0) === (record.file_size || 0)) { 60 | return ( 61 | <Button label="Delete" onClick={handleDeleteClick}> 62 | <DeleteIcon /> 63 | </Button> 64 | ); 65 | } 66 | 67 | // file is not downloaded, show download button 68 | return ( 69 | <Button label="Download" onClick={handleDownloadClick} disabled={isDownloading}> 70 | <DownloadIcon /> 71 | </Button> 72 | ); 73 | }; 74 | 75 | export const ResourceDependency = (props: { dependencies: any }) => { 76 | const record = useRecordContext(); 77 | const ability_id = String(record.id); 78 | 79 | return ( 80 | <Datagrid data={props.dependencies} sort={{ field: 'name', order: 'ASC' }}> 81 | <TextField source="name" /> 82 | <TextField source="file_name" /> 83 | <TextField source="source_url" /> 84 | <DownloadButton ability_id={ability_id} /> 85 | </Datagrid> 86 | ); 87 | }; 88 | -------------------------------------------------------------------------------- /frontend/src/downloads.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useRef, useState } from 'react'; 2 | import { Link } from 'react-router-dom'; 3 | import { List, Datagrid, 
TextField, TextInput, useRecordContext, useNotify, useRefresh, Button } from 'react-admin'; 4 | import PauseIcon from '@mui/icons-material/Pause'; 5 | import PlayArrowIcon from '@mui/icons-material/PlayArrow'; 6 | import DeleteIcon from '@mui/icons-material/Delete'; 7 | import { apiBase, httpClient } from "./apiBackend"; 8 | import FormattedSizeField from './components/FormattedSizeField'; 9 | import FormattedTransferRateField from './components/FormattedTransferRateField'; 10 | import ProgressField from './components/ProgressField'; 11 | 12 | interface Download { 13 | id: string; 14 | source_url: string; 15 | file_name: string; 16 | target_directory: string; 17 | file_size: number; 18 | file_hash: string; 19 | downloaded: number; 20 | progress: number; 21 | status: string; 22 | } 23 | 24 | const downloadFilters = [ 25 | <TextInput source="q" label="Search" alwaysOn />, 26 | ]; 27 | 28 | const DownloadActions = ({ refresh }: { refresh: () => void }) => { 29 | const record = useRecordContext<Download>(); 30 | const notify = useNotify(); 31 | 32 | //console.log(record) 33 | 34 | const handlePauseClick = (id: string) => { 35 | httpClient(`${apiBase}/downloads/${encodeURIComponent(id)}/pause`, { method: 'POST' }) 36 | .then(() => { 37 | notify('Download paused'); 38 | refresh(); 39 | }) 40 | .catch(() => notify('Error: could not pause download', { type: 'warning' })); 41 | }; 42 | 43 | const handleResumeClick = (id: string) => { 44 | httpClient(`${apiBase}/downloads/${encodeURIComponent(id)}/resume`, { method: 'POST' }) 45 | .then(() => { 46 | notify('Download resumed'); 47 | refresh(); 48 | }) 49 | .catch(() => notify('Error: could not resume download', { type: 'warning' })); 50 | }; 51 | 52 | const handleDeleteClick = (id: string) => { 53 | httpClient(`${apiBase}/downloads/${encodeURIComponent(id)}`, { method: 'DELETE' }) 54 | .then(() => { 55 | notify('Download deleted'); 56 | refresh(); 57 | }) 58 | .catch(() => notify('Error: could not delete download', { type: 
'warning' })); 59 | }; 60 | 61 | return ( 62 | <div> 63 | {record.status === 'downloading' && ( 64 | <Button label="Pause" onClick={() => handlePauseClick(record.id)}> 65 | <PauseIcon /> 66 | </Button> 67 | )} 68 | {record.status === 'paused' && ( 69 | <Button label="Resume" onClick={() => handleResumeClick(record.id)}> 70 | <PlayArrowIcon /> 71 | </Button> 72 | )} 73 | <Button label="Delete" onClick={() => handleDeleteClick(record.id)}> 74 | <DeleteIcon /> 75 | </Button> 76 | </div> 77 | ); 78 | }; 79 | 80 | export const DownloadsList = () => { 81 | const refresh = useRefresh(); 82 | const notify = useNotify(); 83 | const intervalId = useRef<NodeJS.Timeout | null>(null); 84 | const [hasError, setHasError] = useState(false); 85 | 86 | useEffect(() => { 87 | const refreshWithErrorHandling = async () => { 88 | try { 89 | await refresh(); 90 | } catch (error) { 91 | notify('Error: could not refresh downloads', { type: 'warning' }); 92 | setHasError(true); 93 | } 94 | }; 95 | 96 | if (!hasError) { 97 | intervalId.current = setInterval(() => { 98 | refreshWithErrorHandling(); 99 | }, 2000); 100 | } 101 | 102 | return () => { 103 | if (intervalId.current) { 104 | clearInterval(intervalId.current); 105 | } 106 | }; 107 | }, [refresh, hasError, notify]); 108 | 109 | return ( 110 | <List filters={downloadFilters}> 111 | <Datagrid rowClick="edit"> 112 | <TextField source="file_name" /> 113 | <TextField source="target_directory" /> 114 | <FormattedSizeField source="downloaded" /> 115 | <FormattedSizeField source="file_size" /> 116 | <FormattedTransferRateField source="transfer_rate" /> 117 | <ProgressField source="progress" /> 118 | <TextField source="status" /> 119 | <DownloadActions refresh={refresh} /> 120 | </Datagrid> 121 | </List> 122 | ); 123 | }; 124 | 125 | export default DownloadsList; 126 | -------------------------------------------------------------------------------- /frontend/src/index.tsx: 
-------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactDOM from "react-dom/client"; 3 | import { App } from "./App"; 4 | 5 | ReactDOM.createRoot(document.getElementById("root")!).render( 6 | <React.StrictMode> 7 | <App /> 8 | </React.StrictMode> 9 | ); 10 | -------------------------------------------------------------------------------- /frontend/src/resources.tsx: -------------------------------------------------------------------------------- 1 | import { List, Datagrid, TextField, UrlField, Show, SimpleShowLayout } from 'react-admin'; 2 | 3 | export const ChannelList = () => ( 4 | <List> 5 | <Datagrid rowClick="show"> 6 | <TextField source="id" /> 7 | <TextField source="name" /> 8 | <UrlField source="uri" /> 9 | </Datagrid> 10 | </List> 11 | ); 12 | 13 | export const ChannelShow = () => ( 14 | <Show> 15 | <SimpleShowLayout> 16 | <TextField source="id" /> 17 | <TextField source="name" /> 18 | <UrlField source="uri" /> 19 | </SimpleShowLayout> 20 | </Show> 21 | ); 22 | -------------------------------------------------------------------------------- /frontend/src/shares.tsx: -------------------------------------------------------------------------------- 1 | import { useRecordContext } from "react-admin"; 2 | import { Create, Edit, List, Show, SimpleForm, SimpleShowLayout, Datagrid, TextField, TextInput, DateField, DateTimeInput, BooleanField, BooleanInput, ReferenceField, ReferenceInput, SelectInput } from "react-admin"; 3 | 4 | const ShareTitle = () => { 5 | const record = useRecordContext(); 6 | return <span>Shares {record ? 
// Filters for the share list: free-text search plus a resource picker.
// Elements of a react-admin filter array are rendered from an array, so
// each needs a unique `key` prop to avoid React's missing-key warning.
const shareFilters = [
  <TextInput key="q" source="q" label="Search" alwaysOn />,
  <ReferenceInput key="resource_id" source="resource_id" label="Resource" reference="resources">
    <SelectInput optionText="name" />
  </ReferenceInput>,
];
-------------------------------------------------------------------------------- 1 | import { useMediaQuery, Theme } from "@mui/material"; 2 | import { Create, Edit, EditButton, DeleteButton, List, SimpleList, Show, ShowButton, SimpleForm, SimpleShowLayout, Datagrid, TextField, TextInput, EmailField, SelectInput, useRecordContext, usePermissions } from "react-admin"; 3 | import { hasAccess } from "./utils/authUtils"; 4 | 5 | const UserTitle = () => { 6 | const record = useRecordContext(); 7 | return <span>Users {record ? `- ${record.name}` : ""}</span>; 8 | }; 9 | 10 | const roleChoices = [ 11 | { id: 'user', name: 'User' }, 12 | { id: 'admin', name: 'Admin' }, 13 | ]; 14 | 15 | export const UserList = () => { 16 | const isSmall = useMediaQuery<Theme>((theme) => theme.breakpoints.down("sm")); 17 | const { permissions } = usePermissions() 18 | return ( 19 | <List> 20 | {isSmall ? ( 21 | <SimpleList 22 | primaryText={(record) => record.name} 23 | secondaryText={(record) => record.email} 24 | tertiaryText={(record) => record.role} 25 | /> 26 | ) : ( 27 | <Datagrid rowClick="edit"> 28 | <TextField source="name" /> 29 | <EmailField source="email" /> 30 | <TextField source="role" /> 31 | {hasAccess("users", "show", permissions) && <ShowButton />} 32 | {hasAccess("users", "edit", permissions) && <EditButton />} 33 | {hasAccess("users", "delete", permissions) && <DeleteButton />} 34 | </Datagrid> 35 | )} 36 | </List> 37 | ); 38 | }; 39 | 40 | export const UserShow = () => ( 41 | <Show title={<UserTitle />}> 42 | <SimpleShowLayout> 43 | <TextField source="id" /> 44 | <TextField source="name" /> 45 | <EmailField source="email" /> 46 | <TextField source="role" /> 47 | </SimpleShowLayout> 48 | </Show> 49 | ); 50 | 51 | export const UserEdit = () => ( 52 | <Edit title={<UserTitle />}> 53 | <SimpleForm> 54 | <TextInput source="name" /> 55 | <TextInput source="email" /> 56 | <SelectInput source="role" choices={roleChoices} /> 57 | </SimpleForm> 58 | </Edit> 59 | ); 60 | 61 | 
export const UserCreate = () => ( 62 | <Create redirect="show"> 63 | <SimpleForm> 64 | <TextInput source="name" /> 65 | <TextInput source="email" /> 66 | <SelectInput source="role" choices={roleChoices} defaultValue="user" /> 67 | </SimpleForm> 68 | </Create> 69 | ); 70 | -------------------------------------------------------------------------------- /frontend/src/utils/authUtils.ts: -------------------------------------------------------------------------------- 1 | export type ResourcePermissions = { 2 | [key: string]: string[]; 3 | }; 4 | 5 | export const hasAccess = ( 6 | resourceId: string, 7 | action: string, 8 | permissions: ResourcePermissions 9 | ) => { 10 | const resource = permissions[resourceId] || permissions["ALL"]; 11 | if (!resource) return false; 12 | const hasAccess = resource.includes(action); 13 | return hasAccess; 14 | }; 15 | -------------------------------------------------------------------------------- /frontend/src/utils/formatSize.ts: -------------------------------------------------------------------------------- 1 | export const formatSize = (bytes: number): string => { 2 | const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']; 3 | let unitIndex = 0; 4 | let size = bytes; 5 | 6 | while (size >= 1024 && unitIndex < units.length - 1) { 7 | size /= 1024; 8 | unitIndex++; 9 | } 10 | 11 | return `${size.toFixed(2)} ${units[unitIndex]}`; 12 | }; 13 | 14 | export const formatTransferRate = (bytesPerSecond: number): string => { 15 | return `${formatSize(bytesPerSecond)}/s`; 16 | }; 17 | -------------------------------------------------------------------------------- /frontend/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// <reference types="vite/client" /> 2 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": 
"es5", 4 | "lib": [ 5 | "dom", 6 | "dom.iterable", 7 | "esnext" 8 | ], 9 | "allowJs": true, 10 | "skipLibCheck": true, 11 | "esModuleInterop": true, 12 | "allowSyntheticDefaultImports": true, 13 | "strict": true, 14 | "forceConsistentCasingInFileNames": true, 15 | "noFallthroughCasesInSwitch": true, 16 | "module": "esnext", 17 | "moduleResolution": "node", 18 | "resolveJsonModule": true, 19 | "isolatedModules": true, 20 | "noEmit": true, 21 | "jsx": "react-jsx" 22 | }, 23 | "include": [ 24 | "src" 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /frontend/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite'; 2 | import react from '@vitejs/plugin-react'; 3 | 4 | // https://vitejs.dev/config/ 5 | export default defineConfig({ 6 | plugins: [react()], 7 | define: { 8 | 'process.env': process.env, 9 | }, 10 | server: { 11 | host: true, 12 | }, 13 | base: './', 14 | }); 15 | -------------------------------------------------------------------------------- /migrations/README.md: -------------------------------------------------------------------------------- 1 | # Alembic 2 | 3 | Alembic is used to manage database versioning using migrations. 
4 | 5 | ## Upgrade schema 6 | 7 | `alembic upgrade head` 8 | 9 | ## Downgrade schema 10 | 11 | `alembic downgrade -1` 12 | 13 | ## Update schema 14 | 15 | Update backend/models.py then run: 16 | 17 | `alembic revision --autogenerate -m "added asset table"` 18 | -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import engine_from_config 2 | from sqlalchemy import pool 3 | from pathlib import Path 4 | from sqlmodel import SQLModel 5 | from pathlib import Path 6 | import sys 7 | 8 | # add the backend directory to the python path 9 | repo_root = Path(__file__).resolve().parent.parent 10 | #backend_path = repo_root / 'backend' 11 | sys.path.append(str(repo_root)) 12 | 13 | # get the db path from the backend 14 | from common.paths import db_path 15 | from backend.db import SQLModelBase 16 | import backend.models 17 | 18 | from alembic import context 19 | 20 | # this is the Alembic Config object, which provides 21 | # access to the values within the .ini file in use. 22 | config = context.config 23 | 24 | #migrations_dir = Path(__file__).resolve().parent 25 | #data_dir = migrations_dir.parent / "data" 26 | 27 | config.set_main_option("sqlalchemy.url", f"sqlite:///{db_path}") 28 | 29 | # add your model's MetaData object here 30 | # for 'autogenerate' support 31 | # from myapp import mymodel 32 | # target_metadata = mymodel.Base.metadata 33 | target_metadata = SQLModelBase.metadata 34 | 35 | # other values from the config, defined by the needs of env.py, 36 | # can be acquired: 37 | # my_important_option = config.get_main_option("my_important_option") 38 | # ... etc. 39 | 40 | 41 | def run_migrations_offline() -> None: 42 | """Run migrations in 'offline' mode. 43 | 44 | This configures the context with just a URL 45 | and not an Engine, though an Engine is acceptable 46 | here as well. 
By skipping the Engine creation 47 | we don't even need a DBAPI to be available. 48 | 49 | Calls to context.execute() here emit the given string to the 50 | script output. 51 | 52 | """ 53 | url = config.get_main_option("sqlalchemy.url") 54 | context.configure( 55 | url=url, 56 | target_metadata=target_metadata, 57 | literal_binds=True, 58 | dialect_opts={"paramstyle": "named"}, 59 | include_schemas=True, 60 | ) 61 | 62 | with context.begin_transaction(): 63 | context.run_migrations() 64 | 65 | 66 | def run_migrations_online() -> None: 67 | """Run migrations in 'online' mode. 68 | 69 | In this scenario we need to create an Engine 70 | and associate a connection with the context. 71 | 72 | """ 73 | connectable = engine_from_config( 74 | config.get_section(config.config_ini_section, {}), 75 | prefix="sqlalchemy.", 76 | poolclass=pool.NullPool, 77 | ) 78 | 79 | with connectable.connect() as connection: 80 | context.configure( 81 | connection=connection, 82 | target_metadata=target_metadata, 83 | include_schemas=True, 84 | ) 85 | 86 | with context.begin_transaction(): 87 | context.run_migrations() 88 | 89 | 90 | if context.is_offline_mode(): 91 | run_migrations_offline() 92 | else: 93 | run_migrations_online() 94 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | ${imports if imports else ""} 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = ${repr(up_revision)} 17 | down_revision: Union[str, None] = ${repr(down_revision)} 18 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 19 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 20 | 21 | 22 | def upgrade() -> None: 23 | ${upgrades if upgrades else "pass"} 24 | 25 | 26 | def downgrade() -> None: 27 | ${downgrades if downgrades else "pass"} 28 | -------------------------------------------------------------------------------- /migrations/versions/008645bff529_add_role_to_user_model.py: -------------------------------------------------------------------------------- 1 | """add role to user model 2 | 3 | Revision ID: 008645bff529 4 | Revises: 1128b8cc9a3d 5 | Create Date: 2024-11-06 22:35:51.811621 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 16 | revision: str = '008645bff529' 17 | down_revision: Union[str, None] = '1128b8cc9a3d' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | # ### commands auto generated by Alembic - please adjust! ### 24 | op.add_column('user', sa.Column('role', sqlmodel.sql.sqltypes.AutoString(), nullable=False)) 25 | # ### end Alembic commands ### 26 | 27 | 28 | def downgrade() -> None: 29 | # ### commands auto generated by Alembic - please adjust! 
### 30 | op.drop_column('user', 'role') 31 | # ### end Alembic commands ### 32 | -------------------------------------------------------------------------------- /migrations/versions/0d66a93c6c1f_added_user_table.py: -------------------------------------------------------------------------------- 1 | """Added user table 2 | 3 | Revision ID: 0d66a93c6c1f 4 | Revises: 75aaaf2cd1a2 5 | Create Date: 2024-05-08 10:49:22.471591 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 16 | revision: str = '0d66a93c6c1f' 17 | down_revision: Union[str, None] = '75aaaf2cd1a2' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('user', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('webauthn_user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False, unique=True), 26 | sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 27 | sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=False, unique=True), 28 | sa.PrimaryKeyConstraint('id') 29 | ) 30 | 31 | 32 | def downgrade() -> None: 33 | op.drop_table('user') 34 | -------------------------------------------------------------------------------- /migrations/versions/1128b8cc9a3d_new_column_added_in_user_table.py: -------------------------------------------------------------------------------- 1 | """new column added in user table 2 | 3 | Revision ID: 1128b8cc9a3d 4 | Revises: 187855982332 5 | Create Date: 2024-10-23 18:47:42.217492 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = '1128b8cc9a3d' 17 | down_revision: Union[str, None] = '187855982332' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | def upgrade() -> None: 22 | # Handle 'asset' table foreign key creation with a name for the constraint 23 | with op.batch_alter_table('asset', schema=None) as batch_op: 24 | batch_op.create_foreign_key('fk_asset_user_id', 'user', ['user_id'], ['id']) # Added a name for the foreign key 25 | 26 | # Handle 'config' table alteration 27 | with op.batch_alter_table('config', schema=None) as batch_op: 28 | batch_op.alter_column('value', 29 | existing_type=sa.VARCHAR(), 30 | nullable=True) 31 | 32 | # Handle 'cred' table alteration 33 | with op.batch_alter_table('cred', schema=None) as batch_op: 34 | batch_op.alter_column('transports', 35 | existing_type=sa.VARCHAR(), 36 | nullable=False) 37 | 38 | # Handle 'persona' table alterations 39 | with op.batch_alter_table('persona', schema=None) as batch_op: 40 | batch_op.alter_column('description', 41 | existing_type=sa.VARCHAR(), 42 | nullable=True) 43 | batch_op.alter_column('voice_id', 44 | existing_type=sa.VARCHAR(), 45 | nullable=True) 46 | batch_op.alter_column('face_id', 47 | existing_type=sa.VARCHAR(), 48 | nullable=True) 49 | 50 | # Handle 'user' table alterations and additions 51 | with op.batch_alter_table('user', schema=None) as batch_op: 52 | batch_op.add_column(sa.Column('emailVerified', sa.Boolean(), nullable=False)) 53 | batch_op.alter_column('name', 54 | existing_type=sa.VARCHAR(), 55 | nullable=True) 56 | 57 | 58 | 59 | def downgrade() -> None: 60 | # ### commands auto generated by Alembic - please adjust! 
### 61 | op.alter_column('user', 'name', 62 | existing_type=sa.VARCHAR(), 63 | nullable=False) 64 | op.drop_column('user', 'emailVerified') 65 | op.alter_column('persona', 'face_id', 66 | existing_type=sa.VARCHAR(), 67 | nullable=False) 68 | op.alter_column('persona', 'voice_id', 69 | existing_type=sa.VARCHAR(), 70 | nullable=False) 71 | op.alter_column('persona', 'description', 72 | existing_type=sa.VARCHAR(), 73 | nullable=False) 74 | op.alter_column('cred', 'transports', 75 | existing_type=sa.VARCHAR(), 76 | nullable=True) 77 | op.alter_column('config', 'value', 78 | existing_type=sa.VARCHAR(), 79 | nullable=False) 80 | op.drop_constraint(None, 'asset', type_='foreignkey') 81 | # ### end Alembic commands ### 82 | -------------------------------------------------------------------------------- /migrations/versions/187855982332_added_session_table.py: -------------------------------------------------------------------------------- 1 | """Added session table 2 | 3 | Revision ID: 187855982332 4 | Revises: 91d051f98616 5 | Create Date: 2024-10-02 01:32:40.785430 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = '187855982332' 17 | down_revision: Union[str, None] = '91d051f98616' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('session', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 26 | sa.Column('token', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 27 | sa.Column('expires_at', sa.DateTime(), nullable=False), 28 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), 29 | sa.PrimaryKeyConstraint('id') 30 | ) 31 | 32 | 33 | def downgrade() -> None: 34 | op.drop_table('asset') 35 | -------------------------------------------------------------------------------- /migrations/versions/4ce11e8569dc_added_share_table.py: -------------------------------------------------------------------------------- 1 | """added share table 2 | 3 | Revision ID: 4ce11e8569dc 4 | Revises: f5235ab5e888 5 | Create Date: 2024-08-22 15:52:22.967260 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = '4ce11e8569dc' 17 | down_revision: Union[str, None] = 'f5235ab5e888' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('share', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('resource_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 26 | sa.Column('user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 27 | sa.Column('expiration_dt', sa.DateTime(), nullable=True), 28 | sa.Column('is_revoked', sa.Boolean(), nullable=False), 29 | sa.ForeignKeyConstraint(['resource_id'], ['resource.id'], ), 30 | sa.PrimaryKeyConstraint('id') 31 | ) 32 | 33 | 34 | def downgrade() -> None: 35 | op.drop_table('share') 36 | -------------------------------------------------------------------------------- /migrations/versions/56a640fb45b2_added_config_table.py: -------------------------------------------------------------------------------- 1 | """Config model 2 | 3 | Revision ID: 56a640fb45b2 4 | Revises: 5 | Create Date: 2024-05-06 22:36:16.566789 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = '56a640fb45b2' 17 | down_revision: Union[str, None] = None 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('config', 24 | sa.Column('key', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('value', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 26 | sa.PrimaryKeyConstraint('key') 27 | ) 28 | 29 | 30 | def downgrade() -> None: 31 | op.drop_table('config') 32 | -------------------------------------------------------------------------------- /migrations/versions/75aaaf2cd1a2_added_resource_table.py: -------------------------------------------------------------------------------- 1 | """Added resource table 2 | 3 | Revision ID: 75aaaf2cd1a2 4 | Revises: 56a640fb45b2 5 | Create Date: 2024-05-07 21:48:08.883504 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = '75aaaf2cd1a2' 17 | down_revision: Union[str, None] = '56a640fb45b2' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('resource', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 26 | sa.Column('uri', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 27 | sa.PrimaryKeyConstraint('id') 28 | ) 29 | 30 | 31 | def downgrade() -> None: 32 | op.drop_table('resource') 33 | -------------------------------------------------------------------------------- /migrations/versions/91d051f98616_added_cred_table.py: -------------------------------------------------------------------------------- 1 | """Added cred table 2 | 3 | Revision ID: 91d051f98616 4 | Revises: 4ce11e8569dc 5 | Create Date: 2024-10-02 01:31:14.538904 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = '91d051f98616' 17 | down_revision: Union[str, None] = '4ce11e8569dc' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('cred', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('public_key', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 26 | sa.Column('webauthn_user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 27 | sa.Column('backed_up', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 28 | sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 29 | sa.Column('transports', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 30 | sa.ForeignKeyConstraint(['webauthn_user_id'], ['user.webauthn_user_id'], ), 31 | sa.PrimaryKeyConstraint('id') 32 | ) 33 | 34 | 35 | def downgrade() -> None: 36 | op.drop_table('asset') 37 | 38 | -------------------------------------------------------------------------------- /migrations/versions/cb6e97a5186c_added_asset_table.py: -------------------------------------------------------------------------------- 1 | """added asset table 2 | 3 | Revision ID: cb6e97a5186c 4 | Revises: 0d66a93c6c1f 5 | Create Date: 2024-05-28 11:18:05.583239 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 
16 | revision: str = 'cb6e97a5186c' 17 | down_revision: Union[str, None] = '0d66a93c6c1f' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('asset', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 26 | sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 27 | sa.Column('creator', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 28 | sa.Column('subject', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 29 | sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 30 | sa.PrimaryKeyConstraint('id') 31 | ) 32 | 33 | 34 | def downgrade() -> None: 35 | op.drop_table('asset') 36 | -------------------------------------------------------------------------------- /migrations/versions/e7cfcff87b8e_remove_role_from_user_table.py: -------------------------------------------------------------------------------- 1 | """remove role from user table 2 | 3 | Revision ID: e7cfcff87b8e 4 | Revises: 008645bff529 5 | Create Date: 2024-12-23 18:37:01.454972 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 16 | revision: str = 'e7cfcff87b8e' 17 | down_revision: Union[str, None] = '008645bff529' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | # ### commands auto generated by Alembic - please adjust! ### 24 | op.drop_column('user', 'role') 25 | # ### end Alembic commands ### 26 | 27 | 28 | def downgrade() -> None: 29 | # ### commands auto generated by Alembic - please adjust! 
### 30 | op.add_column('user', sa.Column('role', sa.VARCHAR(), nullable=False)) 31 | # ### end Alembic commands ### 32 | -------------------------------------------------------------------------------- /migrations/versions/f5235ab5e888_added_persona_table.py: -------------------------------------------------------------------------------- 1 | """added persona table 2 | 3 | Revision ID: f5235ab5e888 4 | Revises: cb6e97a5186c 5 | Create Date: 2024-07-16 15:43:22.600859 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | import sqlmodel 13 | 14 | 15 | # revision identifiers, used by Alembic. 16 | revision: str = 'f5235ab5e888' 17 | down_revision: Union[str, None] = 'cb6e97a5186c' 18 | branch_labels: Union[str, Sequence[str], None] = None 19 | depends_on: Union[str, Sequence[str], None] = None 20 | 21 | 22 | def upgrade() -> None: 23 | op.create_table('persona', 24 | sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 25 | sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 26 | sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 27 | sa.Column('voice_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 28 | sa.Column('face_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 29 | sa.PrimaryKeyConstraint('id') 30 | ) 31 | 32 | 33 | def downgrade() -> None: 34 | op.drop_table('persona') 35 | -------------------------------------------------------------------------------- /scripts/remove_environment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | import shutil 4 | from pathlib import Path 5 | 6 | # Ensure the parent directory is in sys.path so relative imports work. 
7 | base_dir = Path(__file__).parent.parent 8 | if base_dir not in sys.path: 9 | sys.path.append(str(base_dir)) 10 | 11 | ignore_errors = True 12 | 13 | # Check if the current environment is the one we're about to delete 14 | venv_path = base_dir / '.venv' 15 | if sys.prefix == str(venv_path): 16 | print("You are currently in the virtual environment that you're trying to delete. Please deactivate it first.") 17 | sys.exit(1) 18 | 19 | # Remove the virtual environment directory and node_modules 20 | shutil.rmtree(venv_path, ignore_errors=ignore_errors) 21 | shutil.rmtree(base_dir / 'frontend' / 'node_modules', ignore_errors=ignore_errors) 22 | -------------------------------------------------------------------------------- /scripts/setup_environment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import subprocess 3 | import sys 4 | import os 5 | import shutil 6 | from pathlib import Path 7 | 8 | # Ensure the parent directory is in sys.path so relative imports work. 9 | base_dir = Path(__file__).parent.parent 10 | if base_dir not in sys.path: 11 | sys.path.append(str(base_dir)) 12 | 13 | from common.paths import base_dir, venv_dir, backend_dir, frontend_dir, env_file 14 | 15 | # Determine the correct path for the Python executable based on the OS 16 | if os.name == 'nt': # Windows 17 | venv_python = venv_dir / 'Scripts' / 'python' 18 | else: # POSIX (Linux, macOS, etc.) 
19 | venv_python = venv_dir / 'bin' / 'python' 20 | 21 | def setup_backend(): 22 | print("Setting up the backend environment...") 23 | # Use the system Python to create the virtual environment 24 | subprocess.run([sys.executable, "-m", "venv", str(venv_dir)], check=True) 25 | # Upgrade pip 26 | subprocess.run([str(venv_python), "-m", "pip", "install", "--upgrade", "pip"], check=True) 27 | # Use the Python executable from the virtual environment to install dependencies 28 | subprocess.run([str(venv_python), "-m", "pip", "install", "-r", str(backend_dir / "requirements.txt")], check=True) 29 | 30 | def build_frontend(): 31 | print("Setting up the frontend environment...") 32 | npm_path = shutil.which("npm") 33 | if npm_path: 34 | current_dir = os.getcwd() 35 | os.chdir(frontend_dir) 36 | subprocess.run([npm_path, "install"], check=True) 37 | subprocess.run([npm_path, "run", "build"], check=True) 38 | os.chdir(current_dir) 39 | else: 40 | print("Skipped as npm command not found.") 41 | print("Download Node.js to build the frontend or use a prebuilt version (e.g. canary branch): https://nodejs.org/en/download") 42 | 43 | def setup_vscode(): 44 | print("Setting up VSCode configuration...") 45 | vscode_dir = base_dir / '.vscode' 46 | vscode_dir.mkdir(exist_ok=True) 47 | 48 | sample_files = list(vscode_dir.glob('*.sample')) 49 | for sample_file in sample_files: 50 | target_file = vscode_dir / sample_file.stem 51 | if not target_file.exists(): 52 | shutil.copy(sample_file, target_file) 53 | print(f"Copied {sample_file} to {target_file}") 54 | 55 | def main(): 56 | setup_backend() 57 | build_frontend() 58 | setup_vscode() 59 | 60 | print("Setup complete.") 61 | 62 | if __name__ == "__main__": 63 | main() 64 | --------------------------------------------------------------------------------