├── .github └── workflows │ ├── app.yaml │ ├── frontend.yaml │ └── scraping.yaml ├── .gitignore ├── .gitlint ├── .node-version ├── .pre-commit-config.yaml ├── Makefile ├── README.md ├── alembic.ini ├── app ├── __init__.py ├── conftest.py ├── database.py ├── dependencies.py ├── factories.py ├── index.py ├── models.py ├── scrape.py ├── source_graph │ ├── __init__.py │ ├── client.py │ ├── factories.py │ ├── mapper.py │ ├── models.py │ └── tests │ │ ├── __init__.py │ │ ├── test_client.py │ │ └── test_mapper.py ├── tests │ ├── __init__.py │ └── test_database.py ├── types.py └── uow.py ├── db.sqlite3 ├── dependencies_index.json ├── frontend ├── .env ├── .eslintrc.json ├── .gitignore ├── .prettierignore ├── .prettierrc ├── README.md ├── components.json ├── next.config.js ├── package.json ├── pnpm-lock.yaml ├── postcss.config.js ├── public │ ├── next.svg │ └── vercel.svg ├── src │ ├── app │ │ ├── columns.tsx │ │ ├── data-table.tsx │ │ ├── dependencies-search-provider.tsx │ │ ├── favicon.ico │ │ ├── globals.css │ │ ├── layout.tsx │ │ ├── page.tsx │ │ ├── repos-search-provider.tsx │ │ ├── repos-table.tsx │ │ ├── search-form.tsx │ │ └── search-provider.tsx │ ├── components │ │ └── ui │ │ │ ├── badge.tsx │ │ │ ├── button.tsx │ │ │ ├── card.tsx │ │ │ ├── command.tsx │ │ │ ├── dialog.tsx │ │ │ ├── form.tsx │ │ │ ├── icons.tsx │ │ │ ├── input.tsx │ │ │ ├── label.tsx │ │ │ ├── multiselect.tsx │ │ │ └── table.tsx │ └── lib │ │ ├── hooks.ts │ │ ├── indexes.ts │ │ ├── query-params.ts │ │ ├── schemas.ts │ │ ├── search.ts │ │ └── utils.ts ├── tailwind.config.ts └── tsconfig.json ├── migrations ├── README ├── __init__.py ├── env.py ├── script.py.mako └── versions │ ├── 90eb9d1f9267_set_up_the_database.py │ ├── __init__.py │ └── ac7c35039d70_add_a_last_checked_revision_column.py ├── pyproject.toml ├── repos_index.json └── requirements ├── base.txt ├── dev.txt └── test.txt /.github/workflows/app.yaml: -------------------------------------------------------------------------------- 1 | name: Python App Quality and Testing 2 | 3 | on: [push] 4 | 5 | 6 | jobs: 7 | quality: 8 | runs-on: ubuntu-latest 9 | strategy: 10 | matrix: 11 | python-version: [3.11] 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python 15 | uses: actions/setup-python@v4 16 | with: # https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching-packages 17 | python-version: ${{ matrix.python-version }} 18 | cache: "pip" 19 | cache-dependency-path: "requirements/dev.txt" 20 | - name: Install dev dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | python -m pip install -r requirements/dev.txt 24 | - name: Lint with ruff 25 | run: | 26 | python -m ruff check --verbose --format=github . 27 | - name: Lint with mypy 28 | run: | 29 | python -m mypy --show-error-codes --pretty --show-column-numbers --show-error-context . 30 | - name: Lint with black 31 | run: | 32 | python -m black --check --verbose . 33 | - name: Lint with pyproject-fmt 34 | run: | 35 | python -m pyproject_fmt --check --indent=4 . 
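  # The linters run here (ruff, mypy, black, pyproject-fmt) are also wired into
  # pre-commit (see .pre-commit-config.yaml), so `make lint` should reproduce
  # this job's results locally before pushing.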
36 | test: 37 | needs: [quality] 38 | runs-on: ubuntu-latest 39 | strategy: 40 | matrix: 41 | python-version: [3.11] 42 | steps: 43 | - uses: actions/checkout@v3 44 | - name: Set up Python 45 | uses: actions/setup-python@v4 46 | with: # https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching-packages 47 | python-version: ${{ matrix.python-version }} 48 | cache: "pip" 49 | cache-dependency-path: "requirements/test.txt" 50 | - name: Install test dependencies 51 | run: | 52 | python -m pip install --upgrade pip 53 | python -m pip install -r requirements/test.txt 54 | - name: Test with pytest 55 | run: | 56 | python -m pytest -v -s --failed-first --cov=app --cov-report=xml --cov-branch 57 | - name: Generate Coverage Report 58 | run: | 59 | python -m coverage report -m 60 | - name: Upload coverage to Codecov 61 | env: 62 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 63 | if: ${{ env.CODECOV_TOKEN }} 64 | uses: codecov/codecov-action@v1 65 | with: 66 | token: ${{ secrets.CODECOV_TOKEN }} 67 | file: coverage.xml 68 | name: python ${{ matrix.python-version }} 69 | -------------------------------------------------------------------------------- /.github/workflows/frontend.yaml: -------------------------------------------------------------------------------- 1 | # Sample workflow for building and deploying a Next.js site to GitHub Pages 2 | # 3 | # To get started with Next.js see: https://nextjs.org/docs/getting-started 4 | # 5 | name: Build and Deploy Next.js to GitHub Pages 6 | 7 | on: 8 | # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run 9 | workflow_run: 10 | workflows: 11 | - "Scraping the repositories from Source Graph" 12 | - "Python App Quality and Testing" 13 | branches: [master] 14 | types: 15 | - completed 16 | # Allows you to run this workflow manually from the Actions tab 17 | # https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow 18 | # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch 19 | workflow_dispatch: 20 | # Allows to run this workflow on push events to the master branch 21 | # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#using-activity-types-and-filters-with-multiple-events 22 | push: 23 | branches: [master] 24 | page_build: 25 | 26 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 27 | permissions: 28 | contents: read 29 | pages: write 30 | id-token: write 31 | 32 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 33 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 
34 | concurrency: 35 | group: "pages" 36 | cancel-in-progress: false 37 | 38 | jobs: 39 | # Build job 40 | build: 41 | runs-on: ubuntu-latest 42 | # Use ``frontend`` as the working directory 43 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_iddefaultsrun 44 | defaults: 45 | run: 46 | working-directory: ./frontend 47 | strategy: 48 | matrix: 49 | node-version: [18.17.1] 50 | steps: 51 | - name: Checkout 52 | uses: actions/checkout@v3 53 | - name: Set up pnpm 54 | uses: pnpm/action-setup@v2 55 | with: 56 | version: 8 57 | - name: Setup Node.js 58 | uses: actions/setup-node@v3 59 | with: 60 | node-version: ${{ matrix.node-version }} 61 | cache: "pnpm" 62 | cache-dependency-path: "./frontend/pnpm-lock.yaml" 63 | - name: Install dependencies 64 | run: pnpm install 65 | - name: Setup Pages 66 | uses: actions/configure-pages@v3 67 | with: 68 | # Automatically inject basePath in your Next.js configuration file and disable 69 | # server side image optimization (https://nextjs.org/docs/api-reference/next/image#unoptimized). 70 | # 71 | # You may remove this line if you want to manage the configuration yourself. 72 | static_site_generator: next 73 | - name: Build with Next.js 74 | run: pnpm next build 75 | - name: Static HTML export with Next.js 76 | run: pnpm next export 77 | - name: Upload artifact 78 | if: ${{ !env.ACT }} # skip during local actions testing 79 | uses: actions/upload-pages-artifact@v2 80 | with: 81 | path: ./frontend/out 82 | 83 | # Lint job 84 | lint: 85 | runs-on: ubuntu-latest 86 | # Use ``frontend`` as the working directory 87 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_iddefaultsrun 88 | defaults: 89 | run: 90 | working-directory: ./frontend 91 | strategy: 92 | matrix: 93 | node-version: [18.17.1] 94 | steps: 95 | - name: Checkout 96 | uses: actions/checkout@v3 97 | - name: Set up pnpm 98 | uses: pnpm/action-setup@v2 99 | with: 100 | version: 8 101 | - name: Setup Node.js 102 | uses: actions/setup-node@v3 103 | with: 104 | node-version: ${{ matrix.node-version }} 105 | cache: "pnpm" 106 | cache-dependency-path: "./frontend/pnpm-lock.yaml" 107 | - name: Install dependencies 108 | run: pnpm install 109 | - name: Lint with ESLint 110 | run: pnpm lint 111 | - name: Lint with Prettier 112 | run: pnpm prettier:lint 113 | 114 | # Deployment job 115 | deploy: 116 | if: ${{ !github.event.act }} # skip during local actions testing 117 | environment: 118 | name: github-pages 119 | url: ${{ steps.deployment.outputs.page_url }} 120 | runs-on: ubuntu-latest 121 | needs: build 122 | steps: 123 | - name: Deploy to GitHub Pages 124 | id: deployment 125 | uses: actions/deploy-pages@v2 126 | -------------------------------------------------------------------------------- /.github/workflows/scraping.yaml: -------------------------------------------------------------------------------- 1 | name: Scraping the repositories from Source Graph 2 | 3 | on: 4 | schedule: 5 | # Trigger every day at midnight 6 | # https://crontab.guru/#0_0_*_*_* 7 | - cron: '0 0 * * *' 8 | # Allows you to run this workflow manually from the Actions tab 9 | # https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow 10 | # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch 11 | workflow_dispatch: 12 | 13 | concurrency: # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#concurrency 14 | group: "scraping" 15 | 
cancel-in-progress: false 16 | 17 | jobs: 18 | scraping: 19 | if: ${{ !github.event.act }} # skip during local actions testing 20 | runs-on: ubuntu-latest 21 | strategy: 22 | matrix: 23 | python-version: [3.11] 24 | steps: 25 | - uses: actions/checkout@v3 26 | - name: Set up Python 27 | uses: actions/setup-python@v4 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | cache: "pip" 31 | cache-dependency-path: "requirements/base.txt" 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install --upgrade pip 35 | python -m pip install -r requirements/base.txt 36 | - name: Scrape the repositories 37 | run: | 38 | python -m app.scrape scrape-repos 39 | - name: Parse the dependencies 40 | run: | 41 | python -m app.scrape parse-dependencies 42 | - name: Generate the repositories index 43 | run: | 44 | python -m app.index index-repos 45 | - name: Generate the dependencies index 46 | run: | 47 | python -m app.index index-dependencies 48 | - name: Commit the changes 49 | uses: stefanzweifel/git-auto-commit-action@v4 50 | with: 51 | commit_message: "Scraped repositories from Source Graph, parsed the dependencies, and generated the indexes" 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | wheels/ 22 | share/python-wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .nox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | *.py,cover 49 | .hypothesis/ 50 | .pytest_cache/ 51 | cover/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | .pybuilder/ 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | # For a library or package, you might want to ignore these files since the code is 86 | # intended to run in multiple environments; otherwise, check them in: 87 | .python-version 88 | 89 | # pipenv 90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 93 | # install all needed dependencies. 94 | #Pipfile.lock 95 | 96 | # poetry 97 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
98 | # This is especially recommended for binary packages to ensure reproducibility, and is more 99 | # commonly ignored for libraries. 100 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 101 | #poetry.lock 102 | 103 | # pdm 104 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 105 | #pdm.lock 106 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 107 | # in version control. 108 | # https://pdm.fming.dev/#use-with-ide 109 | .pdm.toml 110 | 111 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 112 | __pypackages__/ 113 | 114 | # Celery stuff 115 | celerybeat-schedule 116 | celerybeat.pid 117 | 118 | # SageMath parsed files 119 | *.sage.py 120 | 121 | # Environments 122 | .env 123 | .venv 124 | env/ 125 | venv/ 126 | ENV/ 127 | env.bak/ 128 | venv.bak/ 129 | 130 | # Spyder project settings 131 | .spyderproject 132 | .spyproject 133 | 134 | # Rope project settings 135 | .ropeproject 136 | 137 | # mkdocs documentation 138 | /site 139 | 140 | # mypy 141 | .mypy_cache/ 142 | .dmypy.json 143 | dmypy.json 144 | 145 | # Pyre type checker 146 | .pyre/ 147 | 148 | # pytype static type analyzer 149 | .pytype/ 150 | 151 | # Cython debug symbols 152 | cython_debug/ 153 | 154 | # PyCharm 155 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 156 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 157 | # and can be added to the global gitignore or merged into this file. For a more nuclear 158 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 159 | .idea/ 160 | 161 | # Ruff cache 162 | .ruff_cache/ 163 | -------------------------------------------------------------------------------- /.gitlint: -------------------------------------------------------------------------------- 1 | [general] 2 | ignore=body-is-missing 3 | -------------------------------------------------------------------------------- /.node-version: -------------------------------------------------------------------------------- 1 | v18.18.0 2 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | default_language_version: 3 | python: python3.11 4 | default_install_hook_types: [pre-commit, commit-msg] 5 | default_stages: [commit] 6 | fail_fast: false 7 | minimum_pre_commit_version: 3.3.3 8 | repos: 9 | - repo: https://github.com/pre-commit/pre-commit-hooks 10 | rev: v4.4.0 11 | hooks: 12 | - id: trailing-whitespace 13 | - id: end-of-file-fixer 14 | - id: mixed-line-ending 15 | args: ["--fix=lf"] 16 | - id: check-added-large-files 17 | - id: check-toml 18 | - id: check-yaml 19 | - repo: https://github.com/charliermarsh/ruff-pre-commit 20 | rev: v0.0.280 21 | hooks: 22 | - id: ruff 23 | args: [--fix, --exit-non-zero-on-fix] 24 | - repo: local 25 | hooks: 26 | - id: mypy 27 | name: mypy 28 | entry: | 29 | python -m mypy . 
30 | types: [python] 31 | language: system 32 | require_serial: true 33 | pass_filenames: false 34 | - repo: https://github.com/ambv/black 35 | rev: 23.7.0 36 | hooks: 37 | - id: black 38 | - repo: https://github.com/tox-dev/pyproject-fmt 39 | rev: "0.13.0" 40 | hooks: 41 | - id: pyproject-fmt 42 | args: ["--indent=4"] 43 | - repo: https://github.com/pre-commit/mirrors-prettier 44 | rev: "v3.0.3" 45 | hooks: 46 | - id: prettier 47 | entry: prettier --write --list-different --ignore-unknown --config frontend/.prettierrc --ignore-path frontend/.prettierignore 48 | files: ^frontend/ 49 | language_version: 18.18.0 50 | - repo: https://github.com/jorisroovers/gitlint 51 | rev: "v0.19.1" 52 | hooks: 53 | - id: gitlint 54 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # we want bash behaviour in all shell invocations 2 | SHELL := bash 3 | # Run each target in a separate shell 4 | .ONESHELL: 5 | # Fail on error inside any functions or subshells 6 | .SHELLFLAGS := -eu -o pipefail -c 7 | # Remove partially created files on error 8 | .DELETE_ON_ERROR: 9 | # Warn when an undefined variable is referenced 10 | MAKEFLAGS += --warn-undefined-variables 11 | # Disable built-in rules 12 | MAKEFLAGS += --no-builtin-rules 13 | # A catalog of requirements files 14 | REQUIREMENTS?=requirements 15 | 16 | help: # Show this help 17 | @echo "Usage: make [target]" 18 | @echo "" 19 | @echo "Targets:" 20 | @echo " help Show this help" 21 | @echo " requirements-base Compile base requirements" 22 | @echo " requirements-test Compile test requirements" 23 | @echo " requirements-dev Compile dev requirements" 24 | @echo " requirements Compile all requirements" 25 | @echo " install Install the app locally" 26 | @echo " install-front Install frontend" 27 | @echo " install-test Install the app locally with test dependencies" 28 | @echo " install-dev Install the app locally with dev dependencies" 29 | @echo " install-test-dev Install the app locally with test and dev dependencies" 30 | @echo " init-test-dev Install the app locally with test and dev dependencies. Also install pre-commit hooks." 31 | @echo " reinit-test-dev Reinstall pre-commit hooks" 32 | @echo " lint Run linters" 33 | @echo " test Run tests" 34 | @echo " migrate Run migrations" 35 | @echo " revision Create a new migration" 36 | @echo " front Run frontend" 37 | @echo " scrape-repos Scrape repos" 38 | @echo " parse-dependencies Scrape dependencies" 39 | @echo " index-repos Index repos" 40 | @echo " index-dependencies Index dependencies" 41 | 42 | requirements-base: # Compile base requirements 43 | python -m piptools compile \ 44 | --output-file=requirements/base.txt \ 45 | -v \ 46 | pyproject.toml 47 | 48 | requirements-test: requirements-base # Compile test requirements 49 | python -m piptools compile \ 50 | --extra=test \ 51 | --output-file=requirements/test.txt \ 52 | -v \ 53 | pyproject.toml 54 | 55 | requirements-dev: requirements-base # Compile dev requirements 56 | python -m piptools compile \ 57 | --extra=dev \ 58 | --output-file=requirements/dev.txt \ 59 | -v \ 60 | pyproject.toml 61 | 62 | requirements: requirements-base requirements-test requirements-dev # Compile all requirements 63 | .PHONY: requirements 64 | 65 | install: # Install the app locally 66 | python -m pip install -r $(REQUIREMENTS)/base.txt . 
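# Note: plain `install` pins to requirements/base.txt and installs the project
# itself; the install-test/-dev variants below also pull in the extra
# requirement files and install the package in editable mode instead.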
67 | .PHONY: install 68 | 69 | install-test: # Install the app locally with test dependencies 70 | python -m pip install \ 71 | -r $(REQUIREMENTS)/base.txt \ 72 | -r $(REQUIREMENTS)/test.txt \ 73 | --editable . 74 | .PHONY: install-test 75 | 76 | install-dev: # Install the app locally with dev dependencies 77 | python -m pip install \ 78 | -r $(REQUIREMENTS)/base.txt \ 79 | -r $(REQUIREMENTS)/dev.txt \ 80 | --editable . 81 | .PHONY: install-dev 82 | 83 | install-test-dev: # Install the app locally with test and dev dependencies 84 | python -m pip install \ 85 | -r $(REQUIREMENTS)/base.txt \ 86 | -r $(REQUIREMENTS)/test.txt \ 87 | -r $(REQUIREMENTS)/dev.txt \ 88 | --editable . 89 | .PHONY: install-test-dev 90 | 91 | install-front: # Install frontend 92 | cd frontend && pnpm install 93 | .PHONY: install-front 94 | 95 | init-test-dev: install-test-dev # Install the app locally with test and dev dependencies. Also install pre-commit hooks. 96 | pre-commit install 97 | .PHONY: init-test-dev 98 | 99 | reinit-test-dev: init-test-dev # Reinstall pre-commit hooks 100 | pre-commit install --install-hooks --overwrite 101 | .PHONY: reinit-test-dev 102 | 103 | lint: # Run linters 104 | pre-commit run --all-files 105 | .PHONY: lint 106 | 107 | test: # Run tests 108 | python -m pytest -vv -s --cov=app --cov-report=xml --cov-branch app 109 | .PHONY: test 110 | 111 | migrate: # Run migrations 112 | python -m alembic upgrade heads 113 | .PHONY: migrate 114 | 115 | revision: # Create a new migration 116 | python -m alembic revision --autogenerate -m "$(message)" 117 | .PHONY: revision 118 | 119 | front: install-front # Run frontend 120 | cd frontend && pnpm dev 121 | .PHONY: front 122 | 123 | scrape-repos: # Scrape repos 124 | python -m app.scrape scrape-repos 125 | .PHONY: scrape-repos 126 | 127 | parse-dependencies: # Scrape dependencies 128 | python -m app.scrape parse-dependencies 129 | .PHONY: parse-dependencies 130 | 131 | index-repos: # Index repos 132 | python -m app.index index-repos 133 | .PHONY: index-repos 134 | 135 | index-dependencies: # Index dependencies 136 | python -m app.index index-dependencies 137 | .PHONY: index-dependencies 138 | 139 | .DEFAULT_GOAL := init-test-dev # Set the default goal to init-dev-test 140 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Awesome FastAPI Projects 2 | 3 | View the website: https://kludex.github.io/awesome-fastapi-projects/ 4 | 5 | ## Local Development 6 | 7 | ### Setup 8 | 9 | #### Python and Virtual Environment 10 | 11 | The instructions below assume you have [pyenv](https://github.com/pyenv/pyenv) installed. 12 | If you don't, use any other method to create a virtual environment 13 | and install Python 3.11.4. 14 | 15 | - Install Python 3.11.4 16 | 17 | ```shell 18 | pyenv install 3.11.4 19 | ``` 20 | 21 | - Create a virtual environment 22 | 23 | ```shell 24 | pyenv virtualenv 3.11.4 awesome-fastapi-projects 25 | ``` 26 | 27 | - Activate the virtual environment 28 | 29 | ```shell 30 | pyenv local awesome-fastapi-projects 31 | ``` 32 | 33 | #### Install dependencies and pre-commit hooks 34 | 35 | There is a `Makefile` with some useful commands to help you get started. 36 | For available commands, run `make help`. 
To install dependencies and pre-commit hooks, run: 37 | 38 | ```shell 39 | make 40 | ``` 41 | 42 | #### Frontend 43 | 44 | The frontend is built with [React](https://reactjs.org/) and [Next.js](https://nextjs.org/). 45 | It is being statically built and served on GitHub Pages: https://kludex.github.io/awesome-fastapi-projects/ 46 | 47 | To run the frontend locally, you need to install [Node.js](https://nodejs.org/en/) and [pnpm](https://pnpm.io/). 48 | The node version is specified in the `.node-version` file. 49 | To easily manage the node version, you can use [fnm](https://github.com/Schniz/fnm). 50 | Then, run the following commands: 51 | 52 | ```shell 53 | make front 54 | ``` 55 | 56 | This will install the dependencies and start the development server. 57 | The frontend will be available at http://localhost:3000. 58 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | [alembic] 3 | # path to migration scripts 4 | script_location = migrations 5 | 6 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 7 | # Uncomment the line below if you want the files to be prepended with date and time 8 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 9 | 10 | # sys.path path, will be prepended to sys.path if present. 11 | # defaults to the current working directory. 12 | prepend_sys_path = . 13 | 14 | # timezone to use when rendering the date within the migration file 15 | # as well as the filename. 16 | # If specified, requires the python-dateutil library that can be 17 | # installed by adding `alembic[tz]` to the pip requirements 18 | # string value is passed to dateutil.tz.gettz() 19 | # leave blank for localtime 20 | # timezone = 21 | 22 | # max length of characters to apply to the 23 | # "slug" field 24 | # truncate_slug_length = 40 25 | 26 | # set to 'true' to run the environment during 27 | # the 'revision' command, regardless of autogenerate 28 | # revision_environment = false 29 | 30 | # set to 'true' to allow .pyc and .pyo files without 31 | # a source .py file to be detected as revisions in the 32 | # versions/ directory 33 | # sourceless = false 34 | 35 | # version location specification; This defaults 36 | # to migrations/versions. When using multiple version 37 | # directories, initial revisions must be specified with --version-path. 38 | # The path separator used here should be the separator specified by "version_path_separator" below. 39 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 40 | 41 | # version path separator; As mentioned above, this is the character used to split 42 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 43 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 44 | # Valid values for version_path_separator are: 45 | # 46 | # version_path_separator = : 47 | # version_path_separator = ; 48 | # version_path_separator = space 49 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
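# Note: the sqlalchemy.url defined further below uses the async
# sqlite+aiosqlite driver, matching the engine URL built in app/database.py,
# so migrations and the application target the same db.sqlite3 file.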
50 | 51 | # set to 'true' to search source files recursively 52 | # in each "version_locations" directory 53 | # new in Alembic version 1.10 54 | # recursive_version_locations = false 55 | 56 | # the output encoding used when revision files 57 | # are written from script.py.mako 58 | # output_encoding = utf-8 59 | 60 | sqlalchemy.url = sqlite+aiosqlite:///db.sqlite3 61 | 62 | 63 | [post_write_hooks] 64 | # post_write_hooks defines scripts or Python functions that are run 65 | # on newly generated revision scripts. See the documentation for further 66 | # detail and examples 67 | 68 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 69 | # hooks = black 70 | # black.type = console_scripts 71 | # black.entrypoint = black 72 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 73 | 74 | # Logging configuration 75 | [loggers] 76 | keys = root,sqlalchemy,alembic 77 | 78 | [handlers] 79 | keys = console 80 | 81 | [formatters] 82 | keys = generic 83 | 84 | [logger_root] 85 | level = WARN 86 | handlers = console 87 | qualname = 88 | 89 | [logger_sqlalchemy] 90 | level = WARN 91 | handlers = 92 | qualname = sqlalchemy.engine 93 | 94 | [logger_alembic] 95 | level = INFO 96 | handlers = 97 | qualname = alembic 98 | 99 | [handler_console] 100 | class = StreamHandler 101 | args = (sys.stderr,) 102 | level = NOTSET 103 | formatter = generic 104 | 105 | [formatter_generic] 106 | format = %(levelname)-5.5s [%(name)s] %(message)s 107 | datefmt = %H:%M:%S 108 | -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- 1 | """A web application for indexing the projects using FastAPI.""" 2 | -------------------------------------------------------------------------------- /app/conftest.py: -------------------------------------------------------------------------------- 1 | """The application-level conftest.""" 2 | import asyncio 3 | import contextlib 4 | from collections.abc import AsyncGenerator, Generator 5 | from typing import Literal 6 | 7 | import pytest 8 | import stamina 9 | from dirty_equals import IsList 10 | from sqlalchemy.ext.asyncio import ( 11 | AsyncConnection, 12 | AsyncEngine, 13 | AsyncSession, 14 | async_sessionmaker, 15 | create_async_engine, 16 | ) 17 | 18 | from app.database import Dependency, Repo 19 | from app.factories import DependencyCreateDataFactory 20 | from app.source_graph.factories import SourceGraphRepoDataFactory 21 | from app.source_graph.models import SourceGraphRepoData 22 | 23 | 24 | @pytest.fixture(autouse=True, scope="session") 25 | def anyio_backend() -> Literal["asyncio"]: 26 | """Use asyncio as the async backend.""" 27 | return "asyncio" 28 | 29 | 30 | @pytest.fixture(autouse=True, scope="session") 31 | def _deactivate_retries() -> None: 32 | """Deactivate stamina retries.""" 33 | stamina.set_active(False) 34 | 35 | 36 | @pytest.fixture(scope="session") 37 | def db_path() -> str: 38 | """Use the in-memory database for tests.""" 39 | return "" # ":memory:" 40 | 41 | 42 | @pytest.fixture(scope="session") 43 | def db_connection_string( 44 | db_path: str, 45 | ) -> str: 46 | """Provide the connection string for the in-memory database.""" 47 | return f"sqlite+aiosqlite:///{db_path}" 48 | 49 | 50 | @pytest.fixture(scope="session", params=[{"echo": False}], ids=["echo=False"]) 51 | async def db_engine( 52 | db_connection_string: str, 53 | request: pytest.FixtureRequest, 54 | ) -> AsyncGenerator[AsyncEngine, None, 
None]: 55 | """Create the database engine.""" 56 | # echo=True enables logging of all SQL statements 57 | # https://docs.sqlalchemy.org/en/20/core/engines.html#sqlalchemy.create_engine.params.echo 58 | engine = create_async_engine( 59 | db_connection_string, 60 | **request.param, # type: ignore 61 | ) 62 | try: 63 | yield engine 64 | finally: 65 | # for AsyncEngine created in function scope, close and 66 | # clean-up pooled connections 67 | await engine.dispose() 68 | 69 | 70 | @pytest.fixture(scope="session") 71 | def event_loop( 72 | request: pytest.FixtureRequest, 73 | ) -> Generator[asyncio.AbstractEventLoop, None, None]: 74 | """ 75 | Create an instance of the default event loop for a session. 76 | 77 | An event loop is destroyed at the end of the test session. 78 | https://docs.pytest.org/en/6.2.x/fixture.html#fixture-scopes 79 | """ 80 | with contextlib.closing(loop := asyncio.get_event_loop_policy().get_event_loop()): 81 | yield loop 82 | 83 | 84 | @pytest.fixture(scope="session") 85 | async def _database_objects( 86 | db_engine: AsyncEngine, 87 | ) -> AsyncGenerator[None, None]: 88 | """Create the database objects (tables, etc.).""" 89 | from app.database import Base 90 | 91 | # Enters a transaction 92 | # https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#sqlalchemy.ext.asyncio.AsyncConnection.begin 93 | try: 94 | async with db_engine.begin() as conn: 95 | await conn.run_sync(Base.metadata.drop_all) 96 | await conn.run_sync(Base.metadata.create_all) 97 | yield 98 | finally: 99 | # Clean up after the testing session is over 100 | async with db_engine.begin() as conn: 101 | await conn.run_sync(Base.metadata.drop_all) 102 | 103 | 104 | @pytest.fixture(scope="session") 105 | async def db_connection( 106 | db_engine: AsyncEngine, 107 | ) -> AsyncGenerator[AsyncConnection, None]: 108 | """Create a database connection.""" 109 | # Return connection with no transaction 110 | # https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#sqlalchemy.ext.asyncio.AsyncEngine.connect 111 | async with db_engine.connect() as conn: 112 | yield conn 113 | 114 | 115 | @pytest.fixture() 116 | async def db_session( 117 | db_engine: AsyncEngine, 118 | _database_objects: None, 119 | ) -> AsyncGenerator[AsyncSession, None]: 120 | """Create a database session.""" 121 | # The `async_sessionmaker` function is used to create a Session factory 122 | # https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#sqlalchemy.ext.asyncio.async_sessionmaker 123 | async_session_factory = async_sessionmaker( 124 | db_engine, expire_on_commit=False, autoflush=False, autocommit=False 125 | ) 126 | async with async_session_factory() as session: 127 | yield session 128 | 129 | 130 | @pytest.fixture() 131 | async def db_uow( 132 | db_session: AsyncSession, 133 | ) -> AsyncGenerator[AsyncSession, None]: 134 | """Provide a transactional scope around a series of operations.""" 135 | from app.uow import async_session_uow 136 | 137 | # This context manager will start a transaction, and roll it back at the end 138 | # https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#sqlalchemy.ext.asyncio.AsyncSessionTransaction 139 | async with async_session_uow(db_session) as session: 140 | yield session 141 | 142 | 143 | @pytest.fixture() 144 | async def some_repos( 145 | db_session: AsyncSession, 146 | source_graph_repo_data_factory: SourceGraphRepoDataFactory, 147 | dependency_create_data_factory: DependencyCreateDataFactory, 148 | ) -> list[Repo]: 149 | """Create some repos.""" 150 | source_graph_repos_data: 
list[ 151 | SourceGraphRepoData 152 | ] = source_graph_repo_data_factory.batch(10) 153 | assert source_graph_repos_data == IsList(length=10) 154 | repos = [ 155 | Repo( 156 | url=str(source_graph_repo_data.repo_url), 157 | description=source_graph_repo_data.description, 158 | stars=source_graph_repo_data.stars, 159 | source_graph_repo_id=source_graph_repo_data.repo_id, 160 | dependencies=[ 161 | Dependency(**dependency_create_data.model_dump()) 162 | for dependency_create_data in dependency_create_data_factory.batch(5) 163 | ], 164 | ) 165 | for source_graph_repo_data in source_graph_repos_data 166 | ] 167 | db_session.add_all(repos) 168 | await db_session.flush() 169 | return repos 170 | -------------------------------------------------------------------------------- /app/database.py: -------------------------------------------------------------------------------- 1 | """ 2 | Database models and session. 3 | 4 | The database is a SQLite database, and is stored in the root 5 | of the project as `db.sqlite3`. 6 | 7 | The database is managed using Alembic, and migrations 8 | are stored in the `migrations/` directory. 9 | 10 | The module defines the following models: 11 | 12 | - `Repo`: A repository that is being tracked. 13 | - `Dependency`: A dependency of a repository. 14 | - `RepoDependency`: A relationship between a repository and a dependency. 15 | 16 | The database is accessed asynchronously using SQLAlchemy's async API. 17 | """ 18 | from pathlib import PurePath 19 | from typing import Final 20 | 21 | from sqlalchemy import BigInteger, ForeignKey, MetaData, String, Text, UniqueConstraint 22 | from sqlalchemy.ext.asyncio import ( 23 | AsyncAttrs, 24 | AsyncEngine, 25 | AsyncSession, 26 | async_sessionmaker, 27 | create_async_engine, 28 | ) 29 | from sqlalchemy.orm import ( 30 | Mapped, 31 | declarative_base, 32 | mapped_column, 33 | relationship, 34 | ) 35 | 36 | from app.types import RevisionHash, SourceGraphRepoId 37 | 38 | _DB_PATH: Final[PurePath] = PurePath(__file__).parent.parent / "db.sqlite3" 39 | 40 | _SQLALCHEMY_DATABASE_URL: Final[str] = f"sqlite+aiosqlite:///{_DB_PATH}" 41 | 42 | engine: Final[AsyncEngine] = create_async_engine(_SQLALCHEMY_DATABASE_URL) 43 | 44 | async_session_maker: Final[async_sessionmaker[AsyncSession]] = async_sessionmaker( 45 | engine, expire_on_commit=False, autoflush=False, autocommit=False 46 | ) 47 | 48 | metadata = MetaData( 49 | naming_convention={ 50 | "ix": "ix_%(table_name)s_%(column_0_N_name)s ", 51 | "uq": "uq_%(table_name)s_%(column_0_N_name)s ", 52 | "ck": "ck_%(table_name)s_%(constraint_name)s ", 53 | "fk": "fk_%(table_name)s_%(column_0_N_name)s_%(referred_table_name)s", 54 | "pk": "pk_%(table_name)s", 55 | } 56 | ) 57 | 58 | 59 | Base = declarative_base(metadata=metadata, cls=AsyncAttrs) 60 | 61 | 62 | class Repo(Base): 63 | """A repository that is being tracked.""" 64 | 65 | __tablename__ = "repo" 66 | id: Mapped[int] = mapped_column(primary_key=True) 67 | url: Mapped[str] = mapped_column(nullable=False, unique=True) 68 | description: Mapped[str] = mapped_column(Text, nullable=False) 69 | stars: Mapped[int] = mapped_column(BigInteger, nullable=False) 70 | source_graph_repo_id: Mapped[SourceGraphRepoId | None] = mapped_column( 71 | BigInteger, nullable=True, unique=True 72 | ) 73 | dependencies: Mapped[list["Dependency"]] = relationship( 74 | "Dependency", secondary="repo_dependency", back_populates="repos" 75 | ) 76 | last_checked_revision: Mapped[RevisionHash | None] = mapped_column( 77 | String(255), nullable=True 78 | ) 79 | 
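    # Repo and Dependency form a many-to-many relationship through the
    # repo_dependency association table declared below (RepoDependency);
    # secondary="repo_dependency" plus back_populates on both relationships
    # keeps Repo.dependencies and Dependency.repos in sync. The composite
    # unique constraint that follows guards against duplicate
    # (url, source_graph_repo_id) pairs.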
__table_args__ = (UniqueConstraint("url", "source_graph_repo_id"),) 80 | 81 | 82 | class Dependency(Base): 83 | """A dependency of a repository.""" 84 | 85 | __tablename__ = "dependency" 86 | id: Mapped[int] = mapped_column(primary_key=True) 87 | name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True) 88 | repos: Mapped[list["Repo"]] = relationship( 89 | "Repo", secondary="repo_dependency", back_populates="dependencies" 90 | ) 91 | 92 | 93 | class RepoDependency(Base): 94 | """A relationship between a repository and a dependency.""" 95 | 96 | __tablename__ = "repo_dependency" 97 | repo_id: Mapped[int] = mapped_column( 98 | ForeignKey(Repo.id, ondelete="CASCADE"), primary_key=True 99 | ) 100 | dependency_id: Mapped[int] = mapped_column( 101 | ForeignKey(Dependency.id, ondelete="CASCADE"), primary_key=True 102 | ) 103 | -------------------------------------------------------------------------------- /app/dependencies.py: -------------------------------------------------------------------------------- 1 | """Dependencies parsing.""" 2 | import asyncio 3 | import subprocess 4 | from collections.abc import Sequence 5 | 6 | import aiofiles.tempfile 7 | import stamina 8 | from loguru import logger 9 | 10 | from app.database import Repo 11 | from app.models import DependencyCreateData 12 | from app.types import RevisionHash 13 | 14 | 15 | async def run_command(*cmd: str, cwd: str | None = None) -> str: 16 | """ 17 | Run the given command in a subprocess and return the stdout as plain text. 18 | 19 | :param cmd: The command to run. 20 | :param cwd: The working directory to run the command in. 21 | :return: The stdout result 22 | """ 23 | process = await asyncio.create_subprocess_exec( 24 | *cmd, 25 | stdout=subprocess.PIPE, 26 | stderr=subprocess.PIPE, 27 | cwd=cwd, 28 | ) 29 | 30 | stdout, stderr = await process.communicate() 31 | 32 | if process.returncode != 0: 33 | raise RuntimeError( 34 | f"Command '{cmd}' failed with exit code '{process.returncode}':\n" 35 | f"[stdout]: '{stdout.decode()}'\n" 36 | f"[stderr]: '{stderr.decode()}'" 37 | ) 38 | 39 | return stdout.decode() 40 | 41 | 42 | async def acquire_dependencies_data_for_repository( 43 | repo: Repo, 44 | ) -> tuple[RevisionHash, list[DependencyCreateData]]: 45 | """ 46 | Acquire dependencies for the given repository. 47 | 48 | The function will use the "third-party-imports" tool to 49 | parse the third-party dependencies of the repository. 50 | 51 | Since this tool has been written in Rust and is basically 52 | a CLI tool, the parsing will happen is a subprocess. 53 | 54 | :param repo: A repository for which to return the dependencies. 55 | :return: The dependencies data required to create the dependencies in the DB. 
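
    Both ``git`` and the ``third-party-imports`` executable are expected to be
    available on the ``PATH``, since both are invoked below via ``run_command``.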
56 | """ 57 | logger.info( 58 | "Acquiring the dependencies data for the repo with id {repo_id}.", 59 | repo_id=repo.id, 60 | enqueue=True, 61 | ) 62 | async with aiofiles.tempfile.TemporaryDirectory() as directory: 63 | # Clone the repository 64 | logger.info( 65 | "Cloning the repo with id {repo_id} into the directory {directory}.", 66 | repo_id=repo.id, 67 | directory=directory, 68 | enqueue=True, 69 | ) 70 | await run_command( 71 | "git", 72 | "clone", 73 | "--depth", 74 | "1", 75 | repo.url, 76 | directory, 77 | ) 78 | 79 | # Get the latest commit hash 80 | logger.info( 81 | "Getting the latest commit hash for the repo with id {repo_id}.", 82 | repo_id=repo.id, 83 | enqueue=True, 84 | ) 85 | revision: str = await run_command( 86 | "git", 87 | "rev-parse", 88 | "HEAD", 89 | cwd=directory, 90 | ) 91 | 92 | if repo.last_checked_revision == revision: 93 | # Assume there are no new dependencies to return 94 | # since all the repo dependencies have already 95 | # been parsed. 96 | logger.info( 97 | "The repo with id {repo_id} has already been updated.", 98 | repo_id=repo.id, 99 | enqueue=True, 100 | ) 101 | return RevisionHash(revision), [] 102 | 103 | # Parse the dependencies 104 | async for attempt in stamina.retry_context(on=RuntimeError, attempts=3): 105 | with attempt: 106 | logger.info( 107 | "Parsing the dependencies for the repo with id {repo_id}.", 108 | repo_id=repo.id, 109 | enqueue=True, 110 | ) 111 | dependencies: str = await run_command( 112 | "third-party-imports", 113 | directory, 114 | ) 115 | if dependencies: 116 | logger.info( 117 | "Successfully parsed the dependencies for the repo with id {repo_id}.", 118 | repo_id=repo.id, 119 | enqueue=True, 120 | ) 121 | # Split the dependencies by new line 122 | dependencies_list: Sequence[str] = dependencies.split("\n") 123 | # Drop the first two lines (the info lines) 124 | dependencies_list = ( 125 | dependencies_list[2:] if len(dependencies_list) > 2 else [] 126 | ) 127 | logger.info( 128 | "Found {count} dependencies for the repo with id {repo_id}.", 129 | count=len(dependencies_list), 130 | repo_id=repo.id, 131 | enqueue=True, 132 | ) 133 | else: 134 | logger.info( 135 | "No dependencies found for the repo with id {repo_id}.", 136 | repo_id=repo.id, 137 | enqueue=True, 138 | ) 139 | dependencies_list = [] 140 | return ( 141 | RevisionHash(revision), 142 | [ 143 | DependencyCreateData( 144 | name=dependency.strip(), 145 | ) 146 | for dependency in dependencies_list 147 | if dependency.strip() 148 | ], 149 | ) 150 | -------------------------------------------------------------------------------- /app/factories.py: -------------------------------------------------------------------------------- 1 | """Factories for creating models for testing.""" 2 | from polyfactory.factories.pydantic_factory import ModelFactory 3 | from polyfactory.pytest_plugin import register_fixture 4 | 5 | from app.models import DependencyCreateData 6 | 7 | 8 | @register_fixture 9 | class DependencyCreateDataFactory(ModelFactory[DependencyCreateData]): 10 | """Factory for creating DependencyCreateData.""" 11 | 12 | __model__ = DependencyCreateData 13 | -------------------------------------------------------------------------------- /app/index.py: -------------------------------------------------------------------------------- 1 | """ 2 | Create repos and dependencies indexes. 3 | 4 | This script creates can create two indexes: 5 | 6 | - ``repos_index.json``: Contains all the repositories and their dependencies. 
7 | - ``dependencies_index.json``: Contains all the dependencies and the 8 | repositories that depend on them. 9 | 10 | The indexes are used by the frontend to display the data and perform searches. 11 | """ 12 | import asyncio 13 | import json 14 | from pathlib import Path 15 | from typing import Final 16 | 17 | import aiofiles 18 | import sqlalchemy.orm 19 | import typer 20 | 21 | from app.database import Dependency, Repo, async_session_maker 22 | from app.models import DependencyDetail, RepoDetail 23 | from app.uow import async_session_uow 24 | 25 | #: The path to the repos index file. 26 | REPOS_INDEX_PATH: Final[Path] = Path(__file__).parent.parent / "repos_index.json" 27 | #: The path to the dependencies index file. 28 | DEPENDENCIES_INDEX_PATH: Final[Path] = ( 29 | Path(__file__).parent.parent / "dependencies_index.json" 30 | ) 31 | 32 | app = typer.Typer() 33 | 34 | 35 | async def create_repos_index() -> None: 36 | """ 37 | Create repos_index.json file from database. 38 | 39 | :return: None 40 | """ 41 | async with async_session_maker() as session, async_session_uow( 42 | session 43 | ), aiofiles.open(REPOS_INDEX_PATH, "w") as index_file: 44 | await index_file.write( 45 | json.dumps( 46 | { 47 | "repos": [ 48 | RepoDetail.model_validate(repo).model_dump() 49 | async for repo in ( 50 | await session.stream_scalars( 51 | sqlalchemy.select(Repo) 52 | .order_by(Repo.id) 53 | .options(sqlalchemy.orm.selectinload(Repo.dependencies)) 54 | ) 55 | ) 56 | ], 57 | }, 58 | indent=4, 59 | ) 60 | ) 61 | 62 | 63 | async def create_dependencies_index() -> None: 64 | """ 65 | Create dependencies_index.json file from database. 66 | 67 | :return: None 68 | """ 69 | async with async_session_maker() as session, async_session_uow( 70 | session 71 | ) as session, aiofiles.open(DEPENDENCIES_INDEX_PATH, "w") as index_file: 72 | dependencies = [ 73 | DependencyDetail.model_validate(dependency).model_dump() 74 | async for dependency in ( 75 | await session.stream_scalars( 76 | sqlalchemy.select(Dependency).order_by(Dependency.id) 77 | ) 78 | ) 79 | if dependency.name 80 | ] 81 | await index_file.write( 82 | json.dumps( 83 | { 84 | "dependencies": dependencies, 85 | }, 86 | indent=4, 87 | ) 88 | ) 89 | 90 | 91 | @app.command() 92 | def index_repos() -> None: 93 | """Create ``repos_index.json``.""" 94 | asyncio.run(create_repos_index()) 95 | 96 | 97 | @app.command() 98 | def index_dependencies() -> None: 99 | """Create ``dependencies_index.json``.""" 100 | asyncio.run(create_dependencies_index()) 101 | 102 | 103 | if __name__ == "__main__": 104 | app() 105 | -------------------------------------------------------------------------------- /app/models.py: -------------------------------------------------------------------------------- 1 | """Module contains the models for the application.""" 2 | 3 | from pydantic import BaseModel, ConfigDict, NonNegativeInt 4 | 5 | from app.types import DependencyId, RepoId, RevisionHash, SourceGraphRepoId 6 | 7 | 8 | class DependencyCreateData(BaseModel): 9 | """A dependency of a repository.""" 10 | 11 | name: str 12 | 13 | 14 | class DependencyDetail(BaseModel): 15 | """A dependency of a repository.""" 16 | 17 | model_config = ConfigDict( 18 | from_attributes=True, 19 | ) 20 | 21 | id: DependencyId 22 | name: str 23 | 24 | 25 | class RepoDetail(BaseModel): 26 | """A repository that is being tracked.""" 27 | 28 | model_config = ConfigDict( 29 | from_attributes=True, 30 | ) 31 | 32 | id: RepoId 33 | url: str 34 | description: str 35 | stars: NonNegativeInt 36 | 
source_graph_repo_id: SourceGraphRepoId | None 37 | dependencies: list[DependencyDetail] 38 | last_checked_revision: RevisionHash | None 39 | -------------------------------------------------------------------------------- /app/scrape.py: -------------------------------------------------------------------------------- 1 | """The logic for scraping the source graph data processing it.""" 2 | import asyncio 3 | 4 | import sqlalchemy.dialects.sqlite 5 | import typer 6 | from loguru import logger 7 | from sqlalchemy.ext.asyncio import AsyncSession 8 | 9 | from app.database import Dependency, Repo, RepoDependency, async_session_maker 10 | from app.dependencies import acquire_dependencies_data_for_repository 11 | from app.source_graph.client import AsyncSourceGraphSSEClient 12 | from app.source_graph.mapper import create_or_update_repos_from_source_graph_repos_data 13 | from app.source_graph.models import SourceGraphRepoData 14 | from app.uow import async_session_uow 15 | 16 | 17 | async def _create_dependencies_for_repo(session: AsyncSession, repo: Repo) -> None: 18 | """ 19 | Create dependencies for a repo. 20 | 21 | For each parsed dependency, creates a new record in the database, if such a 22 | dependency does not exist. 23 | Then, assigns the dependencies to the given repo. 24 | 25 | :param session: An asynchronous session object 26 | :param repo: A repo for which to create and assign the dependencies 27 | """ 28 | # Acquire the dependencies data for the repo 29 | logger.info( 30 | "Acquiring the dependencies data for the repo with id {repo_id}.", 31 | repo_id=repo.id, 32 | enqueue=True, 33 | ) 34 | try: 35 | ( 36 | revision, 37 | dependencies_create_data, 38 | ) = await acquire_dependencies_data_for_repository(repo) 39 | except RuntimeError: 40 | # If the parsing fails, 41 | # just skip creating the dependencies 42 | logger.error( 43 | "Failed to acquire the dependencies data for the repo with id {repo_id}.", 44 | repo_id=repo.id, 45 | enqueue=True, 46 | ) 47 | return 48 | if repo.last_checked_revision == revision: 49 | # If the repo has already been updated, 50 | # just skip creating the dependencies 51 | logger.info( 52 | "The repo with id {repo_id} has fresh dependencies.", 53 | repo_id=repo.id, 54 | enqueue=True, 55 | ) 56 | return 57 | if not dependencies_create_data: 58 | # If there are no dependencies, 59 | # just skip creating the dependencies 60 | logger.info( 61 | "The repo with id {repo_id} has no dependencies.", 62 | repo_id=repo.id, 63 | enqueue=True, 64 | ) 65 | return 66 | # Update the repo with the revision hash 67 | logger.info( 68 | "Updating the repo with id {repo_id} with the revision hash {revision}.", 69 | repo_id=repo.id, 70 | revision=revision, 71 | enqueue=True, 72 | ) 73 | update_repo_statement = ( 74 | sqlalchemy.update(Repo) 75 | .where(Repo.id == repo.id) 76 | .values(last_checked_revision=revision) 77 | ) 78 | await session.execute(update_repo_statement) 79 | # Create dependencies - on conflict do nothing. 80 | # This is to avoid creating duplicate dependencies. 
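    # The SQLite-specific INSERT ... ON CONFLICT DO NOTHING below keys the
    # conflict on the unique Dependency.name column, so rows that already exist
    # are skipped silently; the dependencies are therefore re-selected by name
    # afterwards rather than taken from the statement's RETURNING clause.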
81 | logger.info( 82 | "Creating the dependencies for the repo with id {repo_id}.", 83 | repo_id=repo.id, 84 | enqueue=True, 85 | ) 86 | insert_dependencies_statement = sqlalchemy.dialects.sqlite.insert( 87 | Dependency 88 | ).on_conflict_do_nothing(index_elements=[Dependency.name]) 89 | await session.execute( 90 | insert_dependencies_statement.returning(Dependency), 91 | [ 92 | { 93 | "name": dependency_data.name, 94 | } 95 | for dependency_data in dependencies_create_data 96 | ], 97 | ) 98 | # Re-fetch the dependencies from the database 99 | dependencies = ( 100 | await session.scalars( 101 | sqlalchemy.select(Dependency).where( 102 | Dependency.name.in_( 103 | [ 104 | dependency_data.name 105 | for dependency_data in dependencies_create_data 106 | ] 107 | ) 108 | ) 109 | ) 110 | ).all() 111 | # Add the dependencies to the repo 112 | insert_repo_dependencies_statement = sqlalchemy.dialects.sqlite.insert( 113 | RepoDependency 114 | ).on_conflict_do_nothing([RepoDependency.repo_id, RepoDependency.dependency_id]) 115 | await session.execute( 116 | insert_repo_dependencies_statement, 117 | [ 118 | { 119 | "repo_id": repo.id, 120 | "dependency_id": dependency.id, 121 | } 122 | for dependency in dependencies 123 | ], 124 | ) 125 | 126 | 127 | async def _save_scraped_repos_from_source_graph_repos_data( 128 | source_graph_repos_data: list[SourceGraphRepoData], 129 | ) -> None: 130 | """ 131 | Save the scraped repos from the source graph repos data. 132 | 133 | .. note:: 134 | This function is meant to be used in a task group. 135 | From the SQLAlchemy documentation: 136 | :: 137 | https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#using-asyncsession-with-concurrent-tasks 138 | 139 | The AsyncSession object is a mutable, stateful object 140 | which represents a single, stateful database 141 | transaction in progress. Using concurrent tasks with asyncio, 142 | with APIs such as asyncio.gather() for example, should use 143 | a separate AsyncSession per individual task. 144 | 145 | 146 | :param source_graph_repos_data: The source graph repos data. 147 | :return: None 148 | """ # noqa: E501 149 | async with async_session_maker() as session, async_session_uow(session): 150 | saved_repos = await create_or_update_repos_from_source_graph_repos_data( 151 | session=session, 152 | source_graph_repos_data=source_graph_repos_data, 153 | ) 154 | logger.info( 155 | "Saving {count} repos.", 156 | count=len(saved_repos), 157 | enqueue=True, 158 | ) 159 | await session.commit() 160 | 161 | 162 | async def scrape_source_graph_repos() -> None: 163 | """ 164 | Iterate over the source graph repos and create or update them in the database. 165 | 166 | :return: None 167 | """ 168 | async with AsyncSourceGraphSSEClient() as sg_client, asyncio.TaskGroup() as tg: 169 | logger.info( 170 | "Creating or updating repos from source graph repos data.", 171 | enqueue=True, 172 | ) 173 | async for sg_repos_data in sg_client.aiter_fastapi_repos(): 174 | logger.info( 175 | "Received {count} repos.", 176 | count=len(sg_repos_data), 177 | enqueue=True, 178 | ) 179 | tg.create_task( 180 | _save_scraped_repos_from_source_graph_repos_data( 181 | source_graph_repos_data=sg_repos_data 182 | ) 183 | ) 184 | 185 | 186 | async def parse_dependencies_for_repo(semaphore: asyncio.Semaphore, repo: Repo) -> None: 187 | """ 188 | Parse the dependencies for a given repo and create them in the database. 189 | 190 | .. note:: 191 | This function is meant to be used in a task group. 
192 | From the SQLAlchemy documentation: 193 | :: 194 | https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#using-asyncsession-with-concurrent-tasks 195 | 196 | The AsyncSession object is a mutable, stateful object 197 | which represents a single, stateful database 198 | transaction in progress. Using concurrent tasks with asyncio, 199 | with APIs such as asyncio.gather() for example, should use 200 | a separate AsyncSession per individual task. 201 | 202 | 203 | :param semaphore: A semaphore to limit the number of concurrent requests 204 | :param repo: A repo for which to create and assign the dependencies 205 | :return: None 206 | """ # noqa: E501 207 | async with semaphore, async_session_maker() as session, async_session_uow(session): 208 | # Associate the repo object with a fresh session instance 209 | repo = await session.merge(repo) 210 | # Create the dependencies for the repo 211 | logger.info( 212 | "Creating the dependencies for the repo with id {repo_id}.", 213 | repo_id=repo.id, 214 | enqueue=True, 215 | ) 216 | await _create_dependencies_for_repo(session=session, repo=repo) 217 | await session.commit() 218 | 219 | 220 | async def parse_dependencies_for_repos() -> None: 221 | """ 222 | Parse the dependencies for all the repos in the database. 223 | 224 | :return: None. 225 | """ 226 | logger.info("Fetching the repos from the database.", enqueue=True) 227 | async with async_session_maker() as session: 228 | repos = ( 229 | await session.scalars( 230 | sqlalchemy.select(Repo).order_by( 231 | Repo.last_checked_revision.is_(None).desc() 232 | ) 233 | ) 234 | ).all() 235 | logger.info("Fetched {count} repos.", count=len(repos), enqueue=True) 236 | logger.info("Parsing the dependencies for the repos.", enqueue=True) 237 | semaphore = asyncio.Semaphore(10) 238 | async with asyncio.TaskGroup() as tg: 239 | for repo in repos: 240 | logger.info( 241 | "Parsing the dependencies for repo {repo_id}.", 242 | repo_id=repo.id, 243 | enqueue=True, 244 | ) 245 | tg.create_task(parse_dependencies_for_repo(semaphore=semaphore, repo=repo)) 246 | 247 | 248 | app = typer.Typer() 249 | 250 | 251 | @app.command() 252 | def scrape_repos() -> None: 253 | """ 254 | Scrape the FastAPI-related repositories utilizing the source graph API. 255 | 256 | :return: None 257 | """ 258 | logger.info("Scraping the source graph repos.", enqueue=True) 259 | asyncio.run(scrape_source_graph_repos()) 260 | 261 | 262 | @app.command() 263 | def parse_dependencies() -> None: 264 | """ 265 | Parse the dependencies for all the repos in the database. 266 | 267 | :return: None. 
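
    Typically invoked as ``python -m app.scrape parse-dependencies``, the same
    command used by the ``parse-dependencies`` Makefile target and the
    scraping workflow.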
268 | """ 269 | logger.info( 270 | "Parsing the dependencies for all the repos in the database.", enqueue=True 271 | ) 272 | asyncio.run(parse_dependencies_for_repos()) 273 | 274 | 275 | if __name__ == "__main__": 276 | app() 277 | -------------------------------------------------------------------------------- /app/source_graph/__init__.py: -------------------------------------------------------------------------------- 1 | """Scraping module for the application.""" 2 | -------------------------------------------------------------------------------- /app/source_graph/client.py: -------------------------------------------------------------------------------- 1 | """The client for the SourceGraph API.""" 2 | import asyncio 3 | from collections.abc import AsyncGenerator, Mapping, MutableMapping 4 | from contextlib import asynccontextmanager 5 | from datetime import timedelta 6 | from types import TracebackType 7 | from typing import Any, Final, Self 8 | from urllib.parse import quote 9 | 10 | import httpx 11 | import stamina 12 | from httpx_sse import EventSource, ServerSentEvent, aconnect_sse 13 | from loguru import logger 14 | 15 | from app.source_graph.models import SourceGraphRepoData, SourceGraphRepoDataListAdapter 16 | 17 | #: The URL of the SourceGraph SSE API. 18 | SOURCE_GRAPH_STREAM_API_URL: Final[str] = "https://sourcegraph.com/.api/search/stream" 19 | 20 | 21 | #: The query parameters for the SourceGraph SSE API. 22 | FASTAPI_REPOS_QUERY_PARAMS: Final[Mapping[str, str]] = { 23 | "q": quote( 24 | " ".join( 25 | [ 26 | "repo:has.content(from fastapi import FastApi)", 27 | "type:repo", 28 | "visibility:public", 29 | "archived:no", 30 | "fork:no", 31 | ] 32 | ) 33 | ), 34 | } 35 | 36 | 37 | class AsyncSourceGraphSSEClient: 38 | """ 39 | A client for the SourceGraph SSE API. 
40 | 41 | To learn more about the underlying API, see the ``SourceGraph SSE API`` 42 | https://docs.sourcegraph.com/api/stream_api#sourcegraph-stream-api 43 | """ 44 | 45 | def __init__(self: Self) -> None: 46 | """Initialize the client.""" 47 | self._last_event_id: str | None = None 48 | self._reconnection_delay: float = 0.0 49 | self._aclient: httpx.AsyncClient = httpx.AsyncClient() 50 | 51 | async def __aenter__(self: Self) -> Self: 52 | """Enter the async context manager.""" 53 | await self._aclient.__aenter__() 54 | return self 55 | 56 | async def __aexit__( 57 | self: Self, 58 | exc_type: type[BaseException] | None = None, 59 | exc_val: BaseException | None = None, 60 | exc_tb: TracebackType | None = None, 61 | ) -> None: 62 | """Exit the async context manager.""" 63 | return await self._aclient.__aexit__(exc_type, exc_val, exc_tb) 64 | 65 | @asynccontextmanager 66 | async def _aconnect_sse( 67 | self: Self, **kwargs: MutableMapping[str, Any] 68 | ) -> AsyncGenerator[EventSource, None]: 69 | """Connect to the SourceGraph SSE API.""" 70 | headers = kwargs.pop("headers", {}) 71 | if self._last_event_id is not None: 72 | headers["Last-Event-ID"] = self._last_event_id 73 | async with aconnect_sse( 74 | client=self._aclient, 75 | url=SOURCE_GRAPH_STREAM_API_URL, 76 | method="GET", 77 | headers=headers, 78 | **kwargs, 79 | ) as event_source: 80 | yield event_source 81 | 82 | async def _aiter_sse( 83 | self: Self, **kwargs: MutableMapping[str, Any] 84 | ) -> AsyncGenerator[ServerSentEvent, None]: 85 | """Iterate over the SourceGraph SSE API.""" 86 | async with self._aconnect_sse(**kwargs) as event_source: 87 | async for event in event_source.aiter_sse(): 88 | yield event 89 | 90 | async def _aiter_sse_with_retries( 91 | self: Self, **kwargs: MutableMapping[str, Any] 92 | ) -> AsyncGenerator[ServerSentEvent, None]: 93 | """Iterate over the SourceGraph SSE API with retries.""" 94 | async for attempt in stamina.retry_context( 95 | on=(httpx.ReadError, httpx.ReadTimeout) 96 | ): 97 | with attempt: 98 | await asyncio.sleep(self._reconnection_delay) 99 | async for event in self._aiter_sse(**kwargs): 100 | self._last_event_id = event.id 101 | if event.retry is not None: 102 | logger.error( 103 | "Received a retry event from the SourceGraph SSE API. 
" 104 | "Schedule a reconnection in {retry} milliseconds.", 105 | retry=event.retry, 106 | enqueue=True, 107 | ) 108 | self._reconnection_delay = timedelta( 109 | milliseconds=event.retry 110 | ).total_seconds() 111 | else: 112 | self._reconnection_delay = 0.0 113 | yield event 114 | 115 | async def aiter_fastapi_repos( 116 | self: Self, 117 | ) -> AsyncGenerator[list[SourceGraphRepoData], None]: 118 | """Iterate over the SourceGraph SSE API with retries.""" 119 | async for event in self._aiter_sse_with_retries( 120 | params=dict(FASTAPI_REPOS_QUERY_PARAMS) 121 | ): 122 | if event.event == "matches": 123 | yield SourceGraphRepoDataListAdapter.validate_python(event.json()) 124 | -------------------------------------------------------------------------------- /app/source_graph/factories.py: -------------------------------------------------------------------------------- 1 | """Factories for creating test data.""" 2 | from polyfactory.factories.pydantic_factory import ModelFactory 3 | from polyfactory.pytest_plugin import register_fixture 4 | 5 | from app.source_graph.models import SourceGraphRepoData 6 | 7 | 8 | @register_fixture 9 | class SourceGraphRepoDataFactory(ModelFactory[SourceGraphRepoData]): 10 | """Factory for creating SourceGraphRepoData.""" 11 | 12 | __model__ = SourceGraphRepoData 13 | -------------------------------------------------------------------------------- /app/source_graph/mapper.py: -------------------------------------------------------------------------------- 1 | """Mapper for source graph models to the database objects.""" 2 | from collections.abc import Sequence 3 | 4 | import sqlalchemy.dialects.sqlite 5 | from sqlalchemy.ext.asyncio import AsyncSession 6 | 7 | from app import database 8 | from app.source_graph.models import SourceGraphRepoData 9 | 10 | 11 | async def create_or_update_repos_from_source_graph_repos_data( 12 | session: AsyncSession, source_graph_repos_data: Sequence[SourceGraphRepoData] 13 | ) -> Sequence[database.Repo]: 14 | """ 15 | Create repos from source graph repos data. 16 | 17 | If any repos already exist, update them. 18 | 19 | :param session: The database session. 20 | :param source_graph_repos_data: The source graph repos data. 
21 | """ 22 | insert_statement = sqlalchemy.dialects.sqlite.insert(database.Repo) 23 | update_statement = insert_statement.on_conflict_do_update( 24 | index_elements=[database.Repo.source_graph_repo_id], 25 | set_={ 26 | "url": insert_statement.excluded.url, 27 | "description": insert_statement.excluded.description, 28 | "stars": insert_statement.excluded.stars, 29 | "source_graph_repo_id": insert_statement.excluded.source_graph_repo_id, 30 | }, 31 | ) 32 | 33 | return ( 34 | await session.scalars( 35 | update_statement.returning(database.Repo), 36 | [ 37 | { 38 | "url": str(repo_data.repo_url), 39 | "description": repo_data.description, 40 | "stars": repo_data.stars, 41 | "source_graph_repo_id": repo_data.repo_id, 42 | } 43 | for repo_data in source_graph_repos_data 44 | ], 45 | ) 46 | ).all() 47 | -------------------------------------------------------------------------------- /app/source_graph/models.py: -------------------------------------------------------------------------------- 1 | """The models for the Source Graph data.""" 2 | import datetime 3 | from typing import Literal, Self 4 | 5 | from pydantic import ( 6 | BaseModel, 7 | Field, 8 | HttpUrl, 9 | NonNegativeInt, 10 | TypeAdapter, 11 | computed_field, 12 | ) 13 | 14 | from app.types import SourceGraphRepoId 15 | 16 | 17 | class SourceGraphRepoData(BaseModel): 18 | """The data of a repository.""" 19 | 20 | type: Literal["repo"] 21 | repo_id: SourceGraphRepoId = Field(..., alias="repositoryID") 22 | repo_handle: str = Field(..., alias="repository") 23 | stars: NonNegativeInt = Field(..., alias="repoStars") 24 | last_fetched_at: datetime.datetime = Field(..., alias="repoLastFetched") 25 | description: str = Field(default="") 26 | 27 | @computed_field # type: ignore[misc] 28 | @property 29 | def repo_url(self: Self) -> HttpUrl: 30 | """The URL of the repository.""" 31 | return TypeAdapter(HttpUrl).validate_python(f"https://{self.repo_handle}") 32 | 33 | 34 | #: The type adapter for the SourceGraphRepoData. 35 | SourceGraphRepoDataAdapter = TypeAdapter(SourceGraphRepoData) 36 | 37 | #: The type adapter for the SourceGraphRepoData list. 38 | SourceGraphRepoDataListAdapter = TypeAdapter(list[SourceGraphRepoData]) 39 | -------------------------------------------------------------------------------- /app/source_graph/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Test the scraping of the SourceGraph API.""" 2 | -------------------------------------------------------------------------------- /app/source_graph/tests/test_client.py: -------------------------------------------------------------------------------- 1 | """Test the client module for the source graph.""" 2 | from typing import Any 3 | 4 | import pytest 5 | from dirty_equals import HasLen, IsDatetime, IsInstance, IsPositiveInt 6 | from pydantic import Json, TypeAdapter 7 | 8 | from app.source_graph.models import SourceGraphRepoData 9 | 10 | 11 | @pytest.fixture() 12 | def source_graph_matched_repos_data() -> Json[Any]: 13 | """Return the sample data of the matched repositories.""" 14 | return [ 15 | { 16 | "type": "repo", 17 | "repositoryID": 55636527, 18 | "repository": "github.com/tiangolo/sqlmodel", 19 | "repoStars": 10277, 20 | "repoLastFetched": "2023-07-31T18:47:22.875731Z", 21 | "description": ( 22 | "SQL databases in Python, designed " 23 | "for simplicity, compatibility, " 24 | "and robustness." 
25 | ), 26 | "metadata": { 27 | "fastapi": "null", 28 | "json": "null", 29 | "json-schema": "null", 30 | "pydantic": "null", 31 | "python": "null", 32 | "sql": "null", 33 | "sqlalchemy": "null", 34 | }, 35 | }, 36 | { 37 | "type": "repo", 38 | "repositoryID": 59434622, 39 | "repository": "github.com/reflex-dev/reflex", 40 | "repoStars": 10061, 41 | "repoLastFetched": "2023-07-31T08:58:42.692906Z", 42 | "description": "(Previously Pynecone) 🕸 Web apps in pure Python 🐍", 43 | }, 44 | { 45 | "type": "repo", 46 | "repositoryID": 42982149, 47 | "repository": "github.com/PaddlePaddle/PaddleNLP", 48 | "repoStars": 9804, 49 | "repoLastFetched": "2023-07-31T16:48:08.839209Z", 50 | "description": ( 51 | "👑 Easy-to-use and powerful NLP library with 🤗 " 52 | "Awesome model zoo, supporting wide-range of NLP tasks " 53 | "from research to industrial applications, including" 54 | " 🗂Text Classification, 🔍 Neural Search, ❓ Question " 55 | "Answering, ℹ️ Information Extraction, " 56 | "📄 Document Intelligence, 💌 Sentiment Analysis etc." 57 | ), 58 | "metadata": { 59 | "bert": "null", 60 | "embedding": "null", 61 | "ernie": "null", 62 | "information-extraction": "null", 63 | "neural-search": "null", 64 | "nlp": "null", 65 | "paddlenlp": "null", 66 | "pretrained-models": "null", 67 | "question-answering": "null", 68 | "search-engine": "null", 69 | "semantic-analysis": "null", 70 | "sentiment-analysis": "null", 71 | "seq2seq": "null", 72 | "transformer": "null", 73 | "transformers": "null", 74 | "uie": "null", 75 | }, 76 | }, 77 | { 78 | "type": "repo", 79 | "repositoryID": 36246068, 80 | "repository": "github.com/realpython/materials", 81 | "repoStars": 4359, 82 | "repoLastFetched": "2023-07-31T05:15:16.993896Z", 83 | }, 84 | ] 85 | 86 | 87 | def test_source_graph_repo_data(source_graph_matched_repos_data: Json[Any]) -> None: 88 | """Test the SourceGraphRepoData deserialization.""" 89 | assert source_graph_matched_repos_data == HasLen(4) 90 | _SourceGraphRepoDataListValidator = TypeAdapter(list[SourceGraphRepoData]) 91 | repos_parsed = _SourceGraphRepoDataListValidator.validate_python( 92 | source_graph_matched_repos_data 93 | ) 94 | assert repos_parsed == HasLen(4) 95 | assert all(repo == IsInstance[SourceGraphRepoData] for repo in repos_parsed) 96 | assert all( 97 | repo.repo_id == repo_data["repositoryID"] 98 | for repo, repo_data in zip( 99 | repos_parsed, source_graph_matched_repos_data, strict=True 100 | ) 101 | ) 102 | assert all( 103 | repo.repo_handle == repo_data["repository"] 104 | for repo, repo_data in zip( 105 | repos_parsed, source_graph_matched_repos_data, strict=True 106 | ) 107 | ) 108 | assert all( 109 | repo.stars == IsPositiveInt and repo.stars == repo_data["repoStars"] 110 | for repo, repo_data in zip( 111 | repos_parsed, source_graph_matched_repos_data, strict=True 112 | ) 113 | ) 114 | assert all( 115 | str(repo.repo_url) == f"https://{repo_data['repository']}" 116 | for repo, repo_data in zip( 117 | repos_parsed, source_graph_matched_repos_data, strict=True 118 | ) 119 | ) 120 | assert all(repo.last_fetched_at == IsDatetime for repo in repos_parsed) 121 | -------------------------------------------------------------------------------- /app/source_graph/tests/test_mapper.py: -------------------------------------------------------------------------------- 1 | """The tests for the source graph mapper to the database objects.""" 2 | 3 | import pytest 4 | import sqlalchemy 5 | from dirty_equals import IsInstance, IsList 6 | from sqlalchemy.ext.asyncio import AsyncSession 7 | 8 | from app 
import database 9 | from app.source_graph.factories import SourceGraphRepoDataFactory 10 | from app.source_graph.mapper import create_or_update_repos_from_source_graph_repos_data 11 | from app.source_graph.models import SourceGraphRepoData 12 | 13 | pytestmark = pytest.mark.anyio 14 | 15 | 16 | async def test_create_or_update_repos_from_source_graph_repos_data( 17 | db_session: AsyncSession, 18 | source_graph_repo_data_factory: SourceGraphRepoDataFactory, 19 | ) -> None: 20 | """Test creating repos from source graph repos data.""" 21 | source_graph_repo_data: list[ 22 | SourceGraphRepoData 23 | ] = source_graph_repo_data_factory.batch(5) 24 | repos = await create_or_update_repos_from_source_graph_repos_data( 25 | db_session, source_graph_repo_data 26 | ) 27 | assert repos == IsList(length=5) 28 | assert all(repo == IsInstance[database.Repo] for repo in repos) 29 | assert all(repo.id is not None for repo in repos) 30 | 31 | 32 | async def test_create_or_update_repos_from_source_graph_repos_data_update( 33 | some_repos: list[database.Repo], 34 | db_session: AsyncSession, 35 | source_graph_repo_data_factory: SourceGraphRepoDataFactory, 36 | ) -> None: 37 | """Test updating repos from source graph repos data.""" 38 | assert ( 39 | await db_session.execute( 40 | sqlalchemy.select(sqlalchemy.func.count(database.Repo.id)) 41 | ) 42 | ).scalar() == len(some_repos) 43 | source_graph_repos_data: list[ 44 | SourceGraphRepoData 45 | ] = source_graph_repo_data_factory.batch(len(some_repos)) 46 | source_graph_repos_data = [ 47 | SourceGraphRepoData( 48 | **( 49 | repo_data.model_dump(by_alias=True) 50 | | {"repositoryID": repo.source_graph_repo_id} 51 | ) 52 | ) 53 | for repo, repo_data in zip(some_repos, source_graph_repos_data, strict=True) 54 | ] 55 | repos = await create_or_update_repos_from_source_graph_repos_data( 56 | db_session, source_graph_repos_data 57 | ) 58 | assert repos == IsList(length=len(some_repos)) 59 | assert all(repo == IsInstance[database.Repo] for repo in repos) 60 | assert all(repo.id is not None for repo in repos) 61 | assert ( 62 | await db_session.execute( 63 | sqlalchemy.select(sqlalchemy.func.count(database.Repo.id)) 64 | ) 65 | ).scalar() == len(some_repos) 66 | -------------------------------------------------------------------------------- /app/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """The application tests.""" 2 | -------------------------------------------------------------------------------- /app/tests/test_database.py: -------------------------------------------------------------------------------- 1 | """Test the operations on the database models.""" 2 | import pytest 3 | import sqlalchemy as sa 4 | import sqlalchemy.orm 5 | from dirty_equals import IsList 6 | from sqlalchemy.ext.asyncio import AsyncSession 7 | 8 | from app import database 9 | from app.factories import DependencyCreateDataFactory 10 | from app.models import DependencyCreateData 11 | from app.source_graph.factories import SourceGraphRepoDataFactory 12 | from app.source_graph.models import SourceGraphRepoData 13 | 14 | pytestmark = pytest.mark.anyio 15 | 16 | 17 | def _assert_repo_properties( 18 | repo: database.Repo, source_graph_repo_data: SourceGraphRepoData 19 | ) -> bool: 20 | """Assert that the repo has the expected properties.""" 21 | assert repo.id is not None 22 | assert repo.url == str(source_graph_repo_data.repo_url) 23 | assert repo.description == source_graph_repo_data.description 24 | assert repo.stars == 
source_graph_repo_data.stars 25 | assert repo.source_graph_repo_id == source_graph_repo_data.repo_id 26 | return True 27 | 28 | 29 | async def test_create_repo_no_dependencies( 30 | db_session: AsyncSession, 31 | source_graph_repo_data_factory: SourceGraphRepoDataFactory, 32 | ) -> None: 33 | """Test creating a repo.""" 34 | source_graph_repo_data: SourceGraphRepoData = source_graph_repo_data_factory.build() 35 | repo = database.Repo( 36 | url=str(source_graph_repo_data.repo_url), 37 | description=source_graph_repo_data.description, 38 | stars=source_graph_repo_data.stars, 39 | source_graph_repo_id=source_graph_repo_data.repo_id, 40 | ) 41 | db_session.add(repo) 42 | await db_session.flush() 43 | await db_session.refresh(repo) 44 | _assert_repo_properties(repo, source_graph_repo_data) 45 | assert (await repo.awaitable_attrs.dependencies) == IsList(length=0) 46 | 47 | 48 | async def test_create_repo_with_dependencies( 49 | db_session: AsyncSession, 50 | source_graph_repo_data_factory: SourceGraphRepoDataFactory, 51 | dependency_create_data_factory: DependencyCreateDataFactory, 52 | ) -> None: 53 | """Test creating a repo with dependencies.""" 54 | source_graph_repo_data: SourceGraphRepoData = source_graph_repo_data_factory.build() 55 | dependencies_create_data: list[ 56 | DependencyCreateData 57 | ] = dependency_create_data_factory.batch(5) 58 | repo = database.Repo( 59 | url=str(source_graph_repo_data.repo_url), 60 | description=source_graph_repo_data.description, 61 | stars=source_graph_repo_data.stars, 62 | source_graph_repo_id=source_graph_repo_data.repo_id, 63 | dependencies=[ 64 | database.Dependency(**dependency_create_data.model_dump()) 65 | for dependency_create_data in dependencies_create_data 66 | ], 67 | ) 68 | db_session.add(repo) 69 | await db_session.flush() 70 | _assert_repo_properties(repo, source_graph_repo_data) 71 | repo_dependencies = await repo.awaitable_attrs.dependencies 72 | assert repo_dependencies == IsList(length=5) 73 | assert all( 74 | repo_dependency.name == dependency.name 75 | for repo_dependency, dependency in zip( 76 | repo_dependencies, dependencies_create_data, strict=True 77 | ) 78 | ) 79 | 80 | 81 | async def test_list_repositories( 82 | db_session: AsyncSession, 83 | some_repos: list[database.Repo], 84 | ) -> None: 85 | """Test listing repositories.""" 86 | repos_from_db_result = await db_session.execute( 87 | sa.select(database.Repo).options( 88 | sqlalchemy.orm.joinedload(database.Repo.dependencies) 89 | ) 90 | ) 91 | repos_from_db = repos_from_db_result.scalars().unique().all() 92 | assert repos_from_db == IsList(length=10) 93 | assert all( 94 | repo.id == repo_data.id 95 | and all( 96 | repo_dependency.name == dependency.name 97 | for repo_dependency, dependency in zip( 98 | repo.dependencies, repo_data.dependencies, strict=True 99 | ) 100 | ) 101 | for repo, repo_data in zip(repos_from_db, some_repos, strict=True) 102 | ) 103 | -------------------------------------------------------------------------------- /app/types.py: -------------------------------------------------------------------------------- 1 | """Type definitions for the application.""" 2 | from typing import NewType 3 | 4 | #: The ID of a repository from the database. 5 | RepoId = NewType("RepoId", int) 6 | #: The ID of a repository from the SourceGraph API. 7 | SourceGraphRepoId = NewType("SourceGraphRepoId", int) 8 | #: The ID of a dependency from the database. 9 | DependencyId = NewType("DependencyId", int) 10 | #: The revision hash of a repository. 
11 | RevisionHash = NewType("RevisionHash", str) 12 | -------------------------------------------------------------------------------- /app/uow.py: -------------------------------------------------------------------------------- 1 | """ 2 | The Unit of Work pattern implementation. 3 | 4 | To learn more about the UoW, see: 5 | https://www.cosmicpython.com/book/chapter_06_uow.html 6 | """ 7 | from collections.abc import AsyncGenerator 8 | from contextlib import asynccontextmanager 9 | 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | 12 | 13 | @asynccontextmanager 14 | async def async_session_uow( 15 | session: AsyncSession, 16 | ) -> AsyncGenerator[AsyncSession, None]: 17 | """ 18 | Provide a transactional scope around a series of operations. 19 | 20 | :param session: The database session. 21 | :return: a UoW instance 22 | """ 23 | async with session.begin(): 24 | try: 25 | yield session 26 | finally: 27 | if session.in_transaction() and session.is_active: 28 | # session.is_active is True if this Session not in “partial rollback” 29 | # state. If this Session is within a transaction, and that transaction 30 | # has not been rolled back internally, the Session.is_active will also 31 | # be True. 32 | # https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html#sqlalchemy.ext.asyncio.AsyncSession.is_active 33 | await session.rollback() 34 | -------------------------------------------------------------------------------- /db.sqlite3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kludex/awesome-fastapi-projects/9d303b5bd23360f141bd9e1f438757c4f82348c3/db.sqlite3 -------------------------------------------------------------------------------- /frontend/.env: -------------------------------------------------------------------------------- 1 | NEXT_PUBLIC_PROJECT_REPO_URL="https://github.com/kludex/awesome-fastapi-projects" 2 | -------------------------------------------------------------------------------- /frontend/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["next/core-web-vitals", "prettier"] 3 | } 4 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | 27 | # local env files 28 | .env*.local 29 | 30 | # vercel 31 | .vercel 32 | 33 | # typescript 34 | *.tsbuildinfo 35 | next-env.d.ts 36 | -------------------------------------------------------------------------------- /frontend/.prettierignore: -------------------------------------------------------------------------------- 1 | out 2 | .next 3 | next-env.d.ts 4 | -------------------------------------------------------------------------------- /frontend/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "always", 3 | "bracketSameLine": false, 4 | "bracketSpacing": true, 5 | "semi": true, 6 | "singleQuote": false, 7 | "jsxSingleQuote": false, 8 | "quoteProps": "as-needed", 9 | "trailingComma": "all", 10 | "htmlWhitespaceSensitivity": "css", 11 | "vueIndentScriptAndStyle": false, 12 | "proseWrap": "preserve", 13 | "insertPragma": false, 14 | "printWidth": 80, 15 | "requirePragma": false, 16 | "tabWidth": 2, 17 | "useTabs": false, 18 | "embeddedLanguageFormatting": "auto" 19 | } 20 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 2 | 3 | ## Getting Started 4 | 5 | First, run the development server: 6 | 7 | ```bash 8 | npm run dev 9 | # or 10 | yarn dev 11 | # or 12 | pnpm dev 13 | ``` 14 | 15 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 16 | 17 | You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. 18 | 19 | This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. 20 | 21 | ## Learn More 22 | 23 | To learn more about Next.js, take a look at the following resources: 24 | 25 | - [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. 26 | - [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. 27 | 28 | You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 29 | 30 | ## Deploy on Vercel 31 | 32 | The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. 33 | 34 | Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 
35 | -------------------------------------------------------------------------------- /frontend/components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "default", 4 | "rsc": true, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "tailwind.config.ts", 8 | "css": "src/app/globals.css", 9 | "baseColor": "slate", 10 | "cssVariables": true 11 | }, 12 | "aliases": { 13 | "components": "@/components", 14 | "utils": "@/lib/utils" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /frontend/next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | 3 | const pkg = require("./package.json"); 4 | 5 | // starts a command line process to get the git hash 6 | const commitHash = require("child_process") 7 | .execSync('git log --pretty=format:"%h" -n1') 8 | .toString() 9 | .trim(); 10 | 11 | const nextConfig = { 12 | output: "export", 13 | basePath: 14 | // TODO: Change to load from env variable 15 | process.env.NODE_ENV === "production" ? "/awesome-fastapi-projects" : "", 16 | env: { 17 | commitHash, 18 | frontendAppVersion: pkg.version, 19 | }, 20 | }; 21 | 22 | module.exports = nextConfig; 23 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint", 10 | "prettier:lint": "prettier --check .", 11 | "prettier:format": "prettier --write ." 
12 | }, 13 | "dependencies": { 14 | "@hookform/resolvers": "^3.3.1", 15 | "@orama/orama": "^1.2.3", 16 | "@radix-ui/react-avatar": "^1.0.3", 17 | "@radix-ui/react-dialog": "^1.0.4", 18 | "@radix-ui/react-label": "^2.0.2", 19 | "@radix-ui/react-slot": "^1.0.2", 20 | "@tanstack/react-table": "^8.9.3", 21 | "@tanstack/react-virtual": "3.0.0-alpha.0", 22 | "@types/node": "20.5.1", 23 | "@types/react": "18.2.20", 24 | "@types/react-dom": "18.2.7", 25 | "autoprefixer": "10.4.15", 26 | "class-variance-authority": "^0.7.0", 27 | "clsx": "^2.0.0", 28 | "cmdk": "^0.2.0", 29 | "eslint": "8.47.0", 30 | "eslint-config-next": "13.4.18", 31 | "lucide-react": "^0.269.0", 32 | "next": "13.4.18", 33 | "postcss": "8.4.28", 34 | "react": "18.2.0", 35 | "react-dom": "18.2.0", 36 | "react-hook-form": "^7.46.1", 37 | "tailwind-merge": "^1.14.0", 38 | "tailwindcss": "3.3.3", 39 | "tailwindcss-animate": "^1.0.6", 40 | "typescript": "5.1.6", 41 | "zod": "^3.21.4" 42 | }, 43 | "devDependencies": { 44 | "eslint-config-prettier": "^9.0.0", 45 | "prettier": "^3.0.3" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /frontend/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /frontend/public/next.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /frontend/public/vercel.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /frontend/src/app/columns.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { Badge } from "@/components/ui/badge"; 3 | import { Button } from "@/components/ui/button"; 4 | import { Repo } from "@/lib/schemas"; 5 | import { ColumnDef } from "@tanstack/react-table"; 6 | import { ArrowUpDown, MoreHorizontal } from "lucide-react"; 7 | 8 | export const columns: ColumnDef[] = [ 9 | { 10 | accessorKey: "url", 11 | header: function () { 12 | return ( 13 |
14 | Project 15 | 16 | 🚀 17 | 18 |
19 | ); 20 | }, 21 | cell: function ({ row }) { 22 | return ( 23 | 47 | ); 48 | }, 49 | }, 50 | { 51 | accessorKey: "description", 52 | header: function () { 53 | return ( 54 |
55 | Description 56 | 57 | ✍️ 58 | 59 |
60 | ); 61 | }, 62 | cell: function ({ row }) { 63 | return ( 64 |

65 | {row.getValue("description") || "No description"} 66 |

67 | ); 68 | }, 69 | }, 70 | { 71 | accessorKey: "stars", 72 | header: function ({ column }) { 73 | return ( 74 | 84 | ); 85 | }, 86 | cell: function ({ row }) { 87 | return ( 88 | 89 | {row.getValue("stars").toLocaleString("en-US")} 90 | 91 | ); 92 | }, 93 | }, 94 | ]; 95 | -------------------------------------------------------------------------------- /frontend/src/app/data-table.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import { 4 | ColumnDef, 5 | SortingState, 6 | flexRender, 7 | getCoreRowModel, 8 | getPaginationRowModel, 9 | getSortedRowModel, 10 | useReactTable, 11 | } from "@tanstack/react-table"; 12 | 13 | import { 14 | Table, 15 | TableBody, 16 | TableCell, 17 | TableHead, 18 | TableHeader, 19 | TableRow, 20 | } from "@/components/ui/table"; 21 | import { Button } from "@/components/ui/button"; 22 | import { useState } from "react"; 23 | 24 | interface DataTableProps { 25 | columns: ColumnDef[]; 26 | data: TData[]; 27 | } 28 | 29 | export function DataTable({ 30 | columns, 31 | data, 32 | }: DataTableProps) { 33 | const [sorting, setSorting] = useState([]); 34 | const table = useReactTable({ 35 | data, 36 | columns, 37 | getCoreRowModel: getCoreRowModel(), 38 | getPaginationRowModel: getPaginationRowModel(), 39 | onSortingChange: setSorting, 40 | getSortedRowModel: getSortedRowModel(), 41 | state: { 42 | sorting, 43 | }, 44 | }); 45 | 46 | return ( 47 |
48 |
49 | 50 | 51 | {table.getHeaderGroups().map((headerGroup) => ( 52 | 53 | {headerGroup.headers.map((header) => { 54 | return ( 55 | 56 | {header.isPlaceholder 57 | ? null 58 | : flexRender( 59 | header.column.columnDef.header, 60 | header.getContext(), 61 | )} 62 | 63 | ); 64 | })} 65 | 66 | ))} 67 | 68 | 69 | {table.getRowModel().rows?.length ? ( 70 | table.getRowModel().rows.map((row) => ( 71 | 75 | {row.getVisibleCells().map((cell) => ( 76 | 77 | {flexRender( 78 | cell.column.columnDef.cell, 79 | cell.getContext(), 80 | )} 81 | 82 | ))} 83 | 84 | )) 85 | ) : ( 86 | 87 | 91 | No results. 92 | 93 | 94 | )} 95 | 96 |
97 |
98 |
99 | 107 | 115 |
116 |
117 | ); 118 | } 119 | -------------------------------------------------------------------------------- /frontend/src/app/dependencies-search-provider.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { 3 | createDependenciesOrama, 4 | prepareDependenciesOramaIndex, 5 | DependenciesOramaContext, 6 | } from "@/lib/search"; 7 | import { PropsWithChildren } from "react"; 8 | import { SearchProvider } from "./search-provider"; 9 | import { DependenciesIndex } from "@/lib/schemas"; 10 | 11 | export function DependenciesSearchProvider({ 12 | children, 13 | dependencies, 14 | }: PropsWithChildren<{ 15 | dependencies: DependenciesIndex["dependencies"]; 16 | }>) { 17 | const prepareOramaIndex = async () => { 18 | const orama = await createDependenciesOrama(); 19 | await prepareDependenciesOramaIndex(orama, dependencies); 20 | return orama; 21 | }; 22 | 23 | return ( 24 | 28 | {children} 29 | 30 | ); 31 | } 32 | -------------------------------------------------------------------------------- /frontend/src/app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kludex/awesome-fastapi-projects/9d303b5bd23360f141bd9e1f438757c4f82348c3/frontend/src/app/favicon.ico -------------------------------------------------------------------------------- /frontend/src/app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | @layer base { 6 | :root { 7 | --background: 0 0% 100%; 8 | --foreground: 240 10% 3.9%; 9 | --card: 0 0% 100%; 10 | --card-foreground: 240 10% 3.9%; 11 | --popover: 0 0% 100%; 12 | --popover-foreground: 240 10% 3.9%; 13 | --primary: 142.1 76.2% 36.3%; 14 | --primary-foreground: 355.7 100% 97.3%; 15 | --secondary: 240 4.8% 95.9%; 16 | --secondary-foreground: 240 5.9% 10%; 17 | --muted: 240 4.8% 95.9%; 18 | --muted-foreground: 240 3.8% 46.1%; 19 | --accent: 240 4.8% 95.9%; 20 | --accent-foreground: 240 5.9% 10%; 21 | --destructive: 0 84.2% 60.2%; 22 | --destructive-foreground: 0 0% 98%; 23 | --border: 240 5.9% 90%; 24 | --input: 240 5.9% 90%; 25 | --ring: 142.1 76.2% 36.3%; 26 | --radius: 0.5rem; 27 | } 28 | 29 | .dark { 30 | --background: 20 14.3% 4.1%; 31 | --foreground: 0 0% 95%; 32 | --card: 24 9.8% 10%; 33 | --card-foreground: 0 0% 95%; 34 | --popover: 0 0% 9%; 35 | --popover-foreground: 0 0% 95%; 36 | --primary: 142.1 70.6% 45.3%; 37 | --primary-foreground: 144.9 80.4% 10%; 38 | --secondary: 240 3.7% 15.9%; 39 | --secondary-foreground: 0 0% 98%; 40 | --muted: 0 0% 15%; 41 | --muted-foreground: 240 5% 64.9%; 42 | --accent: 12 6.5% 15.1%; 43 | --accent-foreground: 0 0% 98%; 44 | --destructive: 0 62.8% 30.6%; 45 | --destructive-foreground: 0 85.7% 97.3%; 46 | --border: 240 3.7% 15.9%; 47 | --input: 240 3.7% 15.9%; 48 | --ring: 142.4 71.8% 29.2%; 49 | } 50 | } 51 | 52 | @layer base { 53 | * { 54 | @apply border-border; 55 | } 56 | body { 57 | @apply bg-background text-foreground; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /frontend/src/app/layout.tsx: -------------------------------------------------------------------------------- 1 | import { cn } from "@/lib/utils"; 2 | import "./globals.css"; 3 | import type { Metadata } from "next"; 4 | import { Inter } from "next/font/google"; 5 | import { buttonVariants } from "@/components/ui/button"; 6 | import { Icons } from 
"@/components/ui/icons"; 7 | import Link from "next/link"; 8 | import { Card, CardHeader, CardTitle, CardContent } from "@/components/ui/card"; 9 | import { FolderGit, Rocket } from "lucide-react"; 10 | 11 | const inter = Inter({ subsets: ["latin"] }); 12 | 13 | export const metadata: Metadata = { 14 | title: "Awesome FastAPI projects", 15 | description: "An automatically generated list of awesome FastAPI projects", 16 | }; 17 | 18 | export default function RootLayout({ 19 | children, 20 | }: { 21 | children: React.ReactNode; 22 | }) { 23 | return ( 24 | 25 | 26 |
27 |
28 |
29 | 45 |
46 |
47 |
48 |
49 |

50 | Awesome FastAPI projects{" "} 51 | 52 | 🎉 53 | 54 |

55 | {children} 56 |
57 |
58 | 141 |
142 | 143 | 144 | ); 145 | } 146 | // TODO: improve pagination - sync with the URL, add a "go to page" input 147 | // TODO: refactor the layout and the components 148 | -------------------------------------------------------------------------------- /frontend/src/app/page.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | loadDependenciesIndexServerOnly, 3 | loadReposIndexServerOnly, 4 | } from "@/lib/indexes"; 5 | import { ReposTable } from "./repos-table"; 6 | import { ReposSearchProvider } from "./repos-search-provider"; 7 | import { DependenciesSearchProvider } from "./dependencies-search-provider"; 8 | 9 | export default async function Home() { 10 | const { repos } = await loadReposIndexServerOnly(); 11 | const { dependencies } = await loadDependenciesIndexServerOnly(); 12 | // refactor repos and dependencies to be loaded from the context 13 | return ( 14 |
15 | 16 | 17 | 18 | 19 | 20 |
21 | ); 22 | } 23 | -------------------------------------------------------------------------------- /frontend/src/app/repos-search-provider.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { 3 | createReposOrama, 4 | prepareReposOramaIndex, 5 | ReposOramaContext, 6 | } from "@/lib/search"; 7 | import { PropsWithChildren } from "react"; 8 | import { SearchProvider } from "./search-provider"; 9 | import { RepoIndex } from "@/lib/schemas"; 10 | 11 | export function ReposSearchProvider({ 12 | children, 13 | repos, 14 | }: PropsWithChildren<{ 15 | repos: RepoIndex["repos"]; 16 | }>) { 17 | const prepareOramaIndex = async () => { 18 | const orama = await createReposOrama(); 19 | await prepareReposOramaIndex(orama, repos); 20 | return orama; 21 | }; 22 | 23 | return ( 24 | 28 | {children} 29 | 30 | ); 31 | } 32 | -------------------------------------------------------------------------------- /frontend/src/app/repos-table.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { Repo, Dependency } from "@/lib/schemas"; 3 | import { search } from "@orama/orama"; 4 | import { SearchForm } from "./search-form"; 5 | import { columns } from "./columns"; 6 | import { DataTable } from "./data-table"; 7 | import { useReposOrama } from "@/lib/search"; 8 | import { useState } from "react"; 9 | import { useQuerySearchFormData } from "@/lib/hooks"; 10 | import React from "react"; 11 | 12 | export function ReposTable({ 13 | repos, 14 | dependencies, 15 | }: { 16 | repos: Repo[]; 17 | dependencies: Dependency[]; 18 | }) { 19 | const reposOrama = useReposOrama(); 20 | const [searchedRepos, setSearchedRepos] = useState(repos); 21 | const { searchQueryFromQueryParam, dependenciesQueryFromQueryParam } = 22 | useQuerySearchFormData(dependencies); 23 | 24 | const onSearchSubmit = React.useCallback( 25 | async ({ 26 | search: description, 27 | dependencies, 28 | }: { 29 | search: string; 30 | dependencies: Dependency[]; 31 | }) => { 32 | if (!reposOrama.isIndexed || !reposOrama.orama) { 33 | throw new Error("Repos Orama is not initialized"); 34 | } 35 | const results = await search(reposOrama.orama, { 36 | term: description, 37 | properties: ["description"], 38 | limit: repos.length, 39 | }); 40 | const searchedRepos = results.hits.map((hit) => hit.document as Repo); 41 | // Workaround because Orama doesn't support filtering by properties of objects in arrays 42 | const filteredRepos = searchedRepos.filter((repo) => { 43 | return dependencies.every((dependency) => { 44 | return repo.dependencies.some( 45 | (repoDependency) => repoDependency.id === dependency.id, 46 | ); 47 | }); 48 | }); 49 | setSearchedRepos(filteredRepos); 50 | }, 51 | [repos, reposOrama.isIndexed, reposOrama.orama], 52 | ); 53 | 54 | const _ref = React.useCallback( 55 | (node: HTMLDivElement | null) => { 56 | if (node !== null) { 57 | if (reposOrama.isIndexed && reposOrama.orama) { 58 | onSearchSubmit({ 59 | search: searchQueryFromQueryParam(), 60 | dependencies: dependenciesQueryFromQueryParam(), 61 | }); 62 | } 63 | } 64 | }, 65 | [ 66 | dependenciesQueryFromQueryParam, 67 | onSearchSubmit, 68 | reposOrama.isIndexed, 69 | reposOrama.orama, 70 | searchQueryFromQueryParam, 71 | ], 72 | ); 73 | 74 | return ( 75 | <> 76 |
77 | 78 |
79 | 80 | 81 | ); 82 | } 83 | -------------------------------------------------------------------------------- /frontend/src/app/search-form.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { zodResolver } from "@hookform/resolvers/zod"; 3 | import { useForm } from "react-hook-form"; 4 | import * as z from "zod"; 5 | 6 | import { Button } from "@/components/ui/button"; 7 | import { 8 | Form, 9 | FormControl, 10 | FormDescription, 11 | FormField, 12 | FormItem, 13 | FormLabel, 14 | FormMessage, 15 | } from "@/components/ui/form"; 16 | import { Input } from "@/components/ui/input"; 17 | import { MultiSelect } from "@/components/ui/multiselect"; 18 | import { Dependency, dependencySchema } from "@/lib/schemas"; 19 | import { useSearchParams, useRouter, usePathname } from "next/navigation"; 20 | import React from "react"; 21 | import { useQuerySearchFormData } from "@/lib/hooks"; 22 | 23 | const FormSchema = z.object({ 24 | search: z 25 | .string() 26 | .min(0) 27 | .max(256, { message: "Search must be less than 256 characters" }) 28 | .default(""), 29 | dependencies: z.array(dependencySchema).default(() => []), 30 | }); 31 | 32 | export interface SearchFormProps { 33 | onSubmit: (data: z.infer) => void; 34 | dependencies: Dependency[]; 35 | } 36 | 37 | export function SearchForm({ onSubmit, dependencies }: SearchFormProps) { 38 | const router = useRouter(); 39 | const pathname = usePathname(); 40 | const searchParams = useSearchParams(); 41 | 42 | const { 43 | searchQueryFromQueryParam, 44 | searchQueryToQueryParam, 45 | dependenciesQueryFromQueryParam, 46 | dependenciesQueryToQueryParam, 47 | } = useQuerySearchFormData(dependencies); 48 | 49 | const form = useForm>({ 50 | resolver: zodResolver(FormSchema), 51 | defaultValues: { 52 | search: searchQueryFromQueryParam(), 53 | dependencies: dependenciesQueryFromQueryParam(), 54 | }, 55 | }); 56 | 57 | const createQueryString = React.useCallback( 58 | ({ 59 | searchQueryValue, 60 | dependenciesQueryValue, 61 | }: { 62 | searchQueryValue: string; 63 | dependenciesQueryValue: Dependency[]; 64 | }) => { 65 | const params = new URLSearchParams(searchParams); 66 | params.set("search", searchQueryToQueryParam(searchQueryValue)); 67 | params.set( 68 | "dependencies", 69 | dependenciesQueryToQueryParam(dependenciesQueryValue), 70 | ); 71 | 72 | return params.toString(); 73 | }, 74 | [dependenciesQueryToQueryParam, searchParams, searchQueryToQueryParam], 75 | ); 76 | 77 | const onSubmitWrapper = (data: z.infer) => { 78 | onSubmit(data); 79 | // update URL search params 80 | const queryString = createQueryString({ 81 | searchQueryValue: data.search, 82 | dependenciesQueryValue: data.dependencies, 83 | }); 84 | router.replace(`${pathname}?${queryString}`); 85 | }; 86 | return ( 87 |
88 | 89 | ( 93 | 94 | Search for a repository 95 | 96 | 97 | 98 | 99 | The search is performed on the repository description. 100 | 101 | 102 | 103 | )} 104 | /> 105 | ( 109 | 110 | Dependencies 111 | 112 | 113 | 114 | 115 | Filter by dependencies used in the repository. 116 | 117 | 118 | 119 | )} 120 | /> 121 | 122 | 123 | 124 | ); 125 | } 126 | -------------------------------------------------------------------------------- /frontend/src/app/search-provider.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { IOramaContext } from "@/lib/search"; 3 | import { Context, PropsWithChildren, useEffect, useState } from "react"; 4 | 5 | import { Orama, ProvidedTypes as OramaProvidedTypes } from "@orama/orama"; 6 | 7 | export type SearchProviderProps< 8 | OramaParameters extends Partial = any, 9 | > = PropsWithChildren<{ 10 | OramaContext: Context>; 11 | createIndex: () => Promise>; 12 | }>; 13 | 14 | export function SearchProvider< 15 | OramaParameters extends Partial, 16 | >({ 17 | children, 18 | OramaContext, 19 | createIndex, 20 | }: SearchProviderProps) { 21 | const [orama, setOrama] = useState | null>(null); 22 | const [isIndexed, setIsIndexed] = useState(false); 23 | 24 | useEffect(() => { 25 | async function initializeOrama() { 26 | setIsIndexed(false); 27 | await createIndex().then(setOrama); 28 | setIsIndexed(true); 29 | } 30 | initializeOrama(); 31 | }, [createIndex]); 32 | 33 | return ( 34 | 35 | {children} 36 | 37 | ); 38 | } 39 | -------------------------------------------------------------------------------- /frontend/src/components/ui/badge.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import { cva, type VariantProps } from "class-variance-authority"; 3 | 4 | import { cn } from "@/lib/utils"; 5 | 6 | const badgeVariants = cva( 7 | "inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", 8 | { 9 | variants: { 10 | variant: { 11 | default: 12 | "border-transparent bg-primary text-primary-foreground hover:bg-primary/80", 13 | secondary: 14 | "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", 15 | destructive: 16 | "border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80", 17 | outline: "text-foreground", 18 | }, 19 | }, 20 | defaultVariants: { 21 | variant: "default", 22 | }, 23 | }, 24 | ); 25 | 26 | export interface BadgeProps 27 | extends React.HTMLAttributes, 28 | VariantProps {} 29 | 30 | function Badge({ className, variant, ...props }: BadgeProps) { 31 | return ( 32 |
33 | ); 34 | } 35 | 36 | export { Badge, badgeVariants }; 37 | -------------------------------------------------------------------------------- /frontend/src/components/ui/button.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import { Slot } from "@radix-ui/react-slot"; 3 | import { cva, type VariantProps } from "class-variance-authority"; 4 | 5 | import { cn } from "@/lib/utils"; 6 | 7 | const buttonVariants = cva( 8 | "inline-flex items-center justify-center rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50", 9 | { 10 | variants: { 11 | variant: { 12 | default: "bg-primary text-primary-foreground hover:bg-primary/90", 13 | destructive: 14 | "bg-destructive text-destructive-foreground hover:bg-destructive/90", 15 | outline: 16 | "border border-input bg-background hover:bg-accent hover:text-accent-foreground", 17 | secondary: 18 | "bg-secondary text-secondary-foreground hover:bg-secondary/80", 19 | ghost: "hover:bg-accent hover:text-accent-foreground", 20 | link: "text-primary underline-offset-4 hover:underline", 21 | }, 22 | size: { 23 | default: "h-10 px-4 py-2", 24 | sm: "h-9 rounded-md px-3", 25 | lg: "h-11 rounded-md px-8", 26 | icon: "h-10 w-10", 27 | }, 28 | }, 29 | defaultVariants: { 30 | variant: "default", 31 | size: "default", 32 | }, 33 | }, 34 | ); 35 | 36 | export interface ButtonProps 37 | extends React.ButtonHTMLAttributes, 38 | VariantProps { 39 | asChild?: boolean; 40 | } 41 | 42 | const Button = React.forwardRef( 43 | ({ className, variant, size, asChild = false, ...props }, ref) => { 44 | const Comp = asChild ? Slot : "button"; 45 | return ( 46 | 51 | ); 52 | }, 53 | ); 54 | Button.displayName = "Button"; 55 | 56 | export { Button, buttonVariants }; 57 | -------------------------------------------------------------------------------- /frontend/src/components/ui/card.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | 3 | import { cn } from "@/lib/utils"; 4 | 5 | const Card = React.forwardRef< 6 | HTMLDivElement, 7 | React.HTMLAttributes 8 | >(({ className, ...props }, ref) => ( 9 |
17 | )); 18 | Card.displayName = "Card"; 19 | 20 | const CardHeader = React.forwardRef< 21 | HTMLDivElement, 22 | React.HTMLAttributes 23 | >(({ className, ...props }, ref) => ( 24 |
29 | )); 30 | CardHeader.displayName = "CardHeader"; 31 | 32 | const CardTitle = React.forwardRef< 33 | HTMLParagraphElement, 34 | React.HTMLAttributes 35 | >(({ className, ...props }, ref) => ( 36 |

44 | )); 45 | CardTitle.displayName = "CardTitle"; 46 | 47 | const CardDescription = React.forwardRef< 48 | HTMLParagraphElement, 49 | React.HTMLAttributes 50 | >(({ className, ...props }, ref) => ( 51 |

56 | )); 57 | CardDescription.displayName = "CardDescription"; 58 | 59 | const CardContent = React.forwardRef< 60 | HTMLDivElement, 61 | React.HTMLAttributes 62 | >(({ className, ...props }, ref) => ( 63 |

64 | )); 65 | CardContent.displayName = "CardContent"; 66 | 67 | const CardFooter = React.forwardRef< 68 | HTMLDivElement, 69 | React.HTMLAttributes 70 | >(({ className, ...props }, ref) => ( 71 |
76 | )); 77 | CardFooter.displayName = "CardFooter"; 78 | 79 | export { 80 | Card, 81 | CardHeader, 82 | CardFooter, 83 | CardTitle, 84 | CardDescription, 85 | CardContent, 86 | }; 87 | -------------------------------------------------------------------------------- /frontend/src/components/ui/command.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import { DialogProps } from "@radix-ui/react-dialog"; 5 | import { Command as CommandPrimitive } from "cmdk"; 6 | import { Search } from "lucide-react"; 7 | 8 | import { cn } from "@/lib/utils"; 9 | import { Dialog, DialogContent } from "@/components/ui/dialog"; 10 | 11 | const Command = React.forwardRef< 12 | React.ElementRef, 13 | React.ComponentPropsWithoutRef 14 | >(({ className, ...props }, ref) => ( 15 | 23 | )); 24 | Command.displayName = CommandPrimitive.displayName; 25 | 26 | interface CommandDialogProps extends DialogProps {} 27 | 28 | const CommandDialog = ({ children, ...props }: CommandDialogProps) => { 29 | return ( 30 | 31 | 32 | 33 | {children} 34 | 35 | 36 | 37 | ); 38 | }; 39 | 40 | const CommandInput = React.forwardRef< 41 | React.ElementRef, 42 | React.ComponentPropsWithoutRef 43 | >(({ className, ...props }, ref) => ( 44 |
45 | 46 | 54 |
55 | )); 56 | 57 | CommandInput.displayName = CommandPrimitive.Input.displayName; 58 | 59 | const CommandList = React.forwardRef< 60 | React.ElementRef, 61 | React.ComponentPropsWithoutRef 62 | >(({ className, ...props }, ref) => ( 63 | 68 | )); 69 | 70 | CommandList.displayName = CommandPrimitive.List.displayName; 71 | 72 | const CommandEmpty = React.forwardRef< 73 | React.ElementRef, 74 | React.ComponentPropsWithoutRef 75 | >((props, ref) => ( 76 | 81 | )); 82 | 83 | CommandEmpty.displayName = CommandPrimitive.Empty.displayName; 84 | 85 | const CommandGroup = React.forwardRef< 86 | React.ElementRef, 87 | React.ComponentPropsWithoutRef 88 | >(({ className, ...props }, ref) => ( 89 | 97 | )); 98 | 99 | CommandGroup.displayName = CommandPrimitive.Group.displayName; 100 | 101 | const CommandSeparator = React.forwardRef< 102 | React.ElementRef, 103 | React.ComponentPropsWithoutRef 104 | >(({ className, ...props }, ref) => ( 105 | 110 | )); 111 | CommandSeparator.displayName = CommandPrimitive.Separator.displayName; 112 | 113 | const CommandItem = React.forwardRef< 114 | React.ElementRef, 115 | React.ComponentPropsWithoutRef 116 | >(({ className, ...props }, ref) => ( 117 | 125 | )); 126 | 127 | CommandItem.displayName = CommandPrimitive.Item.displayName; 128 | 129 | const CommandShortcut = ({ 130 | className, 131 | ...props 132 | }: React.HTMLAttributes) => { 133 | return ( 134 | 141 | ); 142 | }; 143 | CommandShortcut.displayName = "CommandShortcut"; 144 | 145 | export { 146 | Command, 147 | CommandDialog, 148 | CommandInput, 149 | CommandList, 150 | CommandEmpty, 151 | CommandGroup, 152 | CommandItem, 153 | CommandShortcut, 154 | CommandSeparator, 155 | }; 156 | -------------------------------------------------------------------------------- /frontend/src/components/ui/dialog.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import * as DialogPrimitive from "@radix-ui/react-dialog"; 5 | import { X } from "lucide-react"; 6 | 7 | import { cn } from "@/lib/utils"; 8 | 9 | const Dialog = DialogPrimitive.Root; 10 | 11 | const DialogTrigger = DialogPrimitive.Trigger; 12 | 13 | const DialogPortal = ({ 14 | className, 15 | ...props 16 | }: DialogPrimitive.DialogPortalProps) => ( 17 | 18 | ); 19 | DialogPortal.displayName = DialogPrimitive.Portal.displayName; 20 | 21 | const DialogOverlay = React.forwardRef< 22 | React.ElementRef, 23 | React.ComponentPropsWithoutRef 24 | >(({ className, ...props }, ref) => ( 25 | 33 | )); 34 | DialogOverlay.displayName = DialogPrimitive.Overlay.displayName; 35 | 36 | const DialogContent = React.forwardRef< 37 | React.ElementRef, 38 | React.ComponentPropsWithoutRef 39 | >(({ className, children, ...props }, ref) => ( 40 | 41 | 42 | 50 | {children} 51 | 52 | 53 | Close 54 | 55 | 56 | 57 | )); 58 | DialogContent.displayName = DialogPrimitive.Content.displayName; 59 | 60 | const DialogHeader = ({ 61 | className, 62 | ...props 63 | }: React.HTMLAttributes) => ( 64 |
71 | ); 72 | DialogHeader.displayName = "DialogHeader"; 73 | 74 | const DialogFooter = ({ 75 | className, 76 | ...props 77 | }: React.HTMLAttributes) => ( 78 |
85 | ); 86 | DialogFooter.displayName = "DialogFooter"; 87 | 88 | const DialogTitle = React.forwardRef< 89 | React.ElementRef, 90 | React.ComponentPropsWithoutRef 91 | >(({ className, ...props }, ref) => ( 92 | 100 | )); 101 | DialogTitle.displayName = DialogPrimitive.Title.displayName; 102 | 103 | const DialogDescription = React.forwardRef< 104 | React.ElementRef, 105 | React.ComponentPropsWithoutRef 106 | >(({ className, ...props }, ref) => ( 107 | 112 | )); 113 | DialogDescription.displayName = DialogPrimitive.Description.displayName; 114 | 115 | export { 116 | Dialog, 117 | DialogTrigger, 118 | DialogContent, 119 | DialogHeader, 120 | DialogFooter, 121 | DialogTitle, 122 | DialogDescription, 123 | }; 124 | -------------------------------------------------------------------------------- /frontend/src/components/ui/form.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import * as LabelPrimitive from "@radix-ui/react-label"; 3 | import { Slot } from "@radix-ui/react-slot"; 4 | import { 5 | Controller, 6 | ControllerProps, 7 | FieldPath, 8 | FieldValues, 9 | FormProvider, 10 | useFormContext, 11 | } from "react-hook-form"; 12 | 13 | import { cn } from "@/lib/utils"; 14 | import { Label } from "@/components/ui/label"; 15 | 16 | const Form = FormProvider; 17 | 18 | type FormFieldContextValue< 19 | TFieldValues extends FieldValues = FieldValues, 20 | TName extends FieldPath = FieldPath, 21 | > = { 22 | name: TName; 23 | }; 24 | 25 | const FormFieldContext = React.createContext( 26 | {} as FormFieldContextValue, 27 | ); 28 | 29 | const FormField = < 30 | TFieldValues extends FieldValues = FieldValues, 31 | TName extends FieldPath = FieldPath, 32 | >({ 33 | ...props 34 | }: ControllerProps) => { 35 | return ( 36 | 37 | 38 | 39 | ); 40 | }; 41 | 42 | const useFormField = () => { 43 | const fieldContext = React.useContext(FormFieldContext); 44 | const itemContext = React.useContext(FormItemContext); 45 | const { getFieldState, formState } = useFormContext(); 46 | 47 | const fieldState = getFieldState(fieldContext.name, formState); 48 | 49 | if (!fieldContext) { 50 | throw new Error("useFormField should be used within "); 51 | } 52 | 53 | const { id } = itemContext; 54 | 55 | return { 56 | id, 57 | name: fieldContext.name, 58 | formItemId: `${id}-form-item`, 59 | formDescriptionId: `${id}-form-item-description`, 60 | formMessageId: `${id}-form-item-message`, 61 | ...fieldState, 62 | }; 63 | }; 64 | 65 | type FormItemContextValue = { 66 | id: string; 67 | }; 68 | 69 | const FormItemContext = React.createContext( 70 | {} as FormItemContextValue, 71 | ); 72 | 73 | const FormItem = React.forwardRef< 74 | HTMLDivElement, 75 | React.HTMLAttributes 76 | >(({ className, ...props }, ref) => { 77 | const id = React.useId(); 78 | 79 | return ( 80 | 81 |
82 | 83 | ); 84 | }); 85 | FormItem.displayName = "FormItem"; 86 | 87 | const FormLabel = React.forwardRef< 88 | React.ElementRef, 89 | React.ComponentPropsWithoutRef 90 | >(({ className, ...props }, ref) => { 91 | const { error, formItemId } = useFormField(); 92 | 93 | return ( 94 |