├── .docker ├── docker-compose.yml └── entrypoint.sh ├── .gitattributes ├── .github └── workflows │ ├── ci.yml │ └── ghcr.yml ├── .gitignore ├── CHANGELOG.md ├── Dockerfile ├── Dockerfile.dev ├── README.md ├── babel.cfg ├── config └── settings.toml ├── crashserver ├── __init__.py ├── cli.py ├── config.py ├── migrations │ ├── README │ ├── alembic.ini │ ├── env.py │ ├── script.py.mako │ └── versions │ │ ├── 20211211_001700_initial_database.py │ │ ├── 20211211_002700_add_upload_ip.py │ │ ├── 20220114_103500_storage_table.py │ │ └── 20230108_054000_symcache_remove.py ├── server │ ├── __init__.py │ ├── controllers │ │ ├── __init__.py │ │ ├── auth.py │ │ ├── crash_upload_api.py │ │ ├── sym_upload_v1.py │ │ ├── sym_upload_v2.py │ │ ├── webapi.py │ │ └── webviews.py │ ├── core │ │ ├── __init__.py │ │ └── extensions.py │ ├── forms.py │ ├── helpers │ │ ├── __init__.py │ │ └── crash_upload.py │ ├── jobs.py │ ├── models │ │ ├── __init__.py │ │ ├── annotation.py │ │ ├── attachments.py │ │ ├── build_metadata.py │ │ ├── minidump.py │ │ ├── project.py │ │ ├── storage.py │ │ ├── symbol.py │ │ ├── symbol_upload.py │ │ └── user.py │ └── storage │ │ ├── __init__.py │ │ ├── backend.py │ │ ├── loader.py │ │ ├── modules │ │ ├── __init__.py │ │ ├── filesystem.py │ │ └── s3.py │ │ ├── storage_factory.py │ │ └── storage_target.py ├── syscheck.py └── utility │ ├── __init__.py │ ├── decorators.py │ ├── hostinfo.py │ ├── misc.py │ ├── processor.py │ └── sysinfo.py ├── main-rq.py ├── main.py ├── poetry.lock ├── pyproject.toml ├── res ├── assets │ ├── crashserver_banner.png │ └── crashserver_logo.svg ├── bin │ └── linux │ │ ├── README.md │ │ ├── dump_syms │ │ ├── minidump_stackwalk │ │ └── stackwalker ├── static │ ├── css │ │ ├── app.css │ │ ├── fa-all.css │ │ └── prism.css │ ├── img │ │ ├── apple-touch-icon.png │ │ └── favicon-32x32.png │ ├── js │ │ ├── app.js │ │ └── prism.js │ └── webfonts │ │ ├── fa-brands-400.eot │ │ ├── fa-brands-400.svg │ │ ├── fa-brands-400.ttf │ │ ├── fa-brands-400.woff │ │ ├── fa-brands-400.woff2 │ │ ├── fa-regular-400.eot │ │ ├── fa-regular-400.svg │ │ ├── fa-regular-400.ttf │ │ ├── fa-regular-400.woff │ │ ├── fa-regular-400.woff2 │ │ ├── fa-solid-900.eot │ │ ├── fa-solid-900.svg │ │ ├── fa-solid-900.ttf │ │ ├── fa-solid-900.woff │ │ └── fa-solid-900.woff2 └── templates │ ├── app │ ├── create.html │ ├── dashboard.html │ ├── home.html │ ├── settings.html │ ├── settings.macros.html │ └── upload.html │ ├── auth │ └── login.html │ ├── base │ ├── base.html │ ├── flash.html │ ├── navbar.html │ └── sidebar.html │ ├── crash │ ├── crash.html │ └── crash_detail.html │ ├── errors │ ├── 404.html │ └── 500.html │ └── symbols │ ├── symbol-list-no-syms.html │ ├── symbol-list-simple.html │ ├── symbol-list-versioned.html │ ├── symbol-upload.html │ └── symbols.html └── tests ├── __init__.py ├── conftest.py ├── functional ├── test_auth.py ├── test_storage.py └── test_webviews.py └── util.py /.docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # Development docker-compose.yml 2 | # Run `docker-compose up -d` in the directory, and the following things will happen 3 | # 1. A postgres and redis container will be created. These are required backend services. 4 | # 2. Two containers will be created based on the `Dockerfile.dev` in the root of the project. 5 | # This dockerfile allow the source code in the root of the project to be used with the rest 6 | # of the docker environment. 
You can modify the source, and since the flask development 7 | # server is used, the changes will be updated immediately. 8 | # 3. A folder called `data` will be created in this directory. All appdata and log output 9 | # will be stored within this directory 10 | 11 | version: "3.0" 12 | 13 | networks: 14 | crashnet: 15 | driver: bridge 16 | 17 | services: 18 | redis: 19 | image: bitnami/redis:7.0 20 | restart: always 21 | networks: [crashnet] 22 | ports: ["6379:6379"] 23 | environment: 24 | REDIS_PASSWORD: ${REDIS_PASSWORD:-password} 25 | 26 | postgres: 27 | image: postgres 28 | restart: always 29 | networks: [crashnet] 30 | volumes: [db_data:/var/lib/postgresql/data] 31 | ports: ["5432:5432"] 32 | environment: 33 | POSTGRES_DB: ${POSTGRES_DB:-crashserver} 34 | POSTGRES_USER: ${POSTGRES_USER:-postgres} 35 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} 36 | 37 | minio: 38 | image: minio/minio 39 | networks: [crashnet] 40 | restart: always 41 | command: ["server", "/data", "--console-address", ":9001"] 42 | environment: 43 | MINIO_ACCESS_KEY: cs-access-key 44 | MINIO_SECRET_KEY: cs-secret-key 45 | ports: ["9000:9000", "9001:9001"] 46 | volumes: [minio_data:/data] 47 | 48 | crashserver-app: 49 | build: 50 | context: .. 51 | dockerfile: Dockerfile.dev 52 | restart: always 53 | networks: [crashnet] 54 | depends_on: [postgres, crashserver-worker, redis] 55 | ports: ["8888:8888"] 56 | volumes: 57 | - ../:/app/ 58 | 59 | # App Data 60 | - ./logs:/logs 61 | - ./storage:/storage 62 | environment: 63 | FLASK_APP: crashserver/server:create_app 64 | FLASK_RUN_HOST: 0.0.0.0 65 | FLASK_RUN_PORT: 8888 66 | FLASK_DEBUG: 1 67 | CRASH_DB__db: ${POSTGRES_DB:-crashserver} 68 | CRASH_DB__user: ${POSTGRES_USER:-postgres} 69 | CRASH_DB__passwd: ${POSTGRES_PASSWORD:-password} 70 | CRASH_DB__host: ${CRASH_DB__HOST:-postgres} 71 | CRASH_REDIS__passwd: ${REDIS_PASSWORD:-password} 72 | TZ: ${TZ:-UTC} 73 | 74 | crashserver-worker: 75 | build: 76 | context: .. 77 | dockerfile: Dockerfile.dev 78 | command: python3 /app/main-rq.py 79 | restart: always 80 | depends_on: [postgres, redis] 81 | networks: [crashnet] 82 | volumes: ["./storage:/storage", "../:/app/"] 83 | environment: 84 | CRASH_DB__db: ${POSTGRES_DB:-crashserver} 85 | CRASH_DB__user: ${POSTGRES_USER:-postgres} 86 | CRASH_DB__passwd: ${POSTGRES_PASSWORD:-password} 87 | CRASH_DB__host: ${CRASH_DB__HOST:-postgres} 88 | CRASH_REDIS__passwd: ${REDIS_PASSWORD:-password} 89 | LOGURU_LEVEL: INFO 90 | 91 | 92 | volumes: 93 | db_data: 94 | minio_data: -------------------------------------------------------------------------------- /.docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # check to see if this file is being run or sourced from another script 5 | _is_sourced() { 6 | # https://unix.stackexchange.com/a/215279 7 | [ "${#FUNCNAME[@]}" -ge 2 ] \ 8 | && [ "${FUNCNAME[0]}" = '_is_sourced' ] \ 9 | && [ "${FUNCNAME[1]}" = 'source' ] 10 | } 11 | 12 | _main() { 13 | # If trying to run webapp... 14 | if [ "$*" = "python3 ./main.py" ]; then 15 | 16 | # Ensure we have rw to correct directories 17 | # Permissions only modified for the webapp, as only that requires write access 18 | # TODO: This assumes all symbols will be stored on the same system. 19 | # This is not true for all installations, due to CrashServer being pre-release, 20 | # is acceptable until a remote symbol storage system is implemented. 21 | [ -d "/storage" ] && { 22 | find /storage \! 
-user "$PUID" -exec chown "$PUID:$PGID" '{}' + 23 | chmod 755 /storage 24 | } 25 | [ -d "/logs" ] && { 26 | find /logs \! -user "$PUID" -exec chown "$PUID:$PGID" '{}' + 27 | chmod 755 /logs 28 | } 29 | fi 30 | 31 | exec gosu "$PUID:$PGID" "$@" 32 | } 33 | 34 | if ! _is_sourced; then 35 | _main "$@" 36 | fi -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Enable core.autocrlf for all files in the repository. 2 | * text=auto 3 | *.sh text eol=lf 4 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Crash Server CI 2 | on: push 3 | 4 | jobs: 5 | lint-with-blackd: 6 | name: Black Formatter 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout CrashServer 10 | uses: actions/checkout@v2 11 | 12 | - name: Run Black Formatter 13 | uses: psf/black@stable 14 | with: 15 | options: "--check --verbose --line-length 180" 16 | 17 | check-image-build: 18 | name: Build Container Image 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout CrashServer 22 | uses: actions/checkout@v2 23 | 24 | - name: Build CrashServer Container 25 | id: build 26 | uses: docker/build-push-action@v2 27 | with: 28 | push: false -------------------------------------------------------------------------------- /.github/workflows/ghcr.yml: -------------------------------------------------------------------------------- 1 | name: Github Container Upload 2 | on: 3 | push: 4 | branches: [develop] 5 | release: 6 | types: [published] 7 | 8 | jobs: 9 | ghcr: 10 | name: Upload to Github Container Registry 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout CrashServer 14 | uses: actions/checkout@v2 15 | 16 | - name: Login to GitHub Container Registry 17 | uses: docker/login-action@v1 18 | with: 19 | registry: ghcr.io 20 | username: ${{ github.actor }} 21 | password: ${{ secrets.GHCR_TOKEN }} 22 | 23 | - name: Generate Container Metadata 24 | id: meta 25 | uses: docker/metadata-action@v3 26 | with: 27 | images: ghcr.io/jameskr97/crashserver 28 | tags: | 29 | type=ref,event=tag 30 | type=semver,pattern={{major}}.{{minor}}.{{patch}} 31 | type=ref,event=branch 32 | 33 | - name: Build and push 34 | id: build 35 | uses: docker/build-push-action@v2 36 | with: 37 | push: true 38 | tags: ${{ steps.meta.outputs.tags }} 39 | labels: ${{ steps.meta.outputs.labels }} 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Build Artifacts 2 | **/__pycache__/** 3 | /dist/ 4 | 5 | # IDE Related 6 | /.idea/ 7 | 8 | # Application related 9 | messages.pot 10 | .env 11 | .docker/logs 12 | .docker/storage -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
6 | 7 | ## [0.4.2] - 2023-01-08 8 | ### Fixed 9 | - `redis` image switched to `bitnami/redis` as this allows use of a `REDIS_PASSWORD` environment variable 10 | - A password has been added to redis. This previously allowed anybody to access the redis instance, and send commands which would prevent proper functionality. 11 | - `SymCache` has been removed in favor of a local cache which exists inside each `crashserver-worker` instance. 12 | 13 | ### Added 14 | - Authorized users may now upload symbols via a web interface 15 | 16 | ## [0.4.1] - 2022-05-09 17 | ### Fixed 18 | - Change calling of `create_app()` with previously created app variable in main.py 19 | 20 | ## [0.4.0] - 2022-04-25 21 | ### Fixed 22 | - Migrations now include initial database create as a migration. (Date and time estimated based on first actual migration) 23 | - Fixed crash-per-day graph possibly being themed incorrectly on first load. 24 | - Crash detail modules list no longer whited-out in dark-mode, and list is alphabetized. 25 | - Account dropdown no longer whited-out in dark-mode. 26 | 27 | ### Added 28 | - Storage module added to isolate the request to save a file from where the file is saved. 29 | - `gosu` added to Dockerfiles to allow for setting mapped directory ownership, and specifying a `PUID/PGID` for the container to run as. (Note: Docker `--user` param is no longer used) 30 | - TZ environment variable added to configure how information is displayed. Required installing `tzdata` in docker 31 | - Flask-Babel for easy translation. All translatable strings surrounded with relevant `_()` and `ngettext()` functions. 32 | - Minidump files may now be uploaded with or without gzip. 33 | 34 | ### Changed 35 | - RQ Worker now started via `main-rq.py`. 36 | - RQ Worker jobs updated to use Storage module 37 | - Moved commands which were a part of the `crashserver` cli command to be within the `flask` command 38 | - Updated README.md to include a `Getting Started` section 39 | - Crash-per-day chart bars widened, and removed vertical lines 40 | - Crash-per-day chart shows day of week name, instead of year 41 | - Crash-per-day chart has day of week on new line 42 | - Log output for each minidump upload only takes one line, instead of two. 43 | - Reorganized javascript functions 44 | 45 | ### Removed 46 | - Remove `get_disk_space` and `is_inside_docker` functions. 47 | 48 | ## [0.3.5] - 2021-12-25 49 | ### Added 50 | - Dark mode! Toggle icon seen at top right, or in drop-down on mobile 51 | - Chart now shows 7 or 30 days depending on dropdown selection. 52 | 53 | ### Changed 54 | - Attachment content no longer included on every page by default. Content requested on view attempt via webapi endpoint. 55 | - Attachments unable to be fetched present an error, instead of infinitely loading. 56 | 57 | ## [0.3.4] - 2021-12-21 58 | ### Added 59 | - Update Docker to use private env file added to .gitignore. 60 | 61 | ### Fixed 62 | - Crash Report chart now shows the most recent 7 days instead of the first recorded 7 days.
63 | 64 | ## [0.3.3] - 2021-12-16 65 | ### Added 66 | - Graph on homepage which shows crashes for each day of the past week 67 | - Added favicon for web browsers and apple icon shortcut 68 | - Decoded minidumps for versioned symbols now show the symbol's related version number on the crash detail page 69 | 70 | ### Changed 71 | - Use S3 compatible storage for attachments 72 | 73 | ## [0.3.2] - 2021-12-11 74 | ### Changed 75 | - Crash Detail prismjs attachment presentation is larger, and syntax is updated 76 | - Switched init to use flask app factory design 77 | - Added Flask-DebugToolbar for request debug information 78 | 79 | ### Added 80 | - Allowed cli.py to be run from cli 81 | - Added Flask-Migrate to easily upgrade database 82 | - Add `upload_ip` column to minidump table 83 | 84 | ### Removed 85 | - Remove minidump upload limit 86 | 87 | ### Fixed 88 | - Crash report page delete action, deletes correct minidump. 89 | - Fix reading files with bad/invalid utf-8 characters. 90 | 91 | ## [0.3.1] - 2021-10-29 92 | ### Removed 93 | - `charset-normalizer`: Switched to using BytesIO 94 | 95 | ### Fixed 96 | - Fixed web minidump upload to work with zero attachments 97 | - Fixed web crash list showing all rows as "Processing" 98 | - Fixed symbol lists not showing symbol count for all os's 99 | - Fixed linux icon not showing in symbol list 100 | 101 | ## [0.3.0] - 2021-09-29 102 | ### Added 103 | - Add support for uploading attachments. Any files uploaded in addition to the minidump will be added as a row in the Attachments table, and stored to disk 104 | - Crash detail page has tabs for stacktrace, and uploaded attachments 105 | 106 | ### Fixed 107 | - "Crashed Thread" tag on crash detail page marks the thread that actually crashed instead of the first thread 108 | 109 | ### Changed 110 | - `MinidumpTask` table removed, with relevant variables merged into `Minidump` table. 111 | 112 | 113 | ## [0.2.0.4] - 2021-09-25 114 | ### Added 115 | - Added `.docker` folder, which defines a docker based development environment. 116 | - Redis+RQ task queue to decode minidumps 117 | - Added prism.js for code formatting (though feature is WIP) 118 | 119 | ### Changed 120 | - Updated Crash Report page to look cleaner and show dump metadata or decode progress. 121 | - Updated Crash Report page to have a slim view for mobile pages.
122 | 123 | ### Removed 124 | - Huey as a task queue 125 | 126 | ### Fixed 127 | - Fixed bug preventing the upload webpage from uploading minidumps 128 | 129 | ## [0.2.0.3] - 2021-09-19 130 | ### Fixed 131 | - Flush symbol after it is stored in the database to get "symbol_location" from project 132 | 133 | ### Changed 134 | - Symbol "doesn't exist", and "already uploaded" log messages 135 | 136 | ## [0.2.0.2] - 2021-09-18 137 | ### Added 138 | - More log messages on minidump processing 139 | - `/logs/app.log` for application logs 140 | 141 | ### Fixed 142 | - Dockerfile to download essential library `libcurl3-gnutls` for stackwalker 143 | 144 | 145 | ## [0.2.0.1] - 2021-09-07 146 | ### Added 147 | - Module to get system specific information 148 | 149 | ### Changed 150 | - Switched from python logging module to [loguru](https://github.com/Delgan/loguru) 151 | 152 | ## [0.2.0] - 2021-09-05 153 | ### Added 154 | - Allow for deep linking via `#` in url on the settings page 155 | - CLI commands for creating and deleting user accounts 156 | - Notify user if there are no symbols uploaded when trying to upload a minidump 157 | - gunicorn is run from a python application script instead of the command line 158 | - gunicorn access logs are written to the /logs directory 159 | 160 | ### Changed 161 | - Disallow empty version argument when uploading symbol for versioned project 162 | 163 | ### Removed 164 | - Removed config for domain. 165 | 166 | ## [0.1.1-alpha] - 2021-09-02 167 | ### Added 168 | - Show API keys at settings page for each project. 169 | - Allow password to be changed when form is posted 170 | - Add more config information at settings page about tab 171 | - Ensure all storage directories are created at program startup 172 | 173 | ### Fixed 174 | - Upload page not allowing upload after a wrong file was previously uploaded 175 | 176 | ## [0.1.0-alpha] - 2021-09-01 177 | ### Added 178 | - View recently uploaded crash reports at `/crash-reports` endpoint 179 | - View uploaded symbols by project at `/symbols` endpoint 180 | - Upload minidumps via publicly-accessible `/upload` webpage 181 | - Upload minidump via `/api/minidump/upload` endpoint, with minidump specific api-key 182 | - Minidump uploads are limited to 10 per hour, per remote ip-address 183 | - Upload symbols via `/api/symbol/upload` endpoint, with symbol specific api-key 184 | - Login system (no registration) for admins to be able to 185 | - Create projects 186 | - View project data 187 | - View system information 188 | - Admin panel after login is mostly non-functioning, and is still a WIP at this release 189 | - Asynchronously decode any uploaded minidump 190 | - Only decode minidumps if symbol is available 191 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Dockerfile created with recommendations from the following links: 2 | # - https://github.com/hexops/dockerfile 3 | # - https://github.com/juan131/dockerfile-best-practices 4 | # - https://www.youtube.com/watch?v=74rOYNmxfL8 5 | # - https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker 6 | FROM python:3.10.1-slim-bullseye as builder 7 | 8 | # Install poetry for building 9 | WORKDIR /build 10 | ENV POETRY_VERSION=1.3.1 11 | 12 | COPY poetry.lock pyproject.toml ./ 13 | RUN pip install "poetry==$POETRY_VERSION" && python -m venv /venv && poetry export -f requirements.txt | /venv/bin/pip install -r /dev/stdin 14 | 15 | # Copy
source code, change owner, then build 16 | COPY README.md . 17 | COPY main.py . 18 | COPY crashserver/ crashserver/ 19 | RUN poetry build && /venv/bin/pip install dist/*.whl 20 | 21 | FROM python:3.10.1-slim-bullseye as runner 22 | 23 | # Set environment variables 24 | ENV DEBIAN_FRONTEND=noninteractive 25 | ENV TZ=UTC 26 | ENV PUID=10000 27 | ENV PGID=10001 28 | ENV VIRTUAL_ENV=/venv 29 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 30 | ENV ENV_FOR_DYNACONF=docker 31 | 32 | # Copy environment, change owner, and install system dependencies 33 | WORKDIR /app 34 | COPY main.py ./ 35 | COPY main-rq.py ./ 36 | COPY config/ config/ 37 | COPY res/ res/ 38 | COPY crashserver/migrations crashserver/migrations 39 | 40 | # Install linux dependencies 41 | COPY --from=builder /venv /venv 42 | RUN apt update &&\ 43 | apt install libmagic1 libcurl3-gnutls tzdata gosu -y --no-install-recommends &&\ 44 | rm -rf /var/lib/apt/lists/* &&\ 45 | python3 -m venv $VIRTUAL_ENV 46 | 47 | COPY .docker/entrypoint.sh /entrypoint.sh 48 | RUN chmod +x /entrypoint.sh 49 | ENTRYPOINT ["/entrypoint.sh"] 50 | CMD ["python3", "./main.py"] 51 | 52 | -------------------------------------------------------------------------------- /Dockerfile.dev: -------------------------------------------------------------------------------- 1 | FROM python:3.10.1-slim-bullseye as builder 2 | 3 | # Install poetry for building 4 | WORKDIR /build 5 | ENV POETRY_VERSION=1.3.1 6 | 7 | COPY poetry.lock pyproject.toml ./ 8 | RUN pip install "poetry==$POETRY_VERSION" && python -m venv /venv && poetry export --with dev -f requirements.txt | /venv/bin/pip install -r /dev/stdin 9 | 10 | FROM python:3.10.1-slim-bullseye as runner 11 | 12 | # Set environment variables 13 | ENV DEBIAN_FRONTEND=noninteractive 14 | ENV TZ=UTC 15 | ENV PUID=10000 16 | ENV PGID=10001 17 | ENV VIRTUAL_ENV=/venv 18 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 19 | ENV ENV_FOR_DYNACONF=docker 20 | 21 | # Copy environment, change owner, and install system dependencies 22 | WORKDIR /app 23 | 24 | # Install linux dependencies 25 | COPY --from=builder /venv /venv 26 | RUN apt update &&\ 27 | apt install libmagic1 libcurl3-gnutls tzdata gosu -y --no-install-recommends &&\ 28 | rm -rf /var/lib/apt/lists/* &&\ 29 | python3 -m venv $VIRTUAL_ENV 30 | 31 | COPY .docker/entrypoint.sh /entrypoint.sh 32 | RUN chmod +x /entrypoint.sh 33 | ENTRYPOINT ["/entrypoint.sh"] 34 | CMD ["python3", "./main.py"] 35 | 36 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![](res/assets/crashserver_banner.png) 2 | 3 | An implementation of an upstream collection server for the [Google Crashpad](https://chromium.googlesource.com/crashpad/crashpad/) crash handler. Intended as an all-in-one setup for small-to-medium projects that want the ability to: 4 | 5 | - Store symbols and decode minidumps for separate projects 6 | - View a list of recent crashes for any given project 7 | - Allow for minidump upload from a public webpage 8 | 9 | Built for open-source projects that use Google Crashpad and want to host their own crash collection server.
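As a quick client-side sketch (based on the `/api/minidump/upload` handler in `crashserver/server/controllers/crash_upload_api.py`, not an official client): the endpoint expects a multipart POST where the dump is sent in the `upload_file_minidump` field, any extra files are stored as attachments, and the project's minidump API key must accompany the request. The `api_key` field name below is an assumption — check `crashserver/utility/decorators.py` for the exact parameter your version expects.

```python
# Illustrative upload sketch using the `requests` library; the "api_key" field
# name is an assumed placeholder, enforced server-side by api_key_required().
import requests

SERVER = "http://localhost:8888"  # hypothetical local instance

with open("crash.dmp", "rb") as dump, open("app.log", "rb") as log:
    resp = requests.post(
        f"{SERVER}/api/minidump/upload",
        data={"api_key": "<project-minidump-api-key>"},  # assumed field name
        files={
            "upload_file_minidump": dump,  # field name the handler looks for
            "app_log": log,                # any additional file becomes an attachment
        },
    )
    print(resp.status_code, resp.text)
```

A `crashpad_handler` pointed at the same URL produces an equivalent request (optionally gzip-compressed), which the endpoint also accepts.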
10 | 11 | ## Getting Started 12 | 13 | ### Development 14 | 15 | Running CrashServer locally can be done with the following commands: 16 | 17 | ```bash 18 | $ git clone https://github.com/jameskr97/CrashServer.git 19 | $ cd CrashServer/.docker/ 20 | $ docker-compose up -d 21 | ``` 22 | 23 | ### Production 24 | 25 | CrashServer is not yet ready for production environments. If you still choose to use CrashServer for your application, you may refer to the `.docker/docker-compose.yml` for necessary components. CrashServer is available as a container image at `ghcr.io/jameskr97/CrashServer`. 26 | 27 | ## TODO 28 | - API 29 | - [x] `/api/minidump/upload` Upload Minidumps for project under endpoint. 30 | - [x] Handle `gzip` minidump upload 31 | - [x] `/api/symbol/upload` Upload Symbols for project under endpoint, secured by `api_key` 32 | - Web 33 | - [x] List of all symbols for project 34 | - [x] Upload minidump (publicly) 35 | - [x] Upload symbols (authenticated users only) 36 | - Backend 37 | - [x] Ensure minidump can be decoded before producing readable minidump 38 | - [ ] Support for symbols on development versions of the project 39 | - [ ] Auto-delete minidumps after a selected interval 40 | - [ ] Single project mode 41 | - [x] Implement `sym-upload-v1` protocol 42 | - [x] Implement `sym-upload-v2` protocol 43 | - [x] Use Amazon S3 (S3-Compatible) for all data storage 44 | - Misc 45 | - [ ] Documentation Generation 46 | - [ ] CI and Tests 47 | - [x] CLI Management interface (via flask) -------------------------------------------------------------------------------- /babel.cfg: -------------------------------------------------------------------------------- 1 | [python: crashserver/**.py] 2 | [jinja2: res/templates/**.html] 3 | extensions=jinja2.ext.autoescape,jinja2.ext.with_ -------------------------------------------------------------------------------- /config/settings.toml: -------------------------------------------------------------------------------- 1 | # CrashServer Configuration File 2 | 3 | [default] 4 | SECRET_KEY = "changeme" 5 | 6 | [default.flask] 7 | web_port = 8888 8 | 9 | # Currently, only postgres is supported 10 | [default.db] 11 | user = "postgres" 12 | passwd = "password" 13 | host = "postgres" 14 | port = 5432 15 | name = "crashserver" 16 | 17 | [default.redis] 18 | passwd = "password" 19 | host = "redis" 20 | port = 6379 21 | 22 | # Docker-based default storage directories 23 | [default.storage] 24 | appdata = "/storage" 25 | logs = "/logs" 26 | 27 | [testing] 28 | DEBUG_TB_INTERCEPT_REDIRECTS = false 29 | WTF_CSRF_ENABLED = false 30 | 31 | [testing.db] 32 | name = "test_crashserver" 33 | 34 | [testing.login] 35 | email = "test@test.com" 36 | passwd = "password" -------------------------------------------------------------------------------- /crashserver/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/crashserver/__init__.py -------------------------------------------------------------------------------- /crashserver/cli.py: -------------------------------------------------------------------------------- 1 | import os 2 | import uuid 3 | 4 | import click 5 | from email_validator import validate_email, EmailNotValidError 6 | from sqlalchemy import create_engine 7 | from sqlalchemy.orm import Session 8 | 9 | from crashserver.config import get_postgres_url 10 | 11 | 12 | def register_cli(app): 13 | register_translation(app)
register_account_mgmt(app) 15 | register_util(app) 16 | 17 | 18 | def register_translation(app): 19 | @app.cli.group() 20 | def translate(): 21 | """Translation and localization commands.""" 22 | pass 23 | 24 | @translate.command() 25 | def update(): 26 | """Update all languages.""" 27 | if os.system("pybabel extract -F babel.cfg -k _l -o messages.pot ."): 28 | raise RuntimeError("extract command failed") 29 | if os.system("pybabel update -i messages.pot -d res/translations"): 30 | raise RuntimeError("update command failed") 31 | os.remove("messages.pot") 32 | 33 | @translate.command() 34 | def compile(): 35 | """Compile all languages.""" 36 | if os.system("pybabel compile -d res/translations"): 37 | raise RuntimeError("compile command failed") 38 | 39 | @translate.command() 40 | @click.argument("lang") 41 | def init(lang): 42 | """Initialize a new language.""" 43 | if os.system("pybabel extract -F babel.cfg -k _l -o messages.pot ."): 44 | raise RuntimeError("extract command failed") 45 | if os.system("pybabel init -i messages.pot -d res/translations -l " + lang): 46 | raise RuntimeError("init command failed") 47 | os.remove("messages.pot") 48 | 49 | 50 | def register_account_mgmt(app): 51 | @app.cli.group() 52 | def account(): 53 | """Account management commands""" 54 | pass 55 | 56 | @account.command() 57 | @click.argument("email") 58 | def adduser(email): 59 | """Create a new web account""" 60 | from crashserver.server.models import User 61 | 62 | # Ensure email is valid 63 | try: 64 | valid = validate_email(email) 65 | email = valid.email 66 | except EmailNotValidError as e: 67 | click.echo(str(e)) 68 | return 69 | 70 | # Connect to database 71 | with Session(create_engine(get_postgres_url())) as session: 72 | res = session.query(User).filter_by(email=email).first() 73 | if res: 74 | click.echo(f"User {email} already exists") 75 | return 76 | 77 | new_password = str(uuid.uuid4()).replace("-", "") # Generate password 78 | new_user = User(email=email) 79 | new_user.set_password(new_password) 80 | 81 | session.add(new_user) # Store to server 82 | session.commit() 83 | 84 | click.echo(f"User {email}:{new_password} has been created.") # Notify 85 | 86 | @account.command() 87 | @click.argument("email", required=True) 88 | def deluser(email): 89 | """Delete an existing web account""" 90 | from crashserver.server.models import User 91 | 92 | with Session(create_engine(get_postgres_url())) as session: 93 | res = session.query(User).filter_by(email=email).first() 94 | if not res: 95 | click.echo(f"User {email} does not exist") 96 | 97 | # Delete the user 98 | session.delete(res) 99 | session.commit() 100 | click.echo(f"User {email} deleted.") 101 | 102 | 103 | def register_util(app): 104 | @app.cli.group() 105 | def util(): 106 | """Misc crashserver interface functions""" 107 | pass 108 | 109 | @util.command(help="Force minidump to decode") 110 | @click.argument("dump_id", required=True) 111 | def force_decode(dump_id): 112 | from crashserver.server.models import Minidump 113 | 114 | with Session(create_engine(get_postgres_url())) as session: 115 | res = session.query(Minidump).get(dump_id) 116 | res.decode_task() 117 | print(f"Minidump {dump_id} sent to worker for decode.") 118 | 119 | @util.command(help="Create DB files") 120 | def create_db(): 121 | from crashserver.server import db 122 | 123 | db.create_all() 124 | -------------------------------------------------------------------------------- /crashserver/config.py: 
-------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from dynaconf import Dynaconf 4 | 5 | settings = Dynaconf( 6 | settings_files=["config/settings.toml"], 7 | environments=True, 8 | load_dotenv=True, 9 | envvar_prefix="CRASH", 10 | merge_enabled=True, 11 | ) 12 | 13 | 14 | def get_appdata_directory(path: str) -> Path: 15 | p = Path(settings.storage.appdata, path) 16 | if not p.exists(): 17 | p.mkdir(exist_ok=True, parents=True) 18 | return p 19 | 20 | 21 | def get_postgres_url(): 22 | return f"postgresql://{settings.db.user}:{settings.db.passwd}@{settings.db.host}:{settings.db.port}/{settings.db.name}" 23 | 24 | 25 | def get_redis_url(): 26 | return f"redis://:{settings.redis.passwd}@{settings.redis.host}:{settings.redis.port}" 27 | -------------------------------------------------------------------------------- /crashserver/migrations/README: -------------------------------------------------------------------------------- 1 | Single-database configuration for Flask. 2 | -------------------------------------------------------------------------------- /crashserver/migrations/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # template used to generate migration files 5 | # file_template = %%(rev)s_%%(slug)s 6 | 7 | # set to 'true' to run the environment during 8 | # the 'revision' command, regardless of autogenerate 9 | # revision_environment = false 10 | 11 | 12 | # Logging configuration 13 | [loggers] 14 | keys = root,sqlalchemy,alembic,flask_migrate 15 | 16 | [handlers] 17 | keys = console 18 | 19 | [formatters] 20 | keys = generic 21 | 22 | [logger_root] 23 | level = WARN 24 | handlers = console 25 | qualname = 26 | 27 | [logger_sqlalchemy] 28 | level = WARN 29 | handlers = 30 | qualname = sqlalchemy.engine 31 | 32 | [logger_alembic] 33 | level = INFO 34 | handlers = 35 | qualname = alembic 36 | 37 | [logger_flask_migrate] 38 | level = INFO 39 | handlers = 40 | qualname = flask_migrate 41 | 42 | [handler_console] 43 | class = StreamHandler 44 | args = (sys.stderr,) 45 | level = NOTSET 46 | formatter = generic 47 | 48 | [formatter_generic] 49 | format = [%(asctime)s][%(name)s][%(levelname)s]: %(message)s 50 | datefmt = %Y-%m-%d %H:%M:%S 51 | -------------------------------------------------------------------------------- /crashserver/migrations/env.py: -------------------------------------------------------------------------------- 1 | from __future__ import with_statement 2 | 3 | import logging 4 | from logging.config import fileConfig 5 | 6 | from flask import current_app 7 | 8 | from alembic import context 9 | 10 | # this is the Alembic Config object, which provides 11 | # access to the values within the .ini file in use. 12 | config = context.config 13 | 14 | # Interpret the config file for Python logging. 15 | # This line sets up loggers basically. 
16 | fileConfig(config.config_file_name) 17 | logger = logging.getLogger("alembic.env") 18 | 19 | # add your model's MetaData object here 20 | # for 'autogenerate' support 21 | # from myapp import mymodel 22 | # target_metadata = mymodel.Base.metadata 23 | config.set_main_option("sqlalchemy.url", str(current_app.extensions["migrate"].db.get_engine().url).replace("%", "%%")) 24 | target_metadata = current_app.extensions["migrate"].db.metadata 25 | 26 | 27 | # other values from the config, defined by the needs of env.py, 28 | # can be acquired: 29 | # my_important_option = config.get_main_option("my_important_option") 30 | # ... etc. 31 | 32 | 33 | def run_migrations_offline(): 34 | """Run migrations in 'offline' mode. 35 | 36 | This configures the context with just a URL 37 | and not an Engine, though an Engine is acceptable 38 | here as well. By skipping the Engine creation 39 | we don't even need a DBAPI to be available. 40 | 41 | Calls to context.execute() here emit the given string to the 42 | script output. 43 | 44 | """ 45 | url = config.get_main_option("sqlalchemy.url") 46 | context.configure(url=url, target_metadata=target_metadata, literal_binds=True) 47 | 48 | with context.begin_transaction(): 49 | context.run_migrations() 50 | 51 | 52 | def run_migrations_online(): 53 | """Run migrations in 'online' mode. 54 | 55 | In this scenario we need to create an Engine 56 | and associate a connection with the context. 57 | 58 | """ 59 | 60 | # this callback is used to prevent an auto-migration from being generated 61 | # when there are no changes to the schema 62 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html 63 | def process_revision_directives(context, revision, directives): 64 | if getattr(config.cmd_opts, "autogenerate", False): 65 | script = directives[0] 66 | if script.upgrade_ops.is_empty(): 67 | directives[:] = [] 68 | logger.info("No changes in schema detected.") 69 | 70 | connectable = current_app.extensions["migrate"].db.get_engine() 71 | 72 | with connectable.connect() as connection: 73 | context.configure( 74 | connection=connection, 75 | target_metadata=target_metadata, 76 | process_revision_directives=process_revision_directives, 77 | **current_app.extensions["migrate"].configure_args, 78 | ) 79 | 80 | with context.begin_transaction(): 81 | context.run_migrations() 82 | 83 | 84 | if context.is_offline_mode(): 85 | run_migrations_offline() 86 | else: 87 | run_migrations_online() 88 | -------------------------------------------------------------------------------- /crashserver/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /crashserver/migrations/versions/20211211_001700_initial_database.py: -------------------------------------------------------------------------------- 1 | """Create initial database 2 | 3 | Revision ID: 20211211_001700_initial_database 4 | Revises: 5 | Create Date: 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "20211211_001700_initial_database" 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table( 22 | "project", 23 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 24 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 25 | sa.Column("project_name", sa.Text(), nullable=False), 26 | sa.Column("project_type", sa.Enum("SIMPLE", "VERSIONED", name="projecttype"), nullable=False), 27 | sa.Column("minidump_api_key", sa.String(length=32), nullable=False), 28 | sa.Column("symbol_api_key", sa.String(length=32), nullable=False), 29 | sa.PrimaryKeyConstraint("id"), 30 | ) 31 | op.create_table( 32 | "symcache", 33 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 34 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 35 | sa.Column("module_id", sa.Text(), nullable=False), 36 | sa.Column("build_id", sa.Text(), nullable=False), 37 | sa.Column("os", sa.Text(), nullable=True), 38 | sa.Column("arch", sa.Text(), nullable=True), 39 | sa.Column("file_location", sa.Text(), nullable=True), 40 | sa.Column("file_size_bytes", sa.Integer(), nullable=True), 41 | sa.PrimaryKeyConstraint("id"), 42 | ) 43 | op.create_table( 44 | "users", 45 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 46 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 47 | sa.Column("email", sa.String(length=254), nullable=False), 48 | sa.Column("password", sa.String(length=200), nullable=False), 49 | sa.PrimaryKeyConstraint("id"), 50 | ) 51 | op.create_table( 52 | "build_metadata", 53 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 54 | sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), 55 | sa.Column("module_id", sa.Text(), nullable=False), 56 | sa.Column("build_id", sa.Text(), nullable=False), 57 | sa.ForeignKeyConstraint( 58 | ["project_id"], 59 | ["project.id"], 60 | ), 61 | sa.PrimaryKeyConstraint("id"), 62 | ) 63 | op.create_table( 64 | "sym_upload_tracker", 65 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 66 | sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), 67 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 68 | 
sa.Column("module_id", sa.Text(), nullable=True), 69 | sa.Column("build_id", sa.Text(), nullable=True), 70 | sa.Column("arch", sa.Text(), nullable=True), 71 | sa.Column("os", sa.Text(), nullable=True), 72 | sa.Column("file_hash", sa.String(length=64), nullable=True), 73 | sa.ForeignKeyConstraint( 74 | ["project_id"], 75 | ["project.id"], 76 | ), 77 | sa.PrimaryKeyConstraint("id"), 78 | ) 79 | op.create_table( 80 | "minidump", 81 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 82 | sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), 83 | sa.Column("build_metadata_id", postgresql.UUID(as_uuid=True), nullable=True), 84 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 85 | sa.Column("symbolicated", sa.Boolean(), nullable=True), 86 | sa.Column("client_guid", postgresql.UUID(as_uuid=True), nullable=True), 87 | sa.Column("filename", sa.Text(), nullable=False), 88 | sa.Column("stacktrace", postgresql.JSONB(astext_type=sa.Text()), nullable=True), 89 | sa.Column("decode_task_id", sa.String(length=36), nullable=True), 90 | sa.Column("decode_task_complete", sa.Boolean(), nullable=True), 91 | sa.ForeignKeyConstraint( 92 | ["build_metadata_id"], 93 | ["build_metadata.id"], 94 | ), 95 | sa.ForeignKeyConstraint( 96 | ["project_id"], 97 | ["project.id"], 98 | ), 99 | sa.PrimaryKeyConstraint("id"), 100 | ) 101 | op.create_table( 102 | "symbol", 103 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 104 | sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), 105 | sa.Column("build_metadata_id", postgresql.UUID(as_uuid=True), nullable=False), 106 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 107 | sa.Column("app_version", sa.Text(), nullable=True), 108 | sa.Column("os", sa.Text(), nullable=False), 109 | sa.Column("arch", sa.Text(), nullable=False), 110 | sa.Column("file_location", sa.Text(), nullable=False), 111 | sa.Column("file_size_bytes", sa.Integer(), nullable=False), 112 | sa.Column("file_hash", sa.String(length=64), nullable=False), 113 | sa.ForeignKeyConstraint( 114 | ["build_metadata_id"], 115 | ["build_metadata.id"], 116 | ), 117 | sa.ForeignKeyConstraint( 118 | ["project_id"], 119 | ["project.id"], 120 | ), 121 | sa.PrimaryKeyConstraint("id"), 122 | ) 123 | op.create_table( 124 | "annotation", 125 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 126 | sa.Column("minidump_id", postgresql.UUID(as_uuid=True), nullable=False), 127 | sa.Column("key", sa.Text(), nullable=False), 128 | sa.Column("value", sa.Text(), nullable=False), 129 | sa.ForeignKeyConstraint( 130 | ["minidump_id"], 131 | ["minidump.id"], 132 | ), 133 | sa.PrimaryKeyConstraint("id"), 134 | ) 135 | op.create_table( 136 | "attachments", 137 | sa.Column("id", postgresql.UUID(as_uuid=True), server_default=sa.text("gen_random_uuid()"), nullable=False), 138 | sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), 139 | sa.Column("minidump_id", postgresql.UUID(as_uuid=True), nullable=False), 140 | sa.Column("date_created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True), 141 | sa.Column("mime_type", sa.Text(), nullable=False), 142 | sa.Column("file_size_bytes", sa.Integer(), nullable=False), 143 | sa.Column("filename", sa.Text(), nullable=False), 144 | 
sa.Column("original_filename", sa.Text(), nullable=False), 145 | sa.ForeignKeyConstraint( 146 | ["minidump_id"], 147 | ["minidump.id"], 148 | ), 149 | sa.ForeignKeyConstraint( 150 | ["project_id"], 151 | ["project.id"], 152 | ), 153 | sa.PrimaryKeyConstraint("id"), 154 | ) 155 | # ### end Alembic commands ### 156 | 157 | 158 | def downgrade(): 159 | # ### commands auto generated by Alembic - please adjust! ### 160 | op.drop_table("attachments") 161 | op.drop_table("annotation") 162 | op.drop_table("symbol") 163 | op.drop_table("minidump") 164 | op.drop_table("sym_upload_tracker") 165 | op.drop_table("build_metadata") 166 | op.drop_table("users") 167 | op.drop_table("symcache") 168 | op.drop_table("project") 169 | op.execute("DROP TYPE projecttype") 170 | # ### end Alembic commands ### 171 | -------------------------------------------------------------------------------- /crashserver/migrations/versions/20211211_002700_add_upload_ip.py: -------------------------------------------------------------------------------- 1 | """Add "upload_ip" column to minidump 2 | 3 | Revision ID: 20211211_002700_add_upload_ip 4 | Revises: 5 | Create Date: 2021-12-11 05:20:46.385955 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "20211211_002700_add_upload_ip" 14 | down_revision = "20211211_001700_initial_database" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | op.add_column("minidump", sa.Column("upload_ip", postgresql.INET(), nullable=True)) 21 | 22 | 23 | def downgrade(): 24 | op.drop_column("minidump", "upload_ip") 25 | -------------------------------------------------------------------------------- /crashserver/migrations/versions/20220114_103500_storage_table.py: -------------------------------------------------------------------------------- 1 | """Add Storage table for isolated file access 2 | 3 | Revision ID: 20220114_103500_storage_table 4 | Revises: 20211211_002700_add_upload_ip 5 | Create Date: 2022-01-14 11:58:22.140682 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "20220114_103500_storage_table" 14 | down_revision = "20211211_002700_add_upload_ip" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table( 22 | "storage", 23 | sa.Column("key", sa.Text(), nullable=False), 24 | sa.Column("is_enabled", sa.Boolean(), nullable=False), 25 | sa.Column("is_primary", sa.Boolean(), nullable=False), 26 | sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=True), 27 | sa.PrimaryKeyConstraint("key"), 28 | ) 29 | # ### end Alembic commands ### 30 | 31 | 32 | def downgrade(): 33 | # ### commands auto generated by Alembic - please adjust! 
### 34 | op.drop_table("storage") 35 | # ### end Alembic commands ### 36 | -------------------------------------------------------------------------------- /crashserver/migrations/versions/20230108_054000_symcache_remove.py: -------------------------------------------------------------------------------- 1 | """Remove symcache table 2 | 3 | Revision ID: 8ef90f500b38 4 | Revises: 20220114_103500_storage_table 5 | Create Date: 2023-01-08 05:37:58.456479 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "20230108_054000_symcache_remove" 14 | down_revision = "20220114_103500_storage_table" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.drop_table("symcache") 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.create_table( 28 | "symcache", 29 | sa.Column("id", postgresql.UUID(), server_default=sa.text("gen_random_uuid()"), autoincrement=False, nullable=False), 30 | sa.Column("date_created", postgresql.TIMESTAMP(timezone=True), server_default=sa.text("now()"), autoincrement=False, nullable=True), 31 | sa.Column("module_id", sa.TEXT(), autoincrement=False, nullable=False), 32 | sa.Column("build_id", sa.TEXT(), autoincrement=False, nullable=False), 33 | sa.Column("os", sa.TEXT(), autoincrement=False, nullable=True), 34 | sa.Column("arch", sa.TEXT(), autoincrement=False, nullable=True), 35 | sa.Column("file_location", sa.TEXT(), autoincrement=False, nullable=True), 36 | sa.Column("file_size_bytes", sa.INTEGER(), autoincrement=False, nullable=True), 37 | sa.PrimaryKeyConstraint("id", name="symcache_pkey"), 38 | ) 39 | # ### end Alembic commands ### 40 | -------------------------------------------------------------------------------- /crashserver/server/__init__.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import flask 4 | from dynaconf import FlaskDynaconf 5 | from flask import Flask, render_template 6 | from flask_babel import _ 7 | from sqlalchemy_utils import create_database, database_exists 8 | 9 | from crashserver.cli import register_cli 10 | from crashserver.config import get_postgres_url 11 | from crashserver.server.core.extensions import babel, debug_toolbar, login, limiter, migrate, db, queue 12 | from crashserver.utility.hostinfo import HostInfo 13 | 14 | 15 | def create_app(): 16 | app = init_environment() 17 | 18 | register_errors(app) 19 | register_extensions(app) 20 | register_blueprints(app) 21 | register_jinja(app) 22 | register_cli(app) 23 | 24 | return app 25 | 26 | 27 | def init_environment(): 28 | """ 29 | Create Flask object with correct template/static dirs, and ensure essential app directories exist. 
30 | :return: Flask app object 31 | """ 32 | # Setup config directories 33 | resources_root = Path("res").absolute() 34 | templates = resources_root / "templates" 35 | static = resources_root / "static" 36 | 37 | # Create app and initial parameters 38 | app = Flask("CrashServer", static_folder=str(static), template_folder=str(templates)) 39 | app.config["SQLALCHEMY_DATABASE_URI"] = get_postgres_url() 40 | app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = True 41 | app.config["LANGUAGES"] = ["en", "zh"] 42 | app.config["BABEL_TRANSLATION_DIRECTORIES"] = str(resources_root / "translations") 43 | FlaskDynaconf(app, settings_files=["config/settings.toml"], environments=True) 44 | 45 | return app 46 | 47 | 48 | def register_errors(app: Flask): 49 | def render_error(error): 50 | """Render error template.""" 51 | error_code = getattr(error, "code", 500) 52 | return render_template(f"errors/{error_code}.html"), error_code 53 | 54 | for errcode in [404, 500]: 55 | app.errorhandler(errcode)(render_error) 56 | 57 | 58 | def register_extensions(app: Flask): 59 | babel.init_app(app) 60 | debug_toolbar.init_app(app) 61 | db.init_app(app) 62 | migrate.init_app(app, db, directory="crashserver/migrations") 63 | limiter.init_app(app) 64 | login.init_app(app) 65 | 66 | @babel.localeselector 67 | def get_locale(): 68 | return flask.request.accept_languages.best_match(app.config["LANGUAGES"]) 69 | 70 | login.login_view = "auth.login" 71 | login.login_message = _("You must be logged in to see this page") 72 | login.login_message_category = "info" 73 | 74 | 75 | def register_blueprints(app: Flask): 76 | from .controllers import ( 77 | auth, 78 | crash_upload_api, 79 | sym_upload_v1, 80 | sym_upload_v2, 81 | webapi, 82 | views, 83 | ) 84 | 85 | app.register_blueprint(views) 86 | app.register_blueprint(webapi) 87 | app.register_blueprint(crash_upload_api) 88 | app.register_blueprint(sym_upload_v1, url_prefix="/symupload") 89 | app.register_blueprint(sym_upload_v2, url_prefix="/symupload") 90 | app.register_blueprint(auth, url_prefix="/auth") 91 | 92 | 93 | def register_jinja(app: Flask): 94 | from crashserver.utility import sysinfo, misc 95 | import humanize 96 | 97 | app.add_template_global(sysinfo, "sysinfo") 98 | app.add_template_global(HostInfo, "HostInfo") 99 | app.add_template_global(misc.get_font_awesome_os_icon, "get_font_awesome_os_icon") 100 | app.add_template_global(misc.naturaltime, "humantime") 101 | app.add_template_global(misc.get_storage_icon, "get_storage_icon") 102 | app.add_template_global(humanize, "humanize") 103 | app.add_template_global(app.config, "settings") 104 | 105 | @app.template_filter("pluralize") 106 | def pluralize(number, singular="", plural="s"): 107 | return singular if number == 1 else plural 108 | 109 | 110 | def init_database(app: Flask): 111 | """TODO: Reformat to be in manage.py""" 112 | # Ensure database exists 113 | if not database_exists(app.config["SQLALCHEMY_DATABASE_URI"]): 114 | create_database(app.config["SQLALCHEMY_DATABASE_URI"]) 115 | print("Database created") 116 | 117 | from crashserver.server.models import Annotation 118 | from crashserver.server.models import BuildMetadata 119 | from crashserver.server.models import Minidump 120 | from crashserver.server.models import Project 121 | from crashserver.server.models import Symbol 122 | from crashserver.server.models import SymbolUploadV2 123 | from crashserver.server.models import User 124 | from crashserver.server.models import SymCache 125 | 
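# Usage note (an illustrative addition, not original source): create_app() above is the app
# factory; the development compose file exports FLASK_APP=crashserver/server:create_app for the
# Flask tooling, and main.py builds the same app for the container's default command (gunicorn in
# the production image, per the changelog). A minimal programmatic use, assuming the Postgres and
# Redis settings in config/settings.toml point at reachable services, would be:
#
#     app = create_app()
#     app.run(port=8888)  # development only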
-------------------------------------------------------------------------------- /crashserver/server/controllers/__init__.py: -------------------------------------------------------------------------------- 1 | from .auth import auth 2 | from .crash_upload_api import crash_upload_api 3 | from .sym_upload_v1 import sym_upload_v1 4 | from .sym_upload_v2 import sym_upload_v2 5 | from .webapi import webapi 6 | from .webviews import views 7 | -------------------------------------------------------------------------------- /crashserver/server/controllers/auth.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint, render_template, request, redirect, url_for, flash 2 | from flask_babel import _ 3 | from flask_login import current_user, login_user, logout_user 4 | 5 | from crashserver.server import login 6 | from crashserver.server.forms import LoginForm 7 | from crashserver.server.models import User 8 | from crashserver.utility import misc 9 | 10 | auth = Blueprint("auth", __name__) 11 | 12 | 13 | # %% Flask-Login Required 14 | @login.user_loader 15 | def load_user(user_id): 16 | return User.query.get(user_id) 17 | 18 | 19 | # %% Routes 20 | @auth.route("/login", methods=["GET", "POST"]) 21 | def login(): 22 | """Present the user with a page where they can login.""" 23 | # Go to homepage if user is already logged in 24 | if current_user.is_authenticated: 25 | return redirect(url_for("views.home")) 26 | 27 | # If we posted and the for is valid 28 | form = LoginForm(request.form) 29 | if request.method == "POST" and form.validate(): 30 | 31 | # Find the user verify password 32 | user = User.query.filter_by(email=form.email.data).first() 33 | if user is None or not user.check_password(form.password.data): 34 | # If any info is bad, alert user 35 | flash(_("Invalid email or password"), category="error") 36 | return redirect(url_for("auth.login")) 37 | 38 | # Otherwise login 39 | else: 40 | flash(_("Logged in"), category="info") 41 | login_user(user, remember=form.remember_me.data) 42 | return redirect(url_for("views.home")) 43 | 44 | # Present form errors if form is invalid 45 | elif form.errors: 46 | misc.flash_form_errors(form) 47 | 48 | return render_template("auth/login.html", form=form) 49 | 50 | 51 | @auth.route("/logout") 52 | def logout(): 53 | if current_user.is_authenticated: 54 | flash(_("Logged out")) 55 | logout_user() 56 | return redirect(url_for("views.home")) 57 | -------------------------------------------------------------------------------- /crashserver/server/controllers/crash_upload_api.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import io 3 | 4 | from flask import Blueprint, request 5 | from werkzeug.formparser import parse_form_data 6 | 7 | from crashserver.server import db, helpers 8 | from crashserver.utility.decorators import ( 9 | file_key_required, 10 | api_key_required, 11 | check_project_versioned, 12 | ) 13 | from crashserver.utility.misc import SymbolData 14 | 15 | crash_upload_api = Blueprint("api", __name__) 16 | 17 | 18 | @crash_upload_api.route("/api/minidump/upload", methods=["POST"]) 19 | @api_key_required() 20 | def upload_minidump(project): 21 | """ 22 | A Crashpad_handler sets this endpoint as their upload url with the "-no-upload-gzip" 23 | argument, and it will save and prepare the file for processing 24 | :return: 25 | """ 26 | 27 | # A crashpad upload `Content Encoding` will only be gzip, or not present. 
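    # If the body arrived gzip-compressed, Flask exposes it only as an opaque byte stream, so the
    # branch below decompresses it and rebuilds a minimal WSGI environ; werkzeug's parse_form_data()
    # can then parse the multipart payload exactly as it would for an uncompressed request, yielding
    # the same form values (dump metadata) and file dict (minidump + attachments).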
28 | if request.content_encoding == "gzip": 29 | uncompressed = gzip.decompress(request.get_data()) 30 | environ = { 31 | "wsgi.input": io.BytesIO(uncompressed), 32 | "CONTENT_LENGTH": str(len(uncompressed)), 33 | "CONTENT_TYPE": request.content_type, 34 | "REQUEST_METHOD": "POST", 35 | } 36 | stream, form, files = parse_form_data(environ) 37 | dump_values = dict(form) 38 | attachments = files.to_dict() 39 | 40 | else: 41 | # Additional files after minidump has been popped from dict are misc attachments. 42 | attachments = request.files.to_dict() 43 | dump_values = dict(request.values) 44 | 45 | minidump_key = "upload_file_minidump" 46 | if minidump_key not in attachments.keys(): 47 | return {"error": "missing file parameter {}".format(minidump_key)}, 400 48 | minidump = attachments.pop(minidump_key) 49 | 50 | return helpers.minidump_upload( 51 | db.session, 52 | project.id, 53 | dump_values, 54 | minidump.stream.read(), 55 | attachments.values(), 56 | ) 57 | 58 | 59 | @crash_upload_api.route("/api/symbol/upload", methods=["POST"]) 60 | @file_key_required("symbol_file") 61 | @api_key_required("symbol") 62 | @check_project_versioned() 63 | def upload_symbol(project, version): 64 | symbol_file = request.files.get("symbol_file") 65 | 66 | symbol_file_bytes = symbol_file.stream.read() 67 | with io.BytesIO(symbol_file_bytes) as f: 68 | first_line_str = f.readline().decode("utf-8") 69 | 70 | # Get relevant module info from first line of file 71 | symbol_data = SymbolData.from_module_line(first_line_str) 72 | symbol_data.app_version = version 73 | symbol_file.stream.seek(0) 74 | 75 | return helpers.symbol_upload(db.session, project, symbol_file_bytes, symbol_data) 76 | -------------------------------------------------------------------------------- /crashserver/server/controllers/sym_upload_v1.py: -------------------------------------------------------------------------------- 1 | """ 2 | sym_upload_v1: An implementation of the `sym-upload-v1` protocol to upload 3 | symbol files to the CrashServer. 4 | 5 | This protocol is used when the `symupload` program from the breakpad repository 6 | is invoked without any `-p` parameter, and without 7 | """ 8 | 9 | from flask import Blueprint, request 10 | 11 | from crashserver.server import db, helpers 12 | from crashserver.utility.decorators import ( 13 | file_key_required, 14 | api_key_required, 15 | check_project_versioned, 16 | ) 17 | from crashserver.utility.misc import SymbolData 18 | 19 | sym_upload_v1 = Blueprint("sym-upload-v1", __name__) 20 | 21 | 22 | @sym_upload_v1.route("", methods=["POST"]) 23 | @file_key_required("symbol_file") 24 | @api_key_required("symbol") 25 | @check_project_versioned() 26 | def upload(project, version): 27 | """ 28 | Upload endpoint for `sym-upload-v1` protocol. 
29 | Received payload is a multipart/form request with all data needed to receive a symbol file 30 | :return: 31 | """ 32 | data = SymbolData( 33 | os=request.form["os"].strip(), 34 | arch=request.form["cpu"].strip(), 35 | build_id=request.form["debug_identifier"].strip(), 36 | module_id=request.form["debug_file"].strip(), 37 | app_version=version, 38 | ) 39 | 40 | file_content = request.files.get("symbol_file").stream.read() 41 | return helpers.symbol_upload(db.session, project, file_content, data) 42 | -------------------------------------------------------------------------------- /crashserver/server/controllers/sym_upload_v2.py: -------------------------------------------------------------------------------- 1 | """ 2 | Implementation of the `sym-upload-v2` protocol. 3 | The protocol defines three endpoints which are called in a specific order. 4 | Notes: 5 | - Breakpad's documentation was very limited, and this is my understanding of the v2 protocol 6 | - /v1/ is not documented in the crashpad repository, and is unrelated to the "v2" in the 7 | `sym-upload-v2` protocol name. It only denotes version 1 of the `sym-upload-v2` 8 | protocol. 9 | - Every defined endpoint requires the `key` argument 10 | 11 | The v2 protocol calls the following endpoints: 12 | 1. /v1/symbols//:checkStatus?key= 13 | First, symupload wants to check if the symbol exists on the server. 14 | This endpoint is called with an empty body, and expects a json response. 15 | The response will look like: 16 | { "status": "" } 17 | Where the status must be one of three values: ["STATUS_UNSPECIFIED", "MISSING", "FOUND"] 18 | - "FOUND" - The symbol exists on the server 19 | - "MISSING" - The symbol does not exist on the server 20 | - "STATUS_UNSPECIFIED" - The server does not explicitly state the existence or nonexistence of the symbol 21 | 22 | My interpretation of "STATUS_UNSPECIFIED" is that it applies when there is no row matching `debug_file` and `debug_id` 23 | in the `build_metadata` table. Though at the time of writing this, it doesn't really matter. On mac, windows, 24 | and linux, in the source for symupload, it basically checks (res["status"] == "FOUND") ? "FOUND" : "MISSING". 25 | Nothing happens for "STATUS_UNSPECIFIED". Breakpad, why? 26 | 27 | 2. /v1/uploads:create?key= 28 | After the first endpoint, if symupload decides that it wants to upload the symbol, it will call this endpoint. 29 | This endpoint is called with an empty body, and expects a json response. 30 | The response will look like: 31 | { "uploadUrl": "", "uploadKey": "" } 32 | Note the camel-case. The `sym-upload-v2` docs show underscores, while camel-case is used in the source. Breakpad, why? 33 | 34 | - : This must be an endpoint where symupload may PUT the symbol file. When symupload PUTs to this url, the key 35 | will not be included. You are expected to send a url which it can use to upload the symbol file to an intermediary 36 | location. This is not meant to be a url which is used to directly add the symbol to the project. That is what 37 | happens in step three. Here, we only tell symupload where to upload the file, and we then evaluate and determine 38 | what to do with the symbol on the third url. 39 | 40 | 3. /uploads/:complete?key= 41 | After the symbol is uploaded, this endpoint is called asking us to "process" the symbol file. 42 | This endpoint is called with a 3-parameter body, and expects a json response.
43 | The body will look like: 44 | { "symbol_id": { "debug_file": "", "debug_id": "" }, "symbol_upload_type": "BREAKPAD" } 45 | For the above values: 46 | - debug_file: The module_id of the previously submitted symbol file 47 | - debug_id: The build_id of the previously submitted symbol file 48 | - symbol_upload_type: The type of symbol (since symupload can upload more than one). 49 | CrashServer only accepts the BREAKPAD symbol type 50 | 51 | The response will look like: 52 | { "result": "" } 53 | Where must be one of three values: ["RESULT_UNSPECIFIED", "OK", "DUPLICATE_DATA"] 54 | - "OK" - The symbol storage was updated with the previously uploaded symbol 55 | - "DUPLICATE_DATA" - The symbol is identical to the previously upload symbol. No change to symbol storage. 56 | Breakpad docs 57 | - "RESULT_UNSPECIFIED" - I have no idea what this value is, or when to use this value. It's not referenced in 58 | the symupload, and is equivalent to "OK" when sent. 59 | """ 60 | import os 61 | 62 | from flask import Blueprint, request, url_for 63 | from loguru import logger 64 | 65 | from crashserver.server import db, helpers 66 | from crashserver.server.models import SymbolUploadV2, BuildMetadata 67 | from crashserver.utility.decorators import url_arg_required, api_key_required 68 | 69 | sym_upload_v2 = Blueprint("sym-upload-v2", __name__) 70 | 71 | 72 | @sym_upload_v2.route("/v1/symbols//:checkStatus") 73 | @api_key_required("symbol", "key", pass_project=False) 74 | def check_status(module_id, build_id): 75 | build = db.session.query(BuildMetadata).filter_by(build_id=build_id.strip(), module_id=module_id.strip()).first() 76 | 77 | # # This will return if the row does not exist... 78 | if build is None: 79 | return {"status": "STATUS_UNSPECIFIED"}, 200 80 | 81 | # ...and this will return based on symbol_exists on an existing row 82 | return {"status": "FOUND" if build.symbol else "MISSING"}, 200 83 | 84 | 85 | @sym_upload_v2.route("/v1/uploads:create", methods=["POST"]) 86 | @api_key_required("symbol", "key") 87 | def create(project): 88 | symbol_ref = SymbolUploadV2(project_id=project.id) 89 | db.session.add(symbol_ref) 90 | db.session.commit() 91 | 92 | res = { 93 | "uploadUrl": url_for("sym-upload-v2.upload_location", sym_id=symbol_ref.id, _external=True), 94 | "uploadKey": symbol_ref.id, 95 | } 96 | 97 | return res, 200 98 | 99 | 100 | @sym_upload_v2.route("/upload", methods=["PUT"]) 101 | @url_arg_required("sym_id") 102 | def upload_location(): 103 | new_symbol = db.session.query(SymbolUploadV2).get(request.args.get("sym_id")) 104 | new_symbol.store_file(request.data) 105 | db.session.commit() 106 | return "", 200 107 | 108 | 109 | @sym_upload_v2.route("/v1/uploads/:complete", methods=["POST"]) 110 | @api_key_required("symbol", "key") 111 | def is_upload_complete(project, upload_key): 112 | logger.info("Attempting to upload new symbol file") 113 | 114 | # TODO(james): Why does this not get sent on windows? Ignoring as it doesn't break core functionality 115 | # if request.json["symbol_upload_type"] != "BREAKPAD": 116 | # return {"error": "CrashServer only accepts breakpad debug symbols"}, 400 117 | 118 | # Get reference to the uploaded sym file 119 | symbol_ref = db.session.query(SymbolUploadV2).get(upload_key) 120 | 121 | # If a version already exists, compare hashes 122 | build = db.session.query(BuildMetadata).filter_by(build_id=symbol_ref.build_id, module_id=symbol_ref.module_id).first() 123 | 124 | # If symbol exists, and hashes match, then do nothing. 
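    # file_hash is the blake2s hex digest recorded by SymbolUploadV2.store_file() when the symbol body was
    # PUT to the upload URL, so a byte-identical re-upload is detected here without re-reading the stored file.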
125 | if build and build.symbol and build.symbol.file_hash == symbol_ref.file_hash: 126 | return {"result": "DUPLICATE_DATA"}, 200 127 | 128 | # Save the file! 129 | file_data = symbol_ref.load_file() 130 | helpers.symbol_upload(db.session, project, file_data, symbol_ref.symbol_data) 131 | 132 | # Delete upload 133 | os.remove(symbol_ref.file_location) 134 | 135 | # Always delete intermediary table row. Only to track symupload v2 usages. 136 | db.session.delete(symbol_ref) 137 | db.session.commit() 138 | 139 | return {"result": "OK"}, 200 140 | -------------------------------------------------------------------------------- /crashserver/server/controllers/webapi.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import json 3 | import operator 4 | import os 5 | 6 | import natsort 7 | from flask import Blueprint, request, render_template, flash, redirect, abort, url_for 8 | from flask_babel import _ 9 | from flask_login import login_required 10 | from loguru import logger 11 | from sqlalchemy import func, text 12 | 13 | from crashserver.server import db 14 | from crashserver.server.models import Symbol, Project, ProjectType, Minidump, Attachment, Storage 15 | 16 | webapi = Blueprint("webapi", __name__) 17 | 18 | 19 | @webapi.route("/webapi/symbols/") 20 | def get_symbols(project_id): 21 | project = db.session.query(Project).get(project_id) 22 | proj_symbols = db.session.query(Symbol).filter_by(project_id=project_id).all() 23 | 24 | if len(proj_symbols) == 0: 25 | return {"html": render_template("symbols/symbol-list-no-syms.html")}, 200 26 | 27 | # Get counts for os symbols 28 | def sym_count(os: str): 29 | return db.session.query(Symbol).filter(Symbol.project_id == project_id).filter(func.lower(Symbol.os) == os.lower()).count() 30 | 31 | stats = { 32 | "sym_count": { 33 | "linux": sym_count("linux"), 34 | "mac": sym_count("mac"), 35 | "windows": sym_count("windows"), 36 | } 37 | } 38 | if project.project_type == ProjectType.VERSIONED: 39 | 40 | # Creates callable that returns 'app_version' from object when you get_attr(object) 41 | # sym_dict is in the format of {app_version: [Symbol objects of that version]} 42 | get_attr = operator.attrgetter("app_version") 43 | sorted_list = natsort.natsorted(proj_symbols, key=lambda x: get_attr(x), reverse=True) 44 | sym_dict = {version: sorted(list(group), key=operator.attrgetter("os")) for version, group in itertools.groupby(sorted_list, get_attr)} 45 | stats = { 46 | "sym_count": { 47 | "linux": sym_count("linux"), 48 | "mac": sym_count("mac"), 49 | "windows": sym_count("windows"), 50 | } 51 | } 52 | return { 53 | "html": render_template( 54 | "symbols/symbol-list-versioned.html", 55 | project=project, 56 | sym_dict=sym_dict, 57 | stats=stats, 58 | ) 59 | }, 200 60 | 61 | elif project.project_type == ProjectType.SIMPLE: 62 | return { 63 | "html": render_template( 64 | "symbols/symbol-list-simple.html", 65 | project=project, 66 | symbols=proj_symbols, 67 | stats=stats, 68 | ) 69 | }, 200 70 | 71 | 72 | @webapi.route("/webapi/symbols/count/") 73 | def get_symbols_count(project_id): 74 | proj_symbols = db.session.query(Symbol).filter_by(project_id=project_id).all() 75 | return {"count": len(proj_symbols)}, 200 76 | 77 | 78 | @webapi.route("/webapi/project/versions/") 79 | @login_required 80 | def get_project_versions(project_id): 81 | if project_id is None: 82 | return {"error": "project_id is required"}, 404 83 | 84 | project = db.session.query(Project.id).filter_by(id=project_id).first() 85 | 
if project is None: 86 | return {"error": "project_id is invalid"}, 404 87 | 88 | versions = db.session.query(Symbol.app_version).filter_by(project_id=project_id).distinct().all() 89 | versions = [v[0] for v in versions] 90 | return {"versions": versions}, 200 91 | 92 | 93 | @webapi.route("/webapi/project/rename/", methods=["POST"]) 94 | @login_required 95 | def rename_project(): 96 | project_id = request.form.get("project_id") 97 | new_name = request.form.get("project_name") 98 | res: Project = db.session.query(Project).filter_by(id=project_id).first() 99 | 100 | if res is None: 101 | flash(_("Unable to find Project ID?"), category="warning") 102 | logger.warning("Unable to rename project. Bad project ID: {}".format(project_id)) 103 | else: 104 | message = _("Project %(old)s renamed to %(new)s", old=res.project_name, new=new_name) 105 | res.project_name = new_name 106 | db.session.commit() 107 | 108 | logger.info(message) 109 | flash(message) 110 | 111 | return redirect(request.referrer) 112 | 113 | 114 | @webapi.route("/webapi/minidump/delete/", methods=["DELETE"]) 115 | @login_required 116 | def delete_minidump(dump_id): 117 | dump = db.session.query(Minidump).get(dump_id) 118 | if not dump: 119 | return {"error", "dump_id is invalid"}, 404 120 | 121 | dump.delete_minidump() 122 | db.session.delete(dump) 123 | db.session.commit() 124 | return "", 200 125 | 126 | 127 | @webapi.route("/webapi/stats/crash-per-day/") 128 | def crash_per_day(project_id): 129 | # Get crash per day data 130 | num_days = request.args.get("days", default=7, type=int) 131 | if num_days not in [7, 30]: 132 | num_days = 7 133 | 134 | if project_id == "all": 135 | project_id = None 136 | 137 | tz = request.cookies.get("timezone", os.environ.get("TZ")) 138 | with db.engine.connect() as conn: 139 | conn.execute(f"SET LOCAL timezone = '{tz}';") 140 | sql = text( 141 | f""" 142 | SELECT 143 | to_char(m.date_created::DATE, 'Dy') as day_name, 144 | to_char(m.date_created::DATE, 'MM-DD') as upload_date, 145 | COUNT(m.date_created) as num_dump 146 | FROM minidump m 147 | WHERE (m.project_id = :project_id OR :project_id is NULL) 148 | GROUP BY m.date_created::DATE 149 | ORDER BY to_char(m.date_created::DATE, 'YYYY-MM-DD') DESC 150 | LIMIT :num_days; 151 | """ 152 | ) 153 | res = conn.execute(sql, project_id=project_id, num_days=num_days) 154 | 155 | labels = [] 156 | counts = [] 157 | 158 | # Fill with actual data 159 | for data in res: 160 | labels.insert(0, [f"{data[1]}", f"({data[0]})"]) 161 | counts.insert(0, data[2]) 162 | 163 | # Fill with empty data if leftover spaces 164 | while len(labels) < num_days: 165 | labels.insert(0, [""]) 166 | counts.insert(0, 0) 167 | 168 | return json.dumps({"labels": labels, "counts": counts}, default=str) 169 | 170 | 171 | @webapi.route("/webapi/attachment/get-content/") 172 | def get_attachment_content(attach_id): 173 | attach = Attachment.query.get(attach_id) 174 | content = attach.file_content 175 | return {"file_content": content}, 200 if content is not None else 404 176 | 177 | 178 | @webapi.route("/webapi/storage/update/", methods=["POST"]) 179 | @login_required 180 | def update_storage_target(key): 181 | storage = db.session.query(Storage).get(key) 182 | 183 | # Attempting to save settings to a target that doesn't exist?? 
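    # `key` is the storage-module name posted from the settings page and must match a row created by
    # Storage.register_targets(). Unchecked checkboxes are simply absent from request.form, so empty values
    # are filtered out below before deciding whether the target is being enabled, disabled, or made primary.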
184 | if not storage: 185 | abort(500) 186 | 187 | # Determine if the storage backend is being enabled or disabled 188 | form = {key: val for key, val in dict(request.form).items() if val} 189 | 190 | # If this is set to primary, disable all other primary 191 | primary_backend = True if form.pop("primary_backend", False) else False 192 | if primary_backend: 193 | old_prim = db.session.query(Storage).filter_by(is_primary=True).first() 194 | old_prim.is_primary = False 195 | storage.is_primary = True 196 | 197 | should_enable = True if form.pop("target_enabled", False) else False # Attempt to pop target_enabled. If it's not there, then disable. 198 | changed = not (should_enable == storage.is_enabled) 199 | 200 | old_config = storage.config.copy() 201 | old_config.update(form) 202 | 203 | # If it was changed, and should_enable is false, disable, commit, and notify. 204 | if changed and not should_enable: 205 | storage.is_enabled = False 206 | db.session.commit() 207 | flash(_("%(key)s has been disabled. No settings were changed.", key=key)) 208 | logger.info(f"{key} has been disabled. No settings were changed.") 209 | return redirect(url_for("views.settings")) 210 | 211 | # If we are here, then the state is already enabled, or newly enabled. Either way, update the settings. 212 | valid = storage.meta.validate_credentials(old_config) # First validate credentials 213 | 214 | if not valid: 215 | flash(_(f"Unable to validate credentials to {key}. Please try again.")) 216 | return redirect(url_for("views.settings")) 217 | 218 | # If we are here, the given credentials were valid 219 | storage.config = old_config 220 | storage.is_enabled = True 221 | db.session.commit() 222 | 223 | Storage.init_targets() 224 | 225 | flash(_("%(key)s settings have been updated.", key=key)) 226 | return redirect(url_for("views.settings")) 227 | -------------------------------------------------------------------------------- /crashserver/server/controllers/webviews.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | import uuid 4 | 5 | from flask import Blueprint, render_template, request, redirect, url_for, flash, abort 6 | from flask_babel import _ 7 | from flask_login import login_required, current_user 8 | 9 | from crashserver.config import settings as config 10 | from crashserver.server import db, helpers 11 | from crashserver.server.forms import CreateAppForm, UploadMinidumpForm, UpdateAccount, UploadSymbolForm 12 | from crashserver.server.models import Minidump, Project, ProjectType, User, Storage 13 | from crashserver.utility import misc 14 | 15 | views = Blueprint("views", __name__) 16 | 17 | 18 | @views.route("/") 19 | def home(): 20 | apps = Project.query.all() 21 | return render_template("app/home.html", apps=apps) 22 | 23 | 24 | @views.route("/settings", methods=["GET", "POST"]) 25 | @login_required 26 | def settings(): 27 | users = db.session.query(User).all() 28 | projects = db.session.query(Project).all() 29 | storage = db.session.query(Storage).order_by(Storage.key).all() 30 | 31 | form = UpdateAccount(current_user) 32 | if request.method == "POST" and form.validate(): 33 | current_user.set_password(form.new_pass.data) 34 | db.session.commit() 35 | flash(_("Password Updated")) 36 | else: 37 | misc.flash_form_errors(form) 38 | 39 | return render_template( 40 | "app/settings.html", 41 | account_form=form, 42 | users=users, 43 | projects=projects, 44 | settings=config, 45 | storage=storage, 46 | ) 47 | 48 | 49 | 
@views.route("/project/create", methods=["GET", "POST"]) 50 | @login_required 51 | def project_create(): 52 | form = CreateAppForm(request.form) 53 | 54 | # If the form is valid 55 | if request.method == "POST" and form.validate(): 56 | # Check if the name is taken 57 | existing = db.session.query(Project).filter_by(project_name=form.title.data).first() 58 | if existing is not None: 59 | flash(_("Project name %(name)s is taken.", name=form.title.data)) 60 | return redirect(url_for("views.project_create", form=form)) 61 | 62 | # Create the project 63 | # TODO(james): Ensure apikey doesn't exist? 64 | def random_key(): 65 | return str(uuid.UUID(bytes=os.urandom(16), version=4)).replace("-", "") 66 | 67 | new_project = Project(project_name=form.title.data) 68 | new_project.minidump_api_key = random_key() 69 | new_project.symbol_api_key = random_key() 70 | new_project.project_type = ProjectType.get_type_from_str(form.project_type.data) 71 | 72 | db.session.add(new_project) 73 | db.session.commit() 74 | 75 | flash(_("Project %(name)s was created.", name=form.title.data)) 76 | return redirect(url_for("views.home")) 77 | else: 78 | misc.flash_form_errors(form) 79 | 80 | return render_template("app/create.html", form=form) 81 | 82 | 83 | @views.route("/project/") 84 | def project_dashboard(id: str): 85 | proj = Project.query.filter_by(id=id).first() 86 | return render_template("app/dashboard.html", project=proj) 87 | 88 | 89 | @views.route("/crash-reports") 90 | def crash(): 91 | page = request.args.get("page", 1, type=int) 92 | res = db.session.query(Minidump, Project.project_name).filter(Minidump.project_id == Project.id).order_by(Minidump.date_created.desc()).paginate(page=page, per_page=10) 93 | return render_template("crash/crash.html", dumps=res) 94 | 95 | 96 | @views.route("/crash-reports/") 97 | def crash_detail(crash_id): 98 | minidump = db.session.query(Minidump).get(crash_id) 99 | 100 | if not minidump: 101 | abort(404) 102 | 103 | return render_template("crash/crash_detail.html", dump=minidump) 104 | 105 | 106 | @views.route("/symbols") 107 | def symbols(): 108 | projects = Project.query.with_entities(Project.id, Project.project_name).all() 109 | return render_template("symbols/symbols.html", projects=projects) 110 | 111 | 112 | @views.route("/upload-minidump", methods=["GET", "POST"]) 113 | def upload_minidump(): 114 | form = UploadMinidumpForm() 115 | 116 | if request.method == "POST" and form.validate_on_submit(): 117 | res = helpers.minidump_upload(db.session, form.project.data, {}, form.minidump.data.stream.read(), []) 118 | if res.status_code != 200: 119 | flash(res.json["error"], category="danger") 120 | return redirect(url_for("views.upload_minidump")) 121 | else: 122 | return redirect(url_for("views.crash_detail", crash_id=res.json["id"])) 123 | else: 124 | misc.flash_form_errors(form) 125 | 126 | projects = Project.query.with_entities(Project.id, Project.project_name).all() 127 | for p in projects: 128 | form.add_project_choice(str(p.id), p.project_name) 129 | return render_template("app/upload.html", form=form, projects=projects) 130 | 131 | 132 | @views.route("/upload-symbol", methods=["GET", "POST"]) 133 | @login_required 134 | def upload_symbol(): 135 | form = UploadSymbolForm() 136 | 137 | if request.method == "POST" and form.validate_on_submit(): 138 | project = Project.query.get(form.project.data) # Get project 139 | 140 | # Read first line of symbol file 141 | symbol_file_bytes = form.symbol.data.stream.read() 142 | with io.BytesIO(symbol_file_bytes) as f: 143 | 
first_line_str = f.readline().decode("utf-8") 144 | 145 | # Get relevant module info from first line of file 146 | symbol_data = misc.SymbolData.from_module_line(first_line_str) 147 | symbol_data.app_version = form.version.data if form.version.data else None 148 | 149 | res = helpers.symbol_upload(db.session, project, symbol_file_bytes, symbol_data) 150 | if res.status_code != 200: 151 | flash(res.json["error"], category="danger") 152 | else: 153 | flash(_(f"Symbol {symbol_data.module_id}:{symbol_data.os}:{symbol_data.build_id} received.")) 154 | else: 155 | misc.flash_form_errors(form) 156 | 157 | projects = Project.query.with_entities(Project.id, Project.project_name, Project.project_type).all() 158 | return render_template("symbols/symbol-upload.html", projects=projects, form=form) 159 | -------------------------------------------------------------------------------- /crashserver/server/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/crashserver/server/core/__init__.py -------------------------------------------------------------------------------- /crashserver/server/core/extensions.py: -------------------------------------------------------------------------------- 1 | import redis 2 | import rq 3 | from flask_babel import Babel 4 | from flask_debugtoolbar import DebugToolbarExtension 5 | from flask_limiter import Limiter 6 | from flask_limiter.util import get_remote_address 7 | from flask_login import LoginManager 8 | from flask_migrate import Migrate 9 | from flask_sqlalchemy import SQLAlchemy 10 | 11 | from crashserver.config import get_redis_url 12 | 13 | babel = Babel() 14 | debug_toolbar = DebugToolbarExtension() 15 | limiter = Limiter(key_func=get_remote_address) 16 | login = LoginManager() 17 | migrate = Migrate() 18 | db = SQLAlchemy() 19 | queue = rq.Queue("crashserver", connection=redis.Redis.from_url(get_redis_url())) 20 | -------------------------------------------------------------------------------- /crashserver/server/forms.py: -------------------------------------------------------------------------------- 1 | from flask_babel import lazy_gettext 2 | from flask_wtf import FlaskForm 3 | from flask_wtf.file import FileField, FileRequired, FileAllowed 4 | from wtforms import StringField, PasswordField, BooleanField, SelectField, ValidationError 5 | from wtforms.validators import DataRequired, Email, Length, EqualTo 6 | 7 | from crashserver.server.models import Project, ProjectType 8 | 9 | 10 | class LoginForm(FlaskForm): 11 | email = StringField(lazy_gettext("Email Address"), validators=[DataRequired(), Email()]) 12 | password = PasswordField(lazy_gettext("Password"), validators=[DataRequired(), Length(min=8, max=256)]) 13 | remember_me = BooleanField(lazy_gettext("Remember Me")) 14 | 15 | 16 | class UpdateAccount(FlaskForm): 17 | current_pass = PasswordField(lazy_gettext("Current Password"), validators=[DataRequired(), Length(min=8, max=256)]) 18 | new_pass = PasswordField( 19 | lazy_gettext("New Password"), 20 | validators=[ 21 | DataRequired(), 22 | EqualTo("new_pass_verify", message=lazy_gettext("Passwords must match")), 23 | Length(min=8, max=256), 24 | ], 25 | ) 26 | new_pass_verify = PasswordField(lazy_gettext("Verify Password"), validators=[DataRequired(), Length(min=8, max=256)]) 27 | 28 | def __init__(self, user, *args, **kwargs): 29 | super(UpdateAccount, self).__init__(*args, **kwargs) 30 | self.user = user 31 | 
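    # WTForms calls validate_<fieldname> methods automatically during form.validate(), so the inline check
    # below runs in addition to the DataRequired/Length validators declared on current_pass.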
32 | def validate_current_pass(self, field): 33 | if not self.user.check_password(field.data): 34 | raise ValidationError(lazy_gettext("Current password incorrect")) 35 | 36 | 37 | class CreateAppForm(FlaskForm): 38 | title = StringField(lazy_gettext("Project Name"), validators=[DataRequired()]) 39 | project_type = SelectField( 40 | lazy_gettext("Project Type"), 41 | validators=[DataRequired()], 42 | choices=[("simple", lazy_gettext("Simple")), ("versioned", lazy_gettext("Versioned"))], 43 | ) 44 | 45 | 46 | class ProjectForm(FlaskForm): 47 | project = SelectField("Projects", coerce=str, validate_choice=False) 48 | 49 | def add_project_choice(self, value, title): 50 | data = (value, title) 51 | if not self.project.choices: 52 | self.project.choices = [data] 53 | else: 54 | self.project.choices.append(data) 55 | 56 | 57 | class UploadMinidumpForm(ProjectForm): 58 | minidump = FileField(lazy_gettext("Select Minidump file"), validators=[FileRequired(), FileAllowed(["dmp"], lazy_gettext("Minidump Files Only"))]) 59 | 60 | 61 | class UploadSymbolForm(ProjectForm): 62 | symbol = FileField(lazy_gettext("Select symbol file"), validators=[FileRequired(), FileAllowed(["sym"], lazy_gettext("Symbol Files Only"))]) 63 | version = SelectField("Version", validate_choice=False) 64 | 65 | @staticmethod 66 | def validate_version(form, field): 67 | project = Project.query.get(form.project.data) 68 | version = field.data.strip() 69 | 70 | if project.project_type == ProjectType.VERSIONED and not version: 71 | raise ValidationError("Versioned projects require a symbol version") 72 | -------------------------------------------------------------------------------- /crashserver/server/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | from .crash_upload import symbol_upload, minidump_upload 2 | -------------------------------------------------------------------------------- /crashserver/server/helpers/crash_upload.py: -------------------------------------------------------------------------------- 1 | """ 2 | symbol.py: Operations which coordinate transactions between 3 | the filesystem and the database on each api request 4 | """ 5 | import flask 6 | import magic 7 | from loguru import logger 8 | 9 | from crashserver.server.models import Symbol, BuildMetadata, Minidump, Annotation, Project, Attachment, ProjectType 10 | from crashserver.utility.misc import SymbolData 11 | 12 | 13 | def symbol_upload(session, project: Project, symbol_file: bytes, symbol_data: SymbolData): 14 | """ 15 | Store the symbol in the correct location, and track it in the database. 16 | 17 | While usually the data in `symbol_data` will be taken from `symbol_file` depending 18 | on the caller of this function (whether it be the sym-upload protocol, or the CrashServer 19 | web upload interface, the data might be from a different source. 20 | 21 | TODO(james): Is it worth separating this function out like this? The SymbolData struct 22 | will almost always be from the first line of the symbol file. 
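    For reference, the first line of a Breakpad text-format symbol file is a MODULE record of the form
        MODULE <os> <arch> <build_id> <module_name>
    e.g. "MODULE Linux x86_64 4FC3EB040E3DD8A5B17B21371CF58CA60 myapp" (illustrative values only), which is
    what SymbolData.from_module_line() parses.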
23 | 24 | :param session: The database session object 25 | :param project: The project to relate the symbol to 26 | :param symbol_file: The bytes to store in the file 27 | :param symbol_data: Metadata about the symfile param 28 | :return: The response to the client making this request 29 | """ 30 | # Check if a minidump was already uploaded with the current module_id and build_id 31 | build = ( 32 | session.query(BuildMetadata) 33 | .filter_by( 34 | project_id=project.id, 35 | build_id=symbol_data.build_id, 36 | module_id=symbol_data.module_id, 37 | ) 38 | .first() 39 | ) 40 | if build is None: 41 | # If we can't find the metadata for the symbol (which will usually be the case unless a minidump was uploaded 42 | # before the symbol file was uploaded), then create a new BuildMetadata, flush, and relate to symbol 43 | build = BuildMetadata( 44 | project_id=project.id, 45 | module_id=symbol_data.module_id, 46 | build_id=symbol_data.build_id, 47 | ) 48 | session.add(build) 49 | 50 | if build.symbol: 51 | session.rollback() 52 | logger.error("Symbol {} already uploaded. Subsequent upload rejected.", symbol_data.build_id) 53 | return flask.make_response({"error": "Symbol file already uploaded"}, 203) 54 | 55 | build.symbol = Symbol( 56 | project_id=project.id, 57 | os=symbol_data.os, 58 | arch=symbol_data.arch, 59 | app_version=symbol_data.app_version, 60 | ) 61 | build.symbol.store_file(symbol_file) 62 | logger.info( 63 | "Symbols received for {project_name} [{project_id}][{project_type}{sym_version}][{os}:{arch}]".format( 64 | project_name=project.project_name, 65 | project_id=str(project.id).split("-")[0], 66 | project_type=str(project.project_type).split(".")[-1], 67 | sym_version=(":" + symbol_data.app_version if project.project_type == ProjectType.VERSIONED else ""), 68 | os=symbol_data.os, 69 | arch=symbol_data.arch, 70 | ) 71 | ) 72 | 73 | # Send all minidump id's to task processor to for decoding 74 | to_process = build.unprocessed_dumps 75 | if to_process: 76 | logger.info("Attempting to reprocess {} unprocessed minidump", len(to_process)) 77 | for dump in to_process: 78 | dump.decode_task() 79 | session.commit() 80 | 81 | res = { 82 | "id": build.symbol.id, 83 | "os": build.symbol.os, 84 | "arch": build.symbol.arch, 85 | "build_id": build.build_id, 86 | "module_id": build.module_id, 87 | "date_created": build.symbol.date_created.isoformat(), 88 | } 89 | return flask.make_response(res, 200) 90 | 91 | 92 | def minidump_upload(session, project_id: str, annotations: dict, minidump_file: bytes, attachments): 93 | # Ensure file is not empty 94 | if len(minidump_file) == 0: 95 | logger.warning(f"Minidump rejected from {flask.request.remote_addr}. File is empty.") 96 | return flask.make_response({"error": "Bad Minidump"}, 400) 97 | 98 | # Verify file is actually a minidump based on magic number 99 | # Validate magic number 100 | magic_number = magic.from_buffer(minidump_file, mime=True) 101 | if magic_number != "application/x-dmp": 102 | logger.warning("Minidump rejected from {}. 
File detected as {}", flask.request.remote_addr, magic_number) 103 | return flask.make_response({"error": "Bad Minidump"}, 400) 104 | 105 | # Add minidump to database 106 | new_dump = Minidump(project_id=project_id) 107 | new_dump.upload_ip = flask.request.remote_addr 108 | new_dump.client_guid = annotations.pop("guid", None) 109 | new_dump.store_minidump(minidump_file) 110 | session.add(new_dump) 111 | session.flush() 112 | 113 | # Store attachments 114 | for attach in attachments: 115 | new_attach = Attachment(project_id=project_id, minidump_id=new_dump.id, original_filename=attach.filename) 116 | new_attach.store_file(attach.stream.read()) 117 | session.add(new_attach) 118 | 119 | # Store annotations 120 | if annotations: 121 | annotations.pop("api_key", None) # Remove API key from being added as annotation 122 | for key, value in annotations.items(): 123 | new_dump.annotations.append(Annotation(key=key, value=value)) 124 | 125 | session.commit() 126 | new_dump.decode_task() 127 | logger.info(f"Minidump received [{new_dump.id}] for project [{project_id}] - [{flask.request.remote_addr}] - [{len(attachments)} attachments]") 128 | 129 | return flask.make_response({"status": "success", "id": str(new_dump.id)}, 200) 130 | -------------------------------------------------------------------------------- /crashserver/server/jobs.py: -------------------------------------------------------------------------------- 1 | import json 2 | import subprocess 3 | from pathlib import Path 4 | import os 5 | 6 | import requests 7 | from loguru import logger 8 | 9 | from crashserver.server.core.extensions import db 10 | from crashserver.server.models import Minidump, BuildMetadata, Storage 11 | from crashserver.utility import processor 12 | 13 | 14 | class LocalSymCache: 15 | def __init__(self, module_id: str, build_id: str): 16 | self.module_id = module_id 17 | self.build_id = build_id 18 | 19 | @property 20 | def url_path(self) -> str: 21 | return "{0}/{1}/{0}".format(self.module_id, self.build_id) 22 | 23 | def does_sym_exist(self) -> bool: 24 | symbol = Path("/tmp/crash_decode/cache", self.url_path) 25 | return symbol.exists() 26 | 27 | def store_and_convert_symbol(self, file_content: bytes): 28 | dump_syms = str(Path("res/bin/linux/dump_syms").absolute()) 29 | 30 | # Store PDB 31 | pdb_location = Path("/tmp/crash_decode/downloads", self.url_path) 32 | pdb_location.parent.mkdir(exist_ok=True, parents=True) 33 | with open(pdb_location, "wb") as f: 34 | f.write(file_content) 35 | 36 | # Convert pdb to sym and store to file 37 | filename = self.module_id.split(".")[0] + ".sym" 38 | symfile = Path("/tmp/crash_decode/cache", self.module_id, self.build_id, filename) 39 | symfile.parent.mkdir(parents=True, exist_ok=True) 40 | with open(symfile, "wb") as f: 41 | # Write symbol data 42 | subprocess.run([dump_syms, pdb_location], stdout=f) 43 | 44 | # Delete original pdb 45 | os.remove(pdb_location.absolute()) 46 | 47 | 48 | def download_windows_symbol(module_id: str, build_id: str) -> (bool, bool): 49 | """Attempts to download and convert symbols from Microsoft Symbol Server. 50 | Returns tuple: 51 | - 1st tuple: True if successful download, otherwise false 52 | - 2nd tuple: True if already downloaded, otherwise false 53 | """ 54 | cached_sym = LocalSymCache(module_id, build_id) 55 | 56 | if cached_sym.does_sym_exist(): 57 | return False, True # Not downloaded, already exists 58 | 59 | logger.debug("LocalSymCache Miss. 
Attempting to download {}:{}".format(module_id, build_id)) 60 | res = requests.get("https://msdl.microsoft.com/download/symbols/" + cached_sym.url_path) 61 | if res.status_code != 200: 62 | logger.debug("Symbol not available on Windows Symbol Server => {}:{}".format(module_id, build_id)) 63 | return False, False 64 | 65 | cached_sym.store_and_convert_symbol(res.content) 66 | return True, False 67 | 68 | 69 | def decode_minidump(crash_id): 70 | # Prepare decode environment 71 | stackwalker = str(Path("res/bin/linux/stackwalker").absolute()) 72 | cache_dir = Path("/tmp/crash_decode/cache") 73 | current_dump = Path("/tmp/crash_decode/current_dump.dmp") 74 | 75 | cache_dir.mkdir(parents=True, exist_ok=True) 76 | current_dump.parent.mkdir(parents=True, exist_ok=True) 77 | current_dump.unlink(missing_ok=True) 78 | 79 | # Symbolicate without symbols to get metadata 80 | # TODO: Proper error handling for if executable fails 81 | minidump = db.session.query(Minidump).get(crash_id) 82 | if not minidump: 83 | logger.error(f"Minidump [{crash_id}] - Unable to decode. No database entry found.") 84 | return 85 | 86 | # Request symbol and minidump from Storage 87 | with open(current_dump, "wb") as dump_out: 88 | try: 89 | dump_out.write(Storage.retrieve(minidump.file_location).read()) 90 | except FileNotFoundError: 91 | logger.error(f"Minidump [{minidump.id}] was not found. Cancelling decode process.") 92 | return 93 | 94 | machine = subprocess.run([stackwalker, current_dump], capture_output=True) 95 | json_stack = json.loads(machine.stdout.decode("utf-8")) 96 | crash_data = processor.ProcessedCrash.generate(json_stack) 97 | 98 | # Check if a build_metadata already exists. (Previous minidump from same build, or symbol already uploaded) 99 | minidump.build = db.session.query(BuildMetadata).filter(BuildMetadata.build_id == crash_data.main_module.debug_id).first() 100 | 101 | # The symbol file needed to decode this minidump does not exist. 102 | # Make a record in the CompileMetadata table with {build,module}_id. There will be a 103 | # relationship from that metadata to the minidump 104 | if minidump.build is None: 105 | minidump.build = BuildMetadata( 106 | project_id=minidump.project_id, 107 | module_id=crash_data.main_module.debug_file, 108 | build_id=crash_data.main_module.debug_id, 109 | ) 110 | db.session.flush() 111 | 112 | # No symbols? Notify and return 113 | if not minidump.build.symbol: 114 | logger.info(f"Minidump [{crash_id}] - Symbol [{crash_data.main_module.debug_file}:{crash_data.main_module.debug_id}] does not exist. Partial stacktrace stored.") 115 | minidump.stacktrace = json_stack 116 | minidump.symbolicated = False 117 | minidump.decode_task_complete = True 118 | db.session.commit() 119 | return 120 | 121 | # If we get here, then the symbol exists. Get it from the storage module. 
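    # Symbol.file_location is the relative "<module_id>/<build_id>/<module>.sym" path, so writing it beneath
    # cache_dir reproduces the directory layout the bundled stackwalker binary expects when resolving symbols.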
122 | sym_path = Path(cache_dir, minidump.build.symbol.file_location) 123 | sym_path.parent.mkdir(parents=True, exist_ok=True) 124 | with open(sym_path, "wb") as out_sym: 125 | out_sym.write(Storage.retrieve(minidump.build.symbol.file_location_stored).read()) 126 | 127 | # If windows, attempt to download all possible windows symbols before decoding 128 | # TODO(james): This is good as a prototype, but should be in a separate HTTP symbol supplier module/class 129 | if minidump.build.symbol.os == "windows": 130 | logger.debug(f"Minidump [{crash_id}] - Attempting Windows Symbol Server download") 131 | num_downloaded, num_existing = 0, 0 132 | for module in crash_data.modules_no_symbols: 133 | wasDownloaded, alreadyDownloaded = download_windows_symbol(module.debug_file, module.debug_id) 134 | 135 | if wasDownloaded: 136 | num_downloaded += 1 137 | if alreadyDownloaded: 138 | num_existing += 1 139 | 140 | logger.info(f"Minidump [{crash_id}] - Windows Symbols Obtained - [{num_downloaded}] Downloaded - [{num_existing}] Preexisting") 141 | 142 | json_stackwalk = subprocess.run([stackwalker, current_dump, cache_dir], capture_output=True) 143 | minidump.stacktrace = json.loads(json_stackwalk.stdout.decode("utf-8")) 144 | minidump.symbolicated = True 145 | minidump.decode_task_complete = True 146 | db.session.commit() 147 | logger.info(f"Minidump [{crash_id}] - Sucessfully decoded.", minidump.id) 148 | -------------------------------------------------------------------------------- /crashserver/server/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .annotation import Annotation 2 | from .attachments import Attachment 3 | from .build_metadata import BuildMetadata 4 | from .minidump import Minidump 5 | from .project import Project, ProjectType 6 | from .storage import Storage 7 | from .symbol import Symbol 8 | from .symbol_upload import SymbolUploadV2 9 | from .user import User 10 | 11 | __all__ = [ 12 | "Annotation", 13 | "BuildMetadata", 14 | "Minidump", 15 | "Project", 16 | "ProjectType", 17 | "Storage", 18 | "Symbol", 19 | "SymbolUploadV2", 20 | "User", 21 | "Attachment", 22 | ] 23 | -------------------------------------------------------------------------------- /crashserver/server/models/annotation.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects.postgresql import UUID 2 | from sqlalchemy.sql import text 3 | 4 | from crashserver.server import db 5 | 6 | 7 | class Annotation(db.Model): 8 | """ 9 | A crashpad_handler may be configured to upload an arbitrary number of annotations alongside 10 | the minidump itself. These annotations must be stored in a separate table, and related to the 11 | minidump which they were uploaded by. 
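    For example, annotations registered with the handler (e.g. via crashpad_handler's --annotation=KEY=VALUE
    option, if the client is configured that way) arrive as extra form fields on the minidump upload and are
    stored here as one row per key/value pair.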
12 | id: Generated GUID for this table 13 | minidump_id: Foreign key to minidump.id 14 | key: The annotation key 15 | value: The annotation value 16 | """ 17 | 18 | __tablename__ = "annotation" 19 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 20 | minidump_id = db.Column(UUID(as_uuid=True), db.ForeignKey("minidump.id"), nullable=False) 21 | key = db.Column(db.Text(), nullable=False) 22 | value = db.Column(db.Text(), nullable=False) 23 | 24 | # Relationships 25 | minidump = db.relationship("Minidump", back_populates="annotations") 26 | -------------------------------------------------------------------------------- /crashserver/server/models/attachments.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from pathlib import Path 3 | 4 | import magic 5 | from sqlalchemy.dialects.postgresql import UUID 6 | from sqlalchemy.sql import func, text 7 | 8 | from crashserver.server import db 9 | from .storage import Storage 10 | 11 | 12 | class Attachment(db.Model): 13 | """ 14 | id: Generated GUID for this table 15 | project_id: The project which this attachment relates to 16 | minidump_id: The minidump which this attachment was uploaded with 17 | date_created: The timestamp of when the attachment was uploaded 18 | filename: The name of the attachment on disk 19 | mime_type: The uploaded file mime_type 20 | file_size_bytes: The size of the file 21 | """ 22 | 23 | __tablename__ = "attachments" 24 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 25 | project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id"), nullable=False) 26 | minidump_id = db.Column(UUID(as_uuid=True), db.ForeignKey("minidump.id"), nullable=False) 27 | date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) 28 | mime_type = db.Column(db.Text(), nullable=False) 29 | file_size_bytes = db.Column(db.Integer(), nullable=False) 30 | filename = db.Column(db.Text(), nullable=False) 31 | original_filename = db.Column(db.Text(), nullable=False) 32 | 33 | # Relationships 34 | project = db.relationship("Project") 35 | minidump = db.relationship("Minidump", back_populates="attachments") 36 | 37 | @property 38 | def file_location(self) -> Path: 39 | """File location with prefix as stored on S3""" 40 | return Path("attachments", str(self.project_id), self.filename) 41 | 42 | def store_file(self, file_content: bytes): 43 | # Determine storage location 44 | dump_id_part = str(self.minidump_id).split("-")[0] 45 | filename = "attachment-%s-%s" % (dump_id_part, str(uuid.uuid4().hex)[:8]) 46 | attach_loc = Path("attachments", str(self.project_id), filename) 47 | 48 | Storage.create(attach_loc, file_content) # Store file 49 | 50 | self.mime_type = magic.from_buffer(file_content, mime=True) # Determine mime-type 51 | self.filename = str(filename) 52 | self.file_size_bytes = len(file_content) 53 | 54 | def delete_file(self): 55 | Storage.delete(self.file_location) 56 | 57 | @property 58 | def file_content(self): 59 | file = Storage.retrieve(self.file_location) 60 | if not file: 61 | return None 62 | res = file.read().decode("utf-8", "replace") 63 | file.close() 64 | return res 65 | -------------------------------------------------------------------------------- /crashserver/server/models/build_metadata.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects.postgresql import UUID 2 | from sqlalchemy.sql import 
text 3 | 4 | from crashserver.server import db 5 | 6 | 7 | class BuildMetadata(db.Model): 8 | """ 9 | Table to store the common elements between symbols and minidump files. The `symbol_exists` row is added 10 | for convenience in the decode_minidump task. 11 | 12 | This data is stored separately in the database in case we receive a minidump file, but have not received 13 | symbol files to decode that minidump. Even if we can't decode the minidump, we still want a record that we are 14 | aware of the existence of a given module and build id combination. 15 | """ 16 | 17 | __tablename__ = "build_metadata" 18 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 19 | project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id"), nullable=False) 20 | module_id = db.Column(db.Text(), nullable=False) 21 | build_id = db.Column(db.Text(), nullable=False) 22 | 23 | # Relationships 24 | symbol = db.relationship("Symbol", uselist=False, back_populates="build") 25 | unprocessed_dumps = db.relationship( 26 | "Minidump", 27 | primaryjoin="and_(Minidump.build_metadata_id==BuildMetadata.id, Minidump.symbolicated=='false')", 28 | ) 29 | -------------------------------------------------------------------------------- /crashserver/server/models/minidump.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from functools import cached_property 3 | from pathlib import Path 4 | 5 | import redis 6 | import rq 7 | from sqlalchemy.dialects.postgresql import UUID, JSONB, INET 8 | from sqlalchemy.sql import func, text, expression 9 | 10 | from crashserver import config 11 | from crashserver.server import db, queue 12 | from crashserver.utility import processor 13 | from .storage import Storage 14 | 15 | 16 | class Minidump(db.Model): 17 | """ 18 | Each minidump uploaded will get a file reference. The file won't always exist on the system, 19 | but any data needed to regenerate the UI view of the minidump on the /crash-reports endpoints is kept here. 20 | 21 | id: Generated GUID for this table 22 | project_id: The project which this minidump relates to 23 | date_created: The timestamp of when the minidump was uploaded 24 | filename: The filename of the guid stored in the MINIDUMP_STORE directory 25 | client_guid: The guid parameter passed in from the post parameters. Optional.
26 | stacktrace: Stacktrace decoded with `./stackwalker []` 27 | """ 28 | 29 | __tablename__ = "minidump" 30 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 31 | project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id"), nullable=False) 32 | build_metadata_id = db.Column( 33 | UUID(as_uuid=True), 34 | db.ForeignKey("build_metadata.id"), 35 | nullable=True, 36 | default=None, 37 | ) 38 | date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) 39 | symbolicated = db.Column(db.Boolean(), default=expression.false()) 40 | client_guid = db.Column(UUID(as_uuid=True), nullable=True) 41 | upload_ip = db.Column(INET(), nullable=True, server_default=None) 42 | filename = db.Column(db.Text(), nullable=False) 43 | stacktrace = db.Column(JSONB, nullable=True) 44 | decode_task_id = db.Column(db.String(36)) 45 | decode_task_complete = db.Column(db.Boolean()) 46 | 47 | # Relationships 48 | project = db.relationship("Project") 49 | build = db.relationship("BuildMetadata", back_populates="unprocessed_dumps") 50 | annotations = db.relationship("Annotation") 51 | symbol = db.relationship( 52 | "Symbol", 53 | primaryjoin="Minidump.build_metadata_id == BuildMetadata.id", 54 | secondary="join(BuildMetadata, Symbol, BuildMetadata.id == Symbol.build_metadata_id)", 55 | viewonly=True, 56 | uselist=False, 57 | ) 58 | attachments = db.relationship("Attachment") 59 | 60 | @property 61 | def file_location(self) -> Path: 62 | return Path("minidump", str(self.project_id), self.filename) 63 | 64 | def store_minidump(self, file_contents: bytes): 65 | filename = "minidump-%s.dmp" % str(uuid.uuid4().hex) 66 | 67 | dump_file = Path("minidump", str(self.project_id), filename) 68 | Storage.create(dump_file, file_contents) 69 | 70 | self.filename = filename 71 | 72 | def delete_minidump(self): 73 | from crashserver.server.models import Annotation, Attachment 74 | 75 | # Get all annotations 76 | annotations = db.session.query(Annotation).filter_by(minidump_id=self.id).all() 77 | attachments = db.session.query(Attachment).filter_by(minidump_id=self.id).all() 78 | [db.session.delete(a) for a in annotations] 79 | for a in attachments: 80 | a.delete_file() 81 | db.session.delete(a) 82 | 83 | Storage.delete(self.file_location) 84 | 85 | def decode_task(self, *args, **kwargs): 86 | rq_job = queue.enqueue("crashserver.server.jobs." 
+ "decode_minidump", self.id, *args, **kwargs) 87 | self.decode_task_id = rq_job.get_id() 88 | self.decode_task_complete = False 89 | return rq_job 90 | 91 | def get_decode_job(self): 92 | try: 93 | rq_job = rq.job.Job.fetch(self.id, connection=config.get_redis_url()) 94 | except (redis.exceptions.RedisError, rq.exceptions.NoSuchJobError): 95 | return None 96 | return rq_job 97 | 98 | @cached_property 99 | def json(self): 100 | return processor.ProcessedCrash.generate(self.stacktrace) 101 | 102 | def symbols_exist(self): 103 | return self.symbol is not None 104 | 105 | def currently_processing(self): 106 | return self.task is not None 107 | -------------------------------------------------------------------------------- /crashserver/server/models/project.py: -------------------------------------------------------------------------------- 1 | import enum 2 | import pathlib 3 | 4 | from sqlalchemy.dialects.postgresql import UUID 5 | from sqlalchemy.sql import func, text 6 | 7 | from crashserver.server import db 8 | from .minidump import Minidump 9 | from .symbol import Symbol 10 | 11 | 12 | class ProjectType(enum.Enum): 13 | SIMPLE = "Simple" 14 | VERSIONED = "Versioned" 15 | 16 | @staticmethod 17 | def get_type_from_str(ptype): 18 | if ptype == "simple": 19 | return ProjectType.SIMPLE 20 | if ptype == "versioned": 21 | return ProjectType.VERSIONED 22 | return None 23 | 24 | def __str__(self): 25 | return str(self.name) 26 | 27 | 28 | class Project(db.Model): 29 | """ 30 | Crash Server is capable of storing symbols for, and decoding minidumps for multiple projects. 31 | 32 | id: Generated GUID for this table 33 | date_created: The timestamp of when the minidump was uploaded 34 | project_name: User-friendly interface name of the project 35 | api_key: An api key to be used when uploading minidumps 36 | """ 37 | 38 | __tablename__ = "project" 39 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 40 | date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) 41 | project_name = db.Column(db.Text(), nullable=False) 42 | project_type = db.Column(db.Enum(ProjectType), nullable=False) 43 | minidump_api_key = db.Column(db.String(length=32), nullable=False) 44 | symbol_api_key = db.Column(db.String(length=32), nullable=False) 45 | 46 | # Relationships 47 | minidump = db.relationship("Minidump", viewonly=True) 48 | symbol = db.relationship("Symbol") 49 | unprocessed_dumps = db.relationship("Minidump", primaryjoin="and_(Minidump.project_id==Project.id, Minidump.symbolicated=='false')", viewonly=True) 50 | 51 | @property 52 | def symbol_location(self): 53 | return pathlib.Path("symbol", str(self.id)) 54 | 55 | @property 56 | def minidump_location(self): 57 | return pathlib.Path("minidump", str(self.id)) 58 | 59 | @property 60 | def total_minidump_size(self): 61 | """:return: Size of this projects minidump location in bytes""" 62 | # TODO: Store minidump size, and properly return it 63 | return 0 # sysinfo.get_directory_size(self.minidump_location) 64 | 65 | @property 66 | def total_symbol_size(self): 67 | """:return: Size of this projects symbol location in bytes""" 68 | return db.session.query(func.sum(Symbol.file_size_bytes)).scalar() 69 | 70 | @property 71 | def symbol_count(self): 72 | return db.session.query(func.count(Symbol.id)).filter_by(project_id=self.id).scalar() 73 | 74 | @property 75 | def minidump_count(self): 76 | return db.session.query(func.count(Minidump.id)).filter_by(project_id=self.id).scalar() 77 | 78 | @property 79 
| def unprocessed_count(self): 80 | return db.session.query(func.count(Minidump.id)).filter_by(symbolicated=False, project_id=self.id).scalar() 81 | -------------------------------------------------------------------------------- /crashserver/server/models/storage.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import IO, Optional 3 | from functools import cache 4 | 5 | from loguru import logger 6 | from sqlalchemy.dialects.postgresql import JSONB 7 | 8 | import crashserver.server.storage.modules 9 | from crashserver.server import db 10 | from crashserver.server.storage import loader as storage_loader 11 | from crashserver.server.storage import storage_factory 12 | from crashserver.server.storage.backend import StorageBackend 13 | 14 | STORAGE_INSTANCES: dict[str, StorageBackend] = {} 15 | PRIMARY_STORAGE = "" 16 | 17 | 18 | class Storage(db.Model): 19 | """ 20 | Storage: Configuration data for the storage module. 21 | 22 | key: The filename where the module is stored under `crashserver.server.storage.modules.*` 23 | is_enabled: If the module is current active 24 | config: A json of config information for that module 25 | """ 26 | 27 | __tablename__ = "storage" 28 | key = db.Column(db.Text(), primary_key=True) 29 | is_enabled = db.Column(db.Boolean(), nullable=False, default=False) 30 | is_primary = db.Column(db.Boolean(), nullable=False, default=False) 31 | config = db.Column(JSONB, nullable=True) 32 | 33 | @staticmethod 34 | def load_storage_modules(): 35 | storage_loader.load_plugins(crashserver.server.storage.modules) 36 | 37 | @staticmethod 38 | def register_targets(): 39 | # Register internal targets 40 | storage_loader.load_plugins(crashserver.server.storage.modules) 41 | 42 | # Ensure all methods exist within database 43 | new_modules = [] 44 | current_targets = [key[0] for key in db.session.query(Storage.key)] 45 | 46 | modules = storage_factory.get_storage_methods() 47 | for key, storage in modules.items(): 48 | meta = storage_factory.get_metadata(key) 49 | 50 | if key not in current_targets: 51 | # If the target does not exist, get the default config, and insert that storage target into the database 52 | new_modules.append( 53 | Storage( 54 | key=key, 55 | is_enabled=meta.default_enabled(), 56 | is_primary=meta.default_primary(), 57 | config=meta.default_config(), 58 | ) 59 | ) 60 | else: 61 | # If the target does exist, compare config dicts, and add a blank config for each any new possible config keys 62 | existing_module = db.session.query(Storage).get(key) 63 | new_cfg = meta.default_config() 64 | new_cfg.update(existing_module.config) 65 | existing_module.config = new_cfg 66 | 67 | # Store new modules to database and log action 68 | if new_modules: 69 | [db.session.add(module) for module in new_modules] 70 | db.session.commit() 71 | logger.info(f"[STORAGE] {len(new_modules)} new storage targets created") 72 | else: 73 | logger.info(f"[STORAGE] No new storage targets created") 74 | 75 | # Delete rows fow deleted modules 76 | for target in current_targets: 77 | if target not in modules: 78 | ref = db.session.query(Storage).get(target) 79 | db.session.delete(ref) 80 | db.session.commit() 81 | logger.info(f"Removed target {target} because the storage module has been deleted") 82 | 83 | @staticmethod 84 | def init_targets(): 85 | global PRIMARY_STORAGE 86 | PRIMARY_STORAGE = db.session.query(Storage.key).filter_by(is_primary=True).first()[0] 87 | active_targets: [Storage] = 
db.session.query(Storage).filter_by(is_enabled=True).all() 88 | for target in sorted(active_targets, key=lambda x: x.key): 89 | STORAGE_INSTANCES[target.key] = storage_factory.get_storage_method(target.key)(target.config) 90 | STORAGE_INSTANCES[target.key].init() 91 | 92 | @property 93 | def meta(self): 94 | return storage_factory.get_metadata(self.key) 95 | 96 | @staticmethod 97 | def create(path: Path, file_contents: bytes, backend: str = None): 98 | if backend: 99 | STORAGE_INSTANCES[backend].create(path, file_contents) 100 | return 101 | 102 | # # Attempt primary backend first 103 | success = STORAGE_INSTANCES[PRIMARY_STORAGE].create(path, file_contents) 104 | if success: 105 | return 106 | 107 | success_backends = [] 108 | for key, instance in STORAGE_INSTANCES.items(): 109 | if instance.create(path, file_contents): 110 | success_backends.append(key) 111 | 112 | # If we reach here, saving was unsuccessful. 113 | logger.error(f"Failed [{PRIMARY_STORAGE}] storage of file [{path}]. Successfully stored in {success_backends}.") 114 | 115 | @staticmethod 116 | def retrieve(path: Path) -> Optional[IO]: 117 | for key, instance in STORAGE_INSTANCES.items(): 118 | file = instance.read(path) 119 | if file is not None: 120 | return file 121 | raise FileNotFoundError 122 | 123 | @staticmethod 124 | def retrieve_from_backend(path: Path, key: str) -> Optional[IO]: 125 | file = STORAGE_INSTANCES[key].read(path) 126 | if file is not None: 127 | return file 128 | raise FileNotFoundError 129 | 130 | @staticmethod 131 | def delete(path: Path): 132 | for key, instance in STORAGE_INSTANCES.items(): 133 | instance.delete(path) 134 | -------------------------------------------------------------------------------- /crashserver/server/models/symbol.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | from pathlib import Path 3 | 4 | from sqlalchemy.dialects.postgresql import UUID 5 | from sqlalchemy.sql import func, text 6 | 7 | from crashserver.server import db 8 | from .storage import Storage 9 | 10 | 11 | class Symbol(db.Model): 12 | """ 13 | id: Generated GUID for this table 14 | project_id: The project which this minidump relates to 15 | date_created: The timestamp of when the minidump was uploaded 16 | file_location: The location of the minidump file, with respect to the root storage location 17 | """ 18 | 19 | __tablename__ = "symbol" 20 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 21 | project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id"), nullable=False) 22 | build_metadata_id = db.Column(UUID(as_uuid=True), db.ForeignKey("build_metadata.id"), nullable=False) 23 | date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) 24 | app_version = db.Column(db.Text(), nullable=True) 25 | os = db.Column(db.Text(), nullable=False) 26 | arch = db.Column(db.Text(), nullable=False) 27 | file_location = db.Column(db.Text(), nullable=False) 28 | file_size_bytes = db.Column(db.Integer(), nullable=False) 29 | file_hash = db.Column(db.String(length=64), nullable=False) 30 | 31 | # Relationships 32 | project = db.relationship("Project", back_populates="symbol") 33 | build = db.relationship("BuildMetadata") 34 | 35 | @property 36 | def file_location_stored(self) -> Path: 37 | return Path("symbol", str(self.project_id), self.file_location) 38 | 39 | def store_file(self, file_content: bytes): 40 | filesystem_module_id = self.build.module_id.split(".")[0] 41 | dir_location = 
Path(self.build.module_id, self.build.build_id, filesystem_module_id + ".sym") 42 | 43 | sym_loc = Path("symbol", str(self.project_id), dir_location) 44 | Storage.create(sym_loc, file_content) 45 | 46 | self.file_size_bytes = len(file_content) 47 | self.file_location = str(dir_location) 48 | self.file_hash = str(hashlib.blake2s(file_content).hexdigest()) 49 | -------------------------------------------------------------------------------- /crashserver/server/models/symbol_upload.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | from pathlib import Path 3 | 4 | from sqlalchemy.dialects.postgresql import UUID 5 | from sqlalchemy.sql import func, text 6 | 7 | from crashserver.server import db 8 | from crashserver.utility.misc import SymbolData 9 | 10 | 11 | class SymbolUploadV2(db.Model): 12 | """ 13 | Track upload_locations for the `sym-upload-v2` protocol. 14 | 15 | While `sym-upload-v1` is a more direct uploading protocol, `sym-upload-v2` defines 16 | additional endpoints to use to check the status of a symbol before/after it's been 17 | uploaded. Steps are explained in the header of `sym_upload_v2.py` 18 | """ 19 | 20 | __tablename__ = "sym_upload_tracker" 21 | id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")) 22 | project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id"), nullable=False) 23 | date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) 24 | module_id = db.Column(db.Text(), nullable=True) 25 | build_id = db.Column(db.Text(), nullable=True) 26 | arch = db.Column(db.Text(), nullable=True) 27 | os = db.Column(db.Text(), nullable=True) 28 | file_hash = db.Column(db.String(length=64)) 29 | 30 | # Relationships 31 | project = db.relationship("Project") 32 | 33 | @property 34 | def file_location(self): 35 | return Path("sym_upload_v2", f"{self.id}.sym") 36 | 37 | @property 38 | def symbol_data(self): 39 | return SymbolData(module_id=self.module_id, build_id=self.build_id, arch=self.arch, os=self.os) 40 | 41 | def store_file(self, file_content: bytes): 42 | # Ensure very first line starts with word "MODULE" 43 | file_content = file_content[file_content.find("MODULE".encode()) :] 44 | 45 | first_line = file_content[: file_content.find("\n".encode())].decode("utf-8") 46 | symbol_data = SymbolData.from_module_line(first_line) 47 | self.build_id = symbol_data.build_id 48 | self.module_id = symbol_data.module_id 49 | self.arch = symbol_data.arch 50 | self.os = symbol_data.os 51 | 52 | self.file_location.parent.mkdir(parents=True, exist_ok=True) 53 | with open(self.file_location.absolute(), "wb") as f: 54 | f.write(file_content) 55 | 56 | self.file_hash = str(hashlib.blake2s(file_content).hexdigest()) 57 | 58 | def load_file(self) -> bytes: 59 | with open(self.file_location.absolute(), "rb") as f: 60 | file_content = f.read() 61 | return file_content 62 | -------------------------------------------------------------------------------- /crashserver/server/models/user.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from flask_login import UserMixin 4 | from sqlalchemy.dialects.postgresql import UUID 5 | from sqlalchemy.sql import func, text 6 | from werkzeug.security import generate_password_hash, check_password_hash 7 | 8 | from crashserver.server import db 9 | 10 | 11 | class User(db.Model, UserMixin): 12 | """ 13 | Crash Server keeps track of user accounts ot determine who has has permission 
to administrate Crash Server. 14 | There will be zero permissions available. 15 | - An anonymous user can upload minidumps, view symbols, and view the crash dashboard for each application. 16 | - An authenticated user can access api-keys, delete symbols, and manage any application settings. 17 | """ 18 | 19 | __tablename__ = "users" 20 | id = db.Column( 21 | UUID(as_uuid=True), 22 | primary_key=True, 23 | server_default=text("gen_random_uuid()"), 24 | default=uuid.uuid4, 25 | ) 26 | date_created = db.Column(db.DateTime(timezone=True), server_default=func.now()) 27 | email = db.Column(db.String(254), nullable=False) 28 | password = db.Column(db.String(200), nullable=False) 29 | 30 | def __init__(self, email): 31 | self.email = email 32 | 33 | def set_password(self, password): 34 | self.password = generate_password_hash(password, method="pbkdf2:sha512:310000") 35 | 36 | def check_password(self, password): 37 | return check_password_hash(self.password, password) 38 | -------------------------------------------------------------------------------- /crashserver/server/storage/__init__.py: -------------------------------------------------------------------------------- 1 | from .storage_factory import register, unregister, get_storage_method, get_storage_methods 2 | from .storage_target import StorageTarget 3 | -------------------------------------------------------------------------------- /crashserver/server/storage/backend.py: -------------------------------------------------------------------------------- 1 | import typing 2 | from pathlib import Path 3 | 4 | 5 | class StorageBackend(typing.Protocol): 6 | """Protocol for a class capable of managing files""" 7 | 8 | def init(self): 9 | """Initialize any necessary components for the storage module""" 10 | 11 | def create(self, path: Path, file_content: bytes) -> bool: 12 | """Store file_content at the given path. Return true for success, otherwise false""" 13 | 14 | def read(self, path: Path) -> typing.Optional[typing.IO]: 15 | """Read data from given path. Return bytes of result. May raise storage.errors.FileNotFound""" 16 | 17 | def delete(self, path: Path) -> bool: 18 | """Delete file at given path. Return bool for success""" 19 | 20 | 21 | class StorageMeta(typing.Protocol): 22 | """Protocol for storing metadata about a backend""" 23 | 24 | @staticmethod 25 | def ui_name() -> str: 26 | """Get user-facing name of target""" 27 | 28 | @staticmethod 29 | def default_enabled() -> bool: 30 | """Return true if module is enabled by default, otherwise false""" 31 | 32 | @staticmethod 33 | def default_primary(): 34 | """Return true if module is primary storage backend by default, otherwise false""" 35 | 36 | @staticmethod 37 | def default_config() -> dict: 38 | """Get default config options for this storage target""" 39 | 40 | @staticmethod 41 | def web_config() -> dict: 42 | """Retrieve parameters for web config""" 43 | 44 | @staticmethod 45 | def validate_credentials(config) -> bool: 46 | """Return true if given credentials are valid, otherwise false""" 47 | -------------------------------------------------------------------------------- /crashserver/server/storage/loader.py: -------------------------------------------------------------------------------- 1 | import typing 2 | import importlib 3 | import importlib.resources 4 | 5 | from loguru import logger 6 | 7 | 8 | class ModuleInterface: 9 | """Represents a plugin interface. 
A plugin has a single register function.""" 10 | 11 | @staticmethod 12 | def register() -> None: 13 | """Register the necessary items in the storage factory.""" 14 | 15 | 16 | def import_module(name: str) -> ModuleInterface: 17 | """Imports a module given a name.""" 18 | return importlib.import_module(name) # type: ignore 19 | 20 | 21 | def load_plugins(package: typing.Any): 22 | """Import all plugins in a package""" 23 | files = importlib.resources.contents(package) 24 | plugins = [f[:-3] for f in files if f.endswith(".py") and f[0] != "_"] 25 | for plugin in plugins: 26 | name = f"{package.__package__}.{plugin}" 27 | plugin_ref = import_module(name) 28 | if not hasattr(plugin_ref, "register"): 29 | logger.warning(f"Module {name} does not have top-level function `register()`. {name} will be ignored.") 30 | continue 31 | plugin_ref.register() 32 | -------------------------------------------------------------------------------- /crashserver/server/storage/modules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/crashserver/server/storage/modules/__init__.py -------------------------------------------------------------------------------- /crashserver/server/storage/modules/filesystem.py: -------------------------------------------------------------------------------- 1 | import io 2 | import typing 3 | from pathlib import Path 4 | from loguru import logger 5 | from crashserver.server.storage import storage_factory 6 | 7 | 8 | class DiskStorage: 9 | def __init__(self, config: dict): 10 | self.config = config 11 | self.config["path"] = Path(self.config.get("path")) 12 | 13 | def init(self) -> None: 14 | logger.info("[STORAGE/DISK] Initializing...") 15 | self.config.get("path").mkdir(parents=True, exist_ok=True) 16 | logger.info("[STORAGE/DISK] Initialization complete") 17 | 18 | def create(self, path: Path, file_contents: bytes) -> bool: 19 | """Store the data in file at path. Return bool for success""" 20 | filepath = Path(self.config.get("path"), path) 21 | filepath.parent.mkdir(parents=True, exist_ok=True) 22 | 23 | logger.debug(f"[STORAGE/DISK] Creating file {filepath}") 24 | try: 25 | with open(filepath, "wb+") as outfile: 26 | outfile.write(file_contents) 27 | return True 28 | except: 29 | return False 30 | 31 | def read(self, path: Path) -> typing.Optional[typing.IO]: 32 | """Retrieve and return the file at path as a file-like object""" 33 | filepath = self.config.get("path") / path 34 | if not filepath.exists(): 35 | logger.debug(f"[STORAGE/DISK] Cannot load file [{filepath}]. File does not exist.") 36 | return None 37 | 38 | logger.debug(f"[STORAGE/DISK] Reading file {filepath}") 39 | with open(filepath, "rb") as outfile: 40 | return io.BytesIO(outfile.read()) 41 | 42 | def delete(self, path: Path) -> bool: 43 | """Delete the file at path. Return bool for success""" 44 | file = Path(self.config.get("path") / path) 45 | if file.exists(): 46 | logger.info(f"[STORAGE/DISK] Deleting file {path}") 47 | file.unlink(missing_ok=True) 48 | else: 49 | logger.warning(f"[STORAGE/DISK] File not deleted. File does not exist. 
File: {path}") 50 | return True 51 | 52 | 53 | class DiskStorageMeta: 54 | @staticmethod 55 | def ui_name() -> str: 56 | return "Filesystem" 57 | 58 | @staticmethod 59 | def default_enabled() -> bool: 60 | """Return true if module is enabled by default, otherwise false""" 61 | return True 62 | 63 | @staticmethod 64 | def default_primary(): 65 | return True 66 | 67 | @staticmethod 68 | def default_config() -> dict: 69 | """Get default config options for this storage target""" 70 | return {"path": "/storage"} 71 | 72 | @staticmethod 73 | def web_config() -> dict: 74 | """Retrieve parameters for web config""" 75 | return { 76 | "options": [ 77 | {"key": "path", "title": "Path", "default": DiskStorageMeta.default_config()["path"], "desc": "Absolute path without a trailing slash (e.g. /storage)"}, 78 | ] 79 | } 80 | 81 | @staticmethod 82 | def validate_credentials(config) -> bool: 83 | """Return true if given credentials are valid, otherwise false""" 84 | return True 85 | 86 | 87 | def register() -> None: 88 | storage_factory.register("filesystem", DiskStorage, DiskStorageMeta) 89 | -------------------------------------------------------------------------------- /crashserver/server/storage/modules/s3.py: -------------------------------------------------------------------------------- 1 | import io 2 | import typing 3 | from pathlib import Path 4 | 5 | import boto3 6 | import botocore.exceptions 7 | from loguru import logger 8 | 9 | from crashserver.server.storage import storage_factory 10 | 11 | 12 | class S3CompatibleStorage: 13 | def __init__(self, storage_name: str, config: dict = None): 14 | self.storage_name = storage_name 15 | self.config = config 16 | self.bucket_name = self.config.pop("bucket_name", "crashserver") 17 | self.s3 = None 18 | 19 | def init_storage(self): 20 | logger.info(f"[STORAGE/{self.storage_name}] Initializing...") 21 | self.s3 = boto3.client("s3", **self.config) 22 | self.s3.head_bucket(Bucket=self.bucket_name) 23 | logger.info(f"[STORAGE/{self.storage_name}] Initialization complete") 24 | 25 | def create(self, path: Path, file_contents: bytes) -> bool: 26 | logger.debug(f"[STORAGE/{self.storage_name}] Creating file {path}") 27 | try: 28 | self.s3.upload_fileobj(io.BytesIO(file_contents), self.bucket_name, str(path)) 29 | return True 30 | except: 31 | return False 32 | 33 | def retrieve(self, path: Path) -> typing.Optional[typing.IO]: 34 | logger.debug(f"[STORAGE/{self.storage_name}] Reading file {path}") 35 | data = io.BytesIO() 36 | try: 37 | self.s3.download_fileobj(self.bucket_name, str(path), data) 38 | data.seek(0) 39 | return data 40 | except botocore.exceptions.ClientError: # Thrown when file is not available 41 | logger.debug(f"[STORAGE/{self.storage_name}] Unable to read file [{path}]") 42 | return None 43 | 44 | def delete(self, path: Path) -> bool: 45 | logger.debug(f"[STORAGE/{self.storage_name}] Deleting file {path}") 46 | self.s3.delete_object(Bucket=self.bucket_name, Key=str(path)) 47 | 48 | 49 | class S3Storage: 50 | def __init__(self, config: dict): 51 | self.store = S3CompatibleStorage("S3", config) 52 | self.config = config 53 | 54 | def init(self) -> None: 55 | """Initialize the storage module""" 56 | self.store.init_storage() 57 | 58 | def create(self, path: Path, file_contents: bytes) -> bool: 59 | """Store the data in file at path. 
Return bool for success""" 60 | return self.store.create(path, file_contents) 61 | 62 | def read(self, path: Path) -> typing.Optional[typing.IO]: 63 | """Retrieve and return the file at path as a file-like object""" 64 | return self.store.retrieve(path) 65 | 66 | def delete(self, path: Path) -> bool: 67 | """Delete the file at path. Return bool for success""" 68 | return self.store.delete(path) 69 | 70 | 71 | class S3Meta: 72 | @staticmethod 73 | def ui_name() -> str: 74 | return "S3" 75 | 76 | @staticmethod 77 | def default_enabled() -> bool: 78 | return False 79 | 80 | @staticmethod 81 | def default_primary(): 82 | return False 83 | 84 | @staticmethod 85 | def default_config() -> dict: 86 | return { 87 | "aws_access_key_id": "", 88 | "aws_secret_access_key": "", 89 | "bucket_name": "", 90 | "region_name": "", 91 | } 92 | 93 | @staticmethod 94 | def web_config() -> dict: 95 | """Retrieve parameters for web config""" 96 | return { 97 | "options": [ 98 | {"key": "bucket_name", "title": "Bucket Name", "desc": "The unique bucket name to create"}, 99 | {"key": "aws_access_key_id", "title": "Access Key ID", "desc": "The Access Key ID"}, 100 | {"key": "aws_secret_access_key", "title": "Secret Access Key", "desc": "The Access Key Secret for the Access Key ID"}, 101 | {"key": "region_name", "title": "Bucket Region", "desc": "The region for the bucket"}, 102 | ], 103 | "actions": [ 104 | {"func": "upload_data", "desc": "Upload all local data to AWS S3"}, 105 | ], 106 | } 107 | 108 | @staticmethod 109 | def validate_credentials(config) -> bool: 110 | """Return true if given credentials are valid, otherwise false""" 111 | logger.info(config) 112 | 113 | client = boto3.client( 114 | "s3", 115 | endpoint_url="https://s3.amazonaws.com", 116 | aws_access_key_id=config.get("aws_access_key_id", ""), 117 | aws_secret_access_key=config.get("aws_secret_access_key", ""), 118 | region_name=config.get("region_name", ""), 119 | ) 120 | try: 121 | res = client.head_bucket(Bucket=config.get("bucket_name", "")) 122 | return res 123 | except: 124 | return False 125 | 126 | 127 | class S3Generic(S3Storage): 128 | def __init__(self, config: dict): 129 | super().__init__(config) 130 | self.store = S3CompatibleStorage("S3Generic", config) 131 | 132 | 133 | class S3GenericMeta: 134 | @staticmethod 135 | def ui_name() -> str: 136 | return "S3 Generic" 137 | 138 | @staticmethod 139 | def default_enabled() -> bool: 140 | """Return true if module is enabled by default, otherwise false""" 141 | return False 142 | 143 | @staticmethod 144 | def default_primary(): 145 | return False 146 | 147 | @staticmethod 148 | def default_config() -> dict: 149 | """Get default config options for this storage target""" 150 | return { 151 | "aws_access_key_id": "", 152 | "aws_secret_access_key": "", 153 | "endpoint_url": "", 154 | "bucket_name": "", 155 | "region_name": "", 156 | } 157 | 158 | @staticmethod 159 | def web_config() -> dict: 160 | """Retrieve parameters for web config""" 161 | return { 162 | "options": [ 163 | {"key": "endpoint_url", "title": "Endpoint URI", "desc": "The full S3-compliant endpoint URI"}, 164 | {"key": "bucket_name", "title": "Bucket Name", "desc": "The unique bucket name to create"}, 165 | {"key": "aws_access_key_id", "title": "Access Key ID", "desc": "The Access Key ID"}, 166 | {"key": "aws_secret_access_key", "title": "Secret Access Key", "desc": "The Access Key Secret for the Access Key ID"}, 167 | {"key": "region_name", "title": "Bucket Region", "desc": "The region for the bucket"}, 168 | ], 169 | "actions": [ 
170 | {"func": "upload_data", "desc": "Upload all local data to AWS S3"}, 171 | ], 172 | } 173 | 174 | @staticmethod 175 | def validate_credentials(config) -> bool: 176 | """Return true if given credentials are valid, otherwise false""" 177 | client = boto3.client( 178 | "s3", 179 | aws_access_key_id=config.get("aws_access_key_id", ""), 180 | aws_secret_access_key=config.get("aws_secret_access_key", ""), 181 | endpoint_url=config.get("endpoint_url", ""), 182 | region_name=config.get("region_name", ""), 183 | ) 184 | try: 185 | res = client.head_bucket(Bucket=config.get("bucket_name", "")) 186 | return res 187 | except: 188 | return False 189 | 190 | 191 | def register() -> None: 192 | storage_factory.register("s3", S3Storage, S3Meta) 193 | storage_factory.register("s3generic", S3Generic, S3GenericMeta) 194 | -------------------------------------------------------------------------------- /crashserver/server/storage/storage_factory.py: -------------------------------------------------------------------------------- 1 | """ 2 | Storage Factory 3 | =============== 4 | This factory stores a registry of all classes which follow the StorageTarget protocol. Currently, the elements 5 | must be registered manually within the `storage.py:init_targets` function. This factory is used heavily within 6 | that function in order to separate the loading and initialization of a storage module, and the usage of any 7 | storage module. 8 | """ 9 | import typing 10 | 11 | from loguru import logger 12 | 13 | from crashserver.server.storage.backend import StorageBackend, StorageMeta 14 | 15 | storage_registry: dict[str, typing.Type[StorageBackend]] = {} 16 | storage_meta: dict[str, typing.Type[StorageMeta]] = {} 17 | 18 | 19 | def register(storage_key: str, instance: typing.Type[StorageBackend], meta: typing.Type[StorageMeta]): 20 | """Register a new storage target""" 21 | if storage_key in storage_registry: 22 | logger.warning(f"Storage key '{storage_key}' already exists. 
{instance.__name__} was not added to registry.") 23 | storage_registry[storage_key] = instance 24 | storage_meta[storage_key] = meta 25 | 26 | 27 | def unregister(storage_key: str) -> None: 28 | """Unregister a storage target""" 29 | storage_registry.pop(storage_key, None) 30 | 31 | 32 | def get_storage_methods() -> dict[str, StorageBackend]: 33 | return storage_registry 34 | 35 | 36 | def get_metadata(key: str) -> typing.Type[StorageMeta]: 37 | return storage_meta.get(key, None) 38 | 39 | 40 | def get_storage_method(key: str) -> typing.Callable[..., StorageBackend]: 41 | return storage_registry.get(key, None) 42 | -------------------------------------------------------------------------------- /crashserver/server/storage/storage_target.py: -------------------------------------------------------------------------------- 1 | import typing 2 | from pathlib import Path 3 | 4 | 5 | class StorageTarget(typing.Protocol): 6 | """Protocol for a class capable of managing files""" 7 | 8 | def init(self) -> None: 9 | """Initialize the storage module""" 10 | pass 11 | 12 | @staticmethod 13 | def get_user_friendly_name() -> str: 14 | """Get user-facing name of target""" 15 | pass 16 | 17 | @staticmethod 18 | def is_default_enabled() -> bool: 19 | """Return true if module is enabled by default, otherwise false""" 20 | pass 21 | 22 | @staticmethod 23 | def get_default_config() -> dict: 24 | """Get default config options for this storage target""" 25 | pass 26 | 27 | @staticmethod 28 | def get_web_config() -> dict: 29 | """Retrieve parameters for web config""" 30 | pass 31 | 32 | @staticmethod 33 | def validate_credentials(config) -> bool: 34 | """Return true if given credentials are valid, otherwise false""" 35 | pass 36 | 37 | def create(self, path: Path, file_contents: bytes) -> bool: 38 | """Store the data in file at path. Return bool for success""" 39 | pass 40 | 41 | def retrieve(self, path: Path) -> typing.IO: 42 | """Retrieve and return the file at path as a file-like object""" 43 | pass 44 | 45 | def delete(self, path: Path) -> bool: 46 | """Delete the file at path. Return bool for success""" 47 | pass 48 | -------------------------------------------------------------------------------- /crashserver/syscheck.py: -------------------------------------------------------------------------------- 1 | import os 2 | import stat 3 | 4 | from loguru import logger 5 | from redis import Redis 6 | 7 | import crashserver.config as config 8 | 9 | 10 | def validate_all_settings(): 11 | success = all( 12 | [ 13 | valid_postgres_settings(), 14 | valid_redis_settings(), 15 | validate_binary_executable_bit(), 16 | ] 17 | ) 18 | if not success: 19 | logger.error("Startup check failed. Terminating.") 20 | exit(1) 21 | else: 22 | logger.info("Startup check complete.") 23 | 24 | 25 | def valid_postgres_settings(): 26 | """ 27 | Attempt to connect to the database with the credentials from application settings. 28 | :return: True if successful, otherwise false. 
29 | """ 30 | from sqlalchemy import create_engine 31 | 32 | # Connect to database 33 | db = config.settings.db 34 | engine = create_engine(f"postgresql://{db.user}:{db.passwd}@{db.host}:{db.port}/{db.name}") 35 | 36 | try: 37 | # Don't do any operation, just try to make a connection to the server 38 | with engine.begin(): 39 | pass 40 | except Exception as ex: 41 | logger.error("Database connection failed: {}", str(ex.args[0]).strip()) 42 | return False 43 | 44 | logger.info(f"Credentials verified for postgresql://{db.user}@{db.host}:{db.port}/{db.name}") 45 | return True 46 | 47 | 48 | def valid_redis_settings(): 49 | """ 50 | Attempt to connect to redis host with the credentials from application settings. 51 | :return: True if successful, otherwise false. 52 | """ 53 | r = Redis.from_url(config.get_redis_url()) 54 | try: 55 | return r.ping() 56 | except Exception: 57 | logger.error(f"Unable to connect to redis instance at redis://:***@{config.settings.redis.host}:{config.settings.redis.port}") 58 | return False 59 | 60 | 61 | def validate_binary_executable_bit(): 62 | exe_path = "res/bin/linux" 63 | exe_files = ["dump_syms", "stackwalker", "minidump_stackwalk"] 64 | validated = True 65 | for f in exe_files: 66 | full_path = os.path.join(exe_path, f) 67 | can_rx = os.access(full_path, os.R_OK | os.X_OK) 68 | 69 | # If we cant read/execute, attempt to add read/execute 70 | if not can_rx: 71 | try: 72 | st = os.stat(full_path) 73 | os.chmod(full_path, st.st_mode | stat.S_IEXEC) 74 | logger.debug(f"File {full_path} given executable bit.") 75 | except PermissionError: 76 | logger.error(f"Unable to read or write {full_path}. Unable to add executable bit. " "PermissionError: Operation not permitted") 77 | validated = False 78 | return validated 79 | -------------------------------------------------------------------------------- /crashserver/utility/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/crashserver/utility/__init__.py -------------------------------------------------------------------------------- /crashserver/utility/decorators.py: -------------------------------------------------------------------------------- 1 | import functools 2 | 3 | import flask 4 | 5 | from crashserver.server import db 6 | from crashserver.server.models import Project, ProjectType 7 | 8 | 9 | def api_key_required(key_type="minidump", url_arg_key="api_key", pass_project=True): 10 | """ 11 | Requires that the `api_key` url argument has been included in request. 12 | Queries the database for a matching api_key, and passes the project in as the first parameter 13 | Used after a flask `app.route` decorator. 14 | :arg url_arg_key The url argument to require. 
Typically url_arg_key though different for symupload 15 | :arg pass_project True if the project object queried from the database should be passed into the decorated function 16 | :return: 17 | """ 18 | 19 | def decorator(func): 20 | @functools.wraps(func) 21 | def action(*args, **kwargs): 22 | # Ensure arg exists 23 | if url_arg_key not in flask.request.args.keys(): 24 | return {"error": "Endpoint requires %s" % url_arg_key}, 400 25 | 26 | # Get the project 27 | res = None 28 | if key_type == "minidump": 29 | res = db.session.query(Project).filter_by(minidump_api_key=flask.request.args[url_arg_key]).first() 30 | elif key_type == "symbol": 31 | res = db.session.query(Project).filter_by(symbol_api_key=flask.request.args[url_arg_key]).first() 32 | 33 | if res is None: 34 | return {"error": "Bad %s" % url_arg_key}, 400 35 | 36 | if pass_project: 37 | return func(res, *args, **kwargs) 38 | else: 39 | return func(*args, **kwargs) 40 | 41 | return action 42 | 43 | return decorator 44 | 45 | 46 | def check_project_versioned(): 47 | def decorator(func): 48 | @functools.wraps(func) 49 | def action(project, *args, **kwargs): 50 | 51 | # Check if project is versioned 52 | if project.project_type == ProjectType.VERSIONED: 53 | version = flask.request.args.get("version") 54 | if not version: 55 | return {"error": "Project requires 'version' parameter for symbol upload"}, 400 56 | else: 57 | return func(project, version, *args, **kwargs) 58 | 59 | # Do nothing, pass-through 60 | else: 61 | return func(project, None, *args, **kwargs) 62 | 63 | return action 64 | 65 | return decorator 66 | 67 | 68 | def url_arg_required(arg=""): 69 | """ 70 | Used after a flask `app.route` decorator. Requires that the arg is in the url parameters of the request 71 | :param arg: The arg to look for 72 | """ 73 | 74 | def decorator(func): 75 | @functools.wraps(func) 76 | def inner(*args, **kwargs): 77 | if arg not in flask.request.args.keys(): 78 | return {"error": "missing url argument {}".format(arg)}, 400 79 | return func(*args, **kwargs) 80 | 81 | return inner 82 | 83 | return decorator 84 | 85 | 86 | def file_key_required(file_key=""): 87 | """ 88 | Used after a flask `app.route` decorator. 
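An illustrative usage sketch for the decorators in this module (editor's addition; the route path, view function, and multipart field name are invented, and the actual upload routes are defined elsewhere in the server and not shown in this file):

    import flask
    from crashserver.utility.decorators import api_key_required, file_key_required

    app = flask.Flask(__name__)

    @app.route("/api/minidump/upload", methods=["POST"])  # invented path
    @api_key_required(key_type="minidump", url_arg_key="api_key", pass_project=True)
    @file_key_required("upload_file_minidump")  # invented field name
    def upload_minidump(project):
        # `project` is injected by api_key_required(pass_project=True)
        dump = flask.request.files["upload_file_minidump"]
        return {"project": str(project.id), "size": len(dump.read())}, 200
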
Requires that the file_key is one of the uploaded file keys 89 | :param file_key: The file key to look for 90 | """ 91 | 92 | def decorator(func): 93 | @functools.wraps(func) 94 | def inner(*args, **kwargs): 95 | if file_key not in flask.request.files.keys(): 96 | return {"error": "missing file parameter {}".format(file_key)}, 400 97 | return func(*args, **kwargs) 98 | 99 | return inner 100 | 101 | return decorator 102 | -------------------------------------------------------------------------------- /crashserver/utility/hostinfo.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import sys 3 | 4 | 5 | class HostInfo: 6 | @staticmethod 7 | def get_hostname() -> str: 8 | return socket.gethostname() 9 | 10 | @staticmethod 11 | def get_python_version(): 12 | info = sys.version_info 13 | return "Python {}.{}.{}".format(info.major, info.minor, info.micro) 14 | -------------------------------------------------------------------------------- /crashserver/utility/misc.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import datetime 3 | 4 | import humanize 5 | from flask import flash 6 | 7 | 8 | def flash_form_errors(form): 9 | for field_name in form.errors: 10 | for error in form.errors[field_name]: 11 | flash("{}: {}".format(form[field_name].label.text, error), "error") 12 | 13 | 14 | def get_font_awesome_os_icon(os: str): 15 | os = os.lower() 16 | if os == "windows": 17 | return "fab fa-windows" 18 | if os == "mac": 19 | return "fab fa-apple" 20 | if os == "linux": 21 | return "fab fa-linux" 22 | return "" 23 | 24 | 25 | def get_storage_icon(key: str): 26 | key = key.lower() 27 | if key == "filesystem": 28 | return "fas fa-hdd" 29 | if key == "s3": 30 | return "fab fa-aws" 31 | if key == "s3generic": 32 | return "fas fa-cube" 33 | 34 | 35 | def naturaltime(time) -> str: 36 | now = datetime.datetime.now(tz=time.tzinfo) 37 | return humanize.naturaltime(now - time) 38 | 39 | 40 | @dataclasses.dataclass 41 | class SymbolData: 42 | """ 43 | These attributes uniquely identify any symbol file. 
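For reference, the header line of a Breakpad `.sym` file (which `from_module_line()` below parses by splitting on spaces) looks like the following; the build id and module name here are invented by the editor for illustration:

    MODULE Linux x86_64 4C4C44A10B3F11EC8AD90800200C9A66 libcrashtest.so

    data = SymbolData.from_module_line("MODULE Linux x86_64 4C4C44A10B3F11EC8AD90800200C9A66 libcrashtest.so")
    assert (data.os, data.arch, data.module_id) == ("Linux", "x86_64", "libcrashtest.so")

Those four values are exactly what this dataclass stores (app_version is optional).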
It is used over the Symbol db model as the db model is 44 | organized different to keep data duplication to a minimum 45 | """ 46 | 47 | os: str = "" 48 | arch: str = "" 49 | build_id: str = "" 50 | module_id: str = "" 51 | app_version: str = None 52 | 53 | @staticmethod 54 | def from_module_line(module_line: str): 55 | metadata = module_line.strip().split(" ") 56 | return SymbolData( 57 | os=metadata[1], 58 | arch=metadata[2], 59 | build_id=metadata[3], 60 | module_id=metadata[4], 61 | ) 62 | -------------------------------------------------------------------------------- /crashserver/utility/processor.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | 4 | @dataclass 5 | class DumpModule: 6 | base_address: str 7 | end_address: str 8 | code_id: str 9 | debug_file: str # filename | empty string 10 | debug_id: str # [[:xdigit:]]{33} | empty string 11 | filename: str 12 | version: str 13 | missing_symbols: bool 14 | 15 | @staticmethod 16 | def generate_list(json: dict): 17 | res = [] 18 | 19 | for mod in json: 20 | res.append( 21 | DumpModule( 22 | base_address=mod.get("base_addr"), 23 | end_address=mod.get("end_addr"), 24 | code_id=mod.get("code_id"), 25 | debug_file=mod.get("debug_file"), 26 | debug_id=mod.get("debug_id"), 27 | filename=mod.get("filename"), 28 | version=mod.get("version"), 29 | # Default false, as the key won't be there if it's true 30 | missing_symbols=mod.get("missing_symbols", False), 31 | ) 32 | ) 33 | return res 34 | 35 | 36 | @dataclass 37 | class SystemInfo: 38 | os_name: str = "" 39 | os_version: str = "" # Linux | Windows NT | Mac OS X 40 | cpu_arch: str = "" # x86 | amd64 | arm | ppc | sparc 41 | cpu_core_count: int = 0 42 | cpu_info: str = "" 43 | cpu_version_microcode: str = "" 44 | 45 | @staticmethod 46 | def generate(json: dict): 47 | return SystemInfo( 48 | os_name=json.get("os"), 49 | os_version=json.get("os_ver"), 50 | cpu_arch=json.get("cpu_arch"), 51 | cpu_info=json.get("cpu_info"), 52 | cpu_core_count=json.get("cpu_count"), 53 | cpu_version_microcode=json.get("cpu_microcode_version", ""), 54 | ) 55 | 56 | 57 | @dataclass 58 | class CrashReason: 59 | crash_type: str 60 | crash_address: str 61 | crashing_thread: int 62 | assertion: str 63 | 64 | @staticmethod 65 | def generate(json: dict): 66 | return CrashReason( 67 | crash_type=json.get("type"), 68 | crash_address=json.get("address", "Unknown"), 69 | crashing_thread=json.get("crashing_thread"), 70 | assertion=json.get("assertion"), 71 | ) 72 | 73 | 74 | @dataclass 75 | class ThreadFrame: 76 | frame_index: int 77 | file: str 78 | func: str 79 | func_offset: str 80 | line: int 81 | module: str 82 | module_offset: str 83 | offset: str 84 | registers: dict 85 | trust: str # none | scan | cfi_scan | frame_pointer | cfi | context | prewalked 86 | 87 | @staticmethod 88 | def generate_list(frames: list): 89 | res = [] 90 | for frame in frames: 91 | res.append( 92 | ThreadFrame( 93 | frame_index=frame.get("frame"), 94 | file=frame.get("file"), 95 | func=frame.get("function"), 96 | func_offset=frame.get("function_offset"), 97 | line=frame.get("line"), 98 | module=frame.get("module"), 99 | module_offset=frame.get("module_offset"), 100 | offset=frame.get("offset"), 101 | registers={}, 102 | trust=frame.get("trust"), 103 | ) 104 | ) 105 | 106 | res.sort(key=lambda x: x.frame_index) 107 | return res 108 | 109 | 110 | @dataclass 111 | class StackThread: 112 | thread_index: int 113 | total_frames: int 114 | frames: [ThreadFrame] 115 | 116 | 
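# Editor's illustrative sketch (standalone; conceptually it belongs at module level or
# in a test, not inside the surrounding class): the key names below mirror the .get()
# calls made by the dataclasses in this file, and show a minimal minidump-stackwalk
# JSON document that ProcessedCrash.generate(), defined further down, will accept.
# All concrete values are invented.
_example_stackwalk_json = {
    "pid": 1234,
    "main_module": 0,
    "crash_info": {"type": "SIGSEGV", "address": "0x0", "crashing_thread": 0, "assertion": None},
    "system_info": {"os": "Linux", "os_ver": "5.15.0", "cpu_arch": "amd64", "cpu_info": "family 6", "cpu_count": 8},
    "modules": [
        {
            "base_addr": "0x400000",
            "end_addr": "0x500000",
            "code_id": "4C4C44A1",
            "debug_file": "app",
            "debug_id": "4C4C44A10B3F11EC8AD90800200C9A66",
            "filename": "app",
            "version": "1.0.0",
            "missing_symbols": True,
        }
    ],
    "threads": [
        {
            "frame_count": 1,
            "frames": [
                {
                    "frame": 0,
                    "file": "main.cpp",
                    "function": "crash()",
                    "function_offset": "0x0",
                    "line": 42,
                    "module": "app",
                    "module_offset": "0x2a2b",
                    "offset": "0x402a2b",
                    "trust": "context",
                }
            ],
        }
    ],
    "crashing_thread": {"frames": [{"registers": {"rip": "0x402a2b"}}]},
}
# For example, ProcessedCrash.generate(_example_stackwalk_json).crash_reason.crash_type == "SIGSEGV".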
@staticmethod 117 | def generate_list(threads: list): 118 | res = [] 119 | 120 | for i in range(len(threads)): 121 | res.append( 122 | StackThread( 123 | thread_index=i, 124 | total_frames=threads[i].get("frame_count"), 125 | frames=ThreadFrame.generate_list(threads[i].get("frames")), 126 | ) 127 | ) 128 | 129 | res.sort(key=lambda x: x.thread_index) 130 | return res 131 | 132 | 133 | @dataclass 134 | class ProcessedCrash: 135 | crash_reason: CrashReason 136 | system: SystemInfo 137 | modules: [DumpModule] 138 | threads: [StackThread] 139 | 140 | main_module_index: int 141 | read_success: bool 142 | pid: int 143 | 144 | @property 145 | def modules_no_symbols(self) -> [DumpModule]: 146 | return [m for m in self.modules if m.missing_symbols] 147 | 148 | @property 149 | def main_module(self) -> DumpModule: 150 | return self.modules[self.main_module_index] 151 | 152 | @property 153 | def os_icon(self): 154 | if self.system.os_name == "Windows NT": 155 | return "fab fa-windows" 156 | if self.system.os_name == "Mac OS X": 157 | return "fab fa-apple" 158 | if self.system.os_name == "Linux": 159 | return "fab fa-linux" 160 | return "" 161 | 162 | @property 163 | def os_name(self): 164 | if self.system.os_name == "Windows NT": 165 | return "Windows" 166 | if self.system.os_name == "Mac OS X": 167 | return "macOS" 168 | if self.system.os_name == "Linux": 169 | return "Linux" 170 | return "" 171 | 172 | @staticmethod 173 | def generate(json: dict): 174 | res_reason = CrashReason.generate(json.get("crash_info")) 175 | res_threads = StackThread.generate_list(json.get("threads")) 176 | 177 | # Update first frame of crashing thread with registers. 178 | # The registers should always be in the first frame of the crashing thread 179 | crash_registers = json.get("crashing_thread").get("frames")[0]["registers"] 180 | res_threads[res_reason.crashing_thread].frames[0].registers = crash_registers 181 | 182 | return ProcessedCrash( 183 | read_success=True, 184 | crash_reason=res_reason, 185 | system=SystemInfo.generate(json.get("system_info", {})), 186 | modules=DumpModule.generate_list(json.get("modules", {})), 187 | threads=res_threads, 188 | main_module_index=json.get("main_module"), 189 | pid=json.get("pid"), 190 | ) 191 | -------------------------------------------------------------------------------- /crashserver/utility/sysinfo.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pathlib 3 | 4 | 5 | def get_directory_size(start_path): 6 | """ 7 | Traverse through all directories and folders within a path, and sum all the file size, 8 | within that path 9 | :param start_path: The path to search through 10 | :return: The file size in bytes 11 | """ 12 | total_size = 0 13 | for dirpath, dirnames, filenames in os.walk(start_path): 14 | for f in filenames: 15 | fp = os.path.join(dirpath, f) 16 | # skip if it is symbolic link 17 | if not os.path.islink(fp): 18 | total_size += os.path.getsize(fp) 19 | 20 | return total_size 21 | 22 | 23 | def get_filename_from_path(path): 24 | if path: 25 | path = path.replace("\\", "/") 26 | return pathlib.Path(path).name 27 | return None 28 | -------------------------------------------------------------------------------- /main-rq.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from rq import Connection, Worker 3 | from redis import Redis 4 | 5 | from crashserver.server import create_app 6 | from crashserver.server.models import Storage 7 | from 
crashserver.config import get_redis_url 8 | 9 | if __name__ == "__main__": 10 | app = create_app() 11 | with app.app_context(), Connection(Redis.from_url(get_redis_url())): 12 | Storage.load_storage_modules() 13 | Storage.init_targets() 14 | Worker("crashserver").work() 15 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | """ 2 | CrashServer 3 | 4 | Logging Setup from: https://pawamoy.github.io/posts/unify-logging-for-a-gunicorn-uvicorn-app/ 5 | """ 6 | # fmt: off 7 | import flask_migrate 8 | from gevent import monkey 9 | 10 | monkey.patch_all() 11 | # fmt: on 12 | 13 | import importlib.metadata as meta 14 | import logging 15 | import sys 16 | import os 17 | 18 | from werkzeug.middleware.proxy_fix import ProxyFix 19 | from gunicorn.app.base import BaseApplication 20 | from gunicorn.glogging import Logger 21 | from loguru import logger 22 | 23 | from crashserver.server.models import Storage 24 | from crashserver.server import create_app 25 | from crashserver.config import settings 26 | from crashserver import syscheck 27 | 28 | LOG_LEVEL = logging.getLevelName(os.environ.get("LOG_LEVEL", "INFO")) 29 | JSON_LOGS = True if os.environ.get("JSON_LOGS", "0") == "1" else False 30 | WORKERS = int(os.environ.get("GUNICORN_WORKERS", "5")) 31 | 32 | 33 | class InterceptHandler(logging.Handler): 34 | """ 35 | This InterceptHandler is given via loguru's documentation, and lets us intercept standard logging messages to be 36 | forwarded to loguru 37 | 38 | https://github.com/Delgan/loguru#entirely-compatible-with-standard-logging 39 | """ 40 | 41 | def emit(self, record): 42 | # Get corresponding Loguru level if it exists 43 | try: 44 | level = logger.level(record.levelname).name 45 | except ValueError: 46 | level = record.levelno 47 | 48 | # Find caller from where originated the logged message 49 | frame, depth = logging.currentframe(), 2 50 | while frame.f_code.co_filename == logging.__file__: 51 | frame = frame.f_back 52 | depth += 1 53 | logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage()) 54 | 55 | 56 | class StubbedGunicornLogger(Logger): 57 | """ 58 | This logger lets us override gunicorns logging configuration to be formatted how we choose 59 | """ 60 | 61 | def setup(self, cfg): 62 | handler = logging.NullHandler() 63 | self.error_logger = logging.getLogger("gunicorn.error") 64 | self.error_logger.addHandler(handler) 65 | self.access_logger = logging.getLogger("gunicorn.access") 66 | self.access_logger.addHandler(handler) 67 | self.error_logger.setLevel(LOG_LEVEL) 68 | self.access_logger.setLevel(LOG_LEVEL) 69 | 70 | 71 | class StandaloneApplication(BaseApplication): 72 | """Our Gunicorn application.""" 73 | 74 | def __init__(self, app, options=None): 75 | self.options = options or {} 76 | self.application = app 77 | super().__init__() 78 | 79 | def load_config(self): 80 | config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None} 81 | for key, value in config.items(): 82 | self.cfg.set(key.lower(), value) 83 | 84 | def load(self): 85 | return self.application 86 | 87 | 88 | if __name__ == "__main__": 89 | # Change to directory of this file 90 | os.chdir(os.path.abspath(os.path.dirname(__file__))) 91 | 92 | # Intercept and initialize loggers 93 | logger.remove() # Remove default logger 94 | intercept_handler = InterceptHandler() 95 | logging.root.setLevel(LOG_LEVEL) 96 | 
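# Editor's note on the interception setup above (descriptive only, nothing is executed
# here): per loguru's documentation, the shortest way to funnel every stdlib `logging`
# record through loguru is
#
#     logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
#
# The surrounding block does the same thing more selectively: logger.remove() above
# drops loguru's default sink, and the loop below attaches InterceptHandler to the
# gunicorn/werkzeug loggers (and any loggers already registered) so their records are
# re-emitted through the loguru sinks configured further down.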
logger.configure(handlers=[{"sink": sys.stdout, "serialize": JSON_LOGS}]) 97 | seen = set() 98 | for name in [*logging.root.manager.loggerDict.keys(), "gunicorn", "gunicorn.access", "gunicorn.error", "werkzeug"]: 99 | if name not in seen: 100 | seen.add(name.split(".")[0]) 101 | logging.getLogger(name).handlers = [intercept_handler] 102 | 103 | # fmt: off 104 | LOG_FORMAT = "[{time:YYYY-MM-DD HH:mm:ss}][{level}]: {message}" 105 | filter_app = lambda record: any(part in record["name"] for part in ["crashserver", "__main__"]) 106 | filter_access = lambda record: record["name"] == "gunicorn.glogging" and record["function"] == "access" 107 | # fmt: on 108 | 109 | config = { 110 | "handlers": [ 111 | {"sink": sys.stdout, "format": LOG_FORMAT, "colorize": True, "level": LOG_LEVEL}, 112 | {"sink": os.path.join(settings.storage.logs, "app.log"), "filter": filter_app, "format": LOG_FORMAT}, 113 | {"sink": os.path.join(settings.storage.logs, "access.log"), "filter": filter_access, "format": "{message}"}, 114 | ], 115 | } 116 | logger.configure(**config) 117 | 118 | syscheck.validate_all_settings() # Ensure application has a sane environment 119 | 120 | app = create_app() 121 | app.wsgi_app = ProxyFix(app.wsgi_app, x_proto=1, x_host=1) # Activate proxy pass detection to get real ip 122 | 123 | with app.app_context(): 124 | flask_migrate.upgrade() 125 | Storage.register_targets() 126 | Storage.init_targets() 127 | 128 | if os.environ.get("FLASK_DEBUG"): 129 | app.run(host="0.0.0.0", port=settings.flask.web_port, debug=True) 130 | exit(0) 131 | else: 132 | logger.info("Starting CrashServer v{}", meta.version("crashserver")) 133 | 134 | # Configure and run gunicorn 135 | options = { 136 | "bind": f"0.0.0.0:{settings.flask.web_port}", 137 | "workers": WORKERS, 138 | "accesslog": "-", 139 | "errorlog": "-", 140 | "logger_class": StubbedGunicornLogger, 141 | "worker_class": "gevent", 142 | } 143 | StandaloneApplication(app, options).run() 144 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "crashserver" 3 | version = "0.3.6" 4 | authors = ["James "] 5 | description = "A Google Crashpad server, made for open source projects" 6 | 7 | homepage = "https://github.com/jameskr97/CrashServer" 8 | repository = "https://github.com/jameskr97/CrashServer" 9 | 10 | readme = "README.md" 11 | include = ["res", "config"] 12 | 13 | [tool.poetry.dependencies] 14 | python = "^3.10.1" 15 | 16 | boto3 = "^1.20.24" 17 | click = "^8.0.1" 18 | dynaconf = "^3.1.5" 19 | email-validator = "^1.1.3" 20 | Flask-Babel = "^2.0.0" 21 | Flask-DebugToolbar = "^0.13.0" 22 | Flask-Limiter = "^1.4" 23 | Flask-Migrate = "^3.1.0" 24 | Flask = "^2.0.1" 25 | Flask_Login = "^0.5.0" 26 | Flask_SQLAlchemy = "^2.5.1" 27 | Flask_WTF = "^0.15.1" 28 | gevent = "^21.8.0" 29 | gunicorn = "^20.1.0" 30 | humanize = "^3.11.0" 31 | loguru = "^0.5.3" 32 | natsort = "^8.0.0" 33 | psycopg2-binary = "^2.9.1" 34 | python-magic = "^0.4.24" 35 | requests = "^2.26.0" 36 | rq = "^1.10.0" 37 | SQLAlchemy = "^1.4.22" 38 | SQLAlchemy_Utils = "^0.37.8" 39 | toml = "^0.10.2" 40 | Werkzeug = "2.0.1" 41 | WTForms = "^2.3.3" 42 | 43 | [tool.poetry.dev-dependencies] 44 | black = "^21.12b0" 45 | pytest = "^6.2.5" 46 | minio = "^7.1.3" 47 | 48 | [build-system] 49 | requires = ["poetry-core>=1.0.0"] 50 | build-backend = "poetry.core.masonry.api" 51 | 52 | [tool.black] 53 | line-length = 180 54 | 55 | 
[tool.pytest.ini_options] 56 | log_cli = true 57 | log_cli_level = 10 58 | minversion = "6.0" 59 | addopts = "" 60 | testpaths = ["tests"] 61 | filterwarnings = ["ignore::DeprecationWarning:(flask_limiter|jinja2|flask_debugtoolbar):"] -------------------------------------------------------------------------------- /res/assets/crashserver_banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/assets/crashserver_banner.png -------------------------------------------------------------------------------- /res/assets/crashserver_logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /res/bin/linux/README.md: -------------------------------------------------------------------------------- 1 | Processing Executables 2 | ====================== 3 | 4 | These files are the bread-and-butter of CrashServer: 5 | 6 | - `minidump_stackwalk`: The original binary from Google Breakpad to process a minidump file 7 | - `stackwalker`: Mozilla's modification of `minidump_stackwalk`, which outputs the processed data as JSON and provides more detail 8 | - Available at: https://github.com/mozilla-services/minidump-stackwalk 9 | - `dump_syms`: Mozilla's rewrite of the Google Breakpad symbol dumper, which can dump symbols from macOS (DWARF), Linux (ELF), or Windows (PDB) debug files. 10 | - Available at: https://github.com/mozilla/dump_syms 11 | 12 | -------------------------------------------------------------------------------- /res/bin/linux/dump_syms: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/bin/linux/dump_syms -------------------------------------------------------------------------------- /res/bin/linux/minidump_stackwalk: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/bin/linux/minidump_stackwalk -------------------------------------------------------------------------------- /res/bin/linux/stackwalker: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/bin/linux/stackwalker -------------------------------------------------------------------------------- /res/static/css/prism.css: -------------------------------------------------------------------------------- 1 | /* PrismJS 1.25.0 2 | https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript&plugins=line-highlight+line-numbers */ 3 | /** 4 | * prism.js tomorrow night eighties for JavaScript, CoffeeScript, CSS and HTML 5 | * Based on https://github.com/chriskempson/tomorrow-theme 6 | * @author Rose Pritchard 7 | */ 8 | @import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono&display=swap'); 9 | code[class*="language-"], 10 | pre[class*="language-"] { 11 | color: #ccc; 12 | background: none; 13 | font-family: "JetBrains Mono", monospace; 14 | font-size: 0.8rem; 15 | text-align: left; 16 | white-space: pre; 17 | word-spacing: normal; 18 | word-break: normal; 19 | word-wrap: normal; 20 | line-height: 1.5; 21 | 22 | -moz-tab-size: 4; 23 | -o-tab-size: 4; 24 | 
tab-size: 4; 25 | 26 | -webkit-hyphens: none; 27 | -moz-hyphens: none; 28 | -ms-hyphens: none; 29 | hyphens: none; 30 | 31 | } 32 | 33 | /* Code blocks */ 34 | pre[class*="language-"] { 35 | padding: 1em; 36 | margin: .5em 0; 37 | overflow: auto; 38 | } 39 | 40 | :not(pre) > code[class*="language-"], 41 | pre[class*="language-"] { 42 | background: #1d1f21; 43 | } 44 | 45 | /* Inline code */ 46 | :not(pre) > code[class*="language-"] { 47 | padding: .1em; 48 | border-radius: .3em; 49 | white-space: normal; 50 | } 51 | 52 | .token.comment, 53 | .token.block-comment, 54 | .token.prolog, 55 | .token.doctype, 56 | .token.cdata { 57 | color: #999; 58 | } 59 | 60 | .token.punctuation { 61 | color: #ccc; 62 | } 63 | 64 | .token.tag, 65 | .token.attr-name, 66 | .token.namespace, 67 | .token.deleted { 68 | color: #e2777a; 69 | } 70 | 71 | .token.function-name { 72 | color: #6196cc; 73 | } 74 | 75 | .token.boolean, 76 | .token.number, 77 | .token.function { 78 | color: #f08d49; 79 | } 80 | 81 | .token.property, 82 | .token.class-name, 83 | .token.constant, 84 | .token.symbol { 85 | color: #f8c555; 86 | } 87 | 88 | .token.selector, 89 | .token.important, 90 | .token.atrule, 91 | .token.keyword, 92 | .token.builtin { 93 | color: #cc99cd; 94 | } 95 | 96 | .token.string, 97 | .token.char, 98 | .token.attr-value, 99 | .token.regex, 100 | .token.variable { 101 | color: #7ec699; 102 | } 103 | 104 | .token.operator, 105 | .token.entity, 106 | .token.url { 107 | color: #67cdcc; 108 | } 109 | 110 | .token.important, 111 | .token.bold { 112 | font-weight: bold; 113 | } 114 | .token.italic { 115 | font-style: italic; 116 | } 117 | 118 | .token.entity { 119 | cursor: help; 120 | } 121 | 122 | .token.inserted { 123 | color: green; 124 | } 125 | 126 | pre[data-line] { 127 | position: relative; 128 | padding: 1em 0 1em 3em; 129 | } 130 | 131 | .line-highlight { 132 | position: absolute; 133 | left: 0; 134 | right: 0; 135 | padding: inherit 0; 136 | margin-top: 1em; /* Same as .prism’s padding-top */ 137 | 138 | background: hsla(24, 20%, 50%,.08); 139 | background: linear-gradient(to right, hsla(24, 20%, 50%,.1) 70%, hsla(24, 20%, 50%,0)); 140 | 141 | pointer-events: none; 142 | 143 | line-height: inherit; 144 | white-space: pre; 145 | } 146 | 147 | @media print { 148 | .line-highlight { 149 | /* 150 | * This will prevent browsers from replacing the background color with white. 151 | * It's necessary because the element is layered on top of the displayed code. 
152 | */ 153 | -webkit-print-color-adjust: exact; 154 | color-adjust: exact; 155 | } 156 | } 157 | 158 | .line-highlight:before, 159 | .line-highlight[data-end]:after { 160 | content: attr(data-start); 161 | position: absolute; 162 | top: .4em; 163 | left: .6em; 164 | min-width: 1em; 165 | padding: 0 .5em; 166 | background-color: hsla(24, 20%, 50%,.4); 167 | color: hsl(24, 20%, 95%); 168 | font: bold 65%/1.5 sans-serif; 169 | text-align: center; 170 | vertical-align: .3em; 171 | border-radius: 999px; 172 | text-shadow: none; 173 | box-shadow: 0 1px white; 174 | } 175 | 176 | .line-highlight[data-end]:after { 177 | content: attr(data-end); 178 | top: auto; 179 | bottom: .4em; 180 | } 181 | 182 | .line-numbers .line-highlight:before, 183 | .line-numbers .line-highlight:after { 184 | content: none; 185 | } 186 | 187 | pre[id].linkable-line-numbers span.line-numbers-rows { 188 | pointer-events: all; 189 | } 190 | pre[id].linkable-line-numbers span.line-numbers-rows > span:before { 191 | cursor: pointer; 192 | } 193 | pre[id].linkable-line-numbers span.line-numbers-rows > span:hover:before { 194 | background-color: rgba(128, 128, 128, .2); 195 | } 196 | 197 | pre[class*="language-"].line-numbers { 198 | position: relative; 199 | padding-left: 3.8em; 200 | counter-reset: linenumber; 201 | } 202 | 203 | pre[class*="language-"].line-numbers > code { 204 | position: relative; 205 | white-space: inherit; 206 | } 207 | 208 | .line-numbers .line-numbers-rows { 209 | position: absolute; 210 | pointer-events: none; 211 | top: 0; 212 | font-size: 100%; 213 | left: -3.8em; 214 | width: 3em; /* works for line-numbers below 1000 lines */ 215 | letter-spacing: -1px; 216 | border-right: 1px solid #999; 217 | 218 | -webkit-user-select: none; 219 | -moz-user-select: none; 220 | -ms-user-select: none; 221 | user-select: none; 222 | 223 | } 224 | 225 | .line-numbers-rows > span { 226 | display: block; 227 | counter-increment: linenumber; 228 | } 229 | 230 | .line-numbers-rows > span:before { 231 | content: counter(linenumber); 232 | color: #999; 233 | display: block; 234 | padding-right: 0.8em; 235 | text-align: right; 236 | } 237 | -------------------------------------------------------------------------------- /res/static/img/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/img/apple-touch-icon.png -------------------------------------------------------------------------------- /res/static/img/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/img/favicon-32x32.png -------------------------------------------------------------------------------- /res/static/webfonts/fa-brands-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-brands-400.eot -------------------------------------------------------------------------------- /res/static/webfonts/fa-brands-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-brands-400.ttf 
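Editor's aside, stepping back to the storage plugin system under crashserver/server/storage/ shown earlier: a new backend is simply a module dropped into crashserver/server/storage/modules/ that provides a class satisfying the StorageBackend protocol, a companion class satisfying StorageMeta, and a top-level register() function for loader.load_plugins() to call. Below is a minimal sketch modeled on filesystem.py; the "nullstore" key and its do-nothing behaviour are invented purely for illustration.

    import typing
    from pathlib import Path

    from crashserver.server.storage import storage_factory


    class NullStorage:
        """Accepts every write and never returns data; illustration only."""

        def __init__(self, config: dict):
            self.config = config

        def init(self) -> None:
            pass  # nothing to set up

        def create(self, path: Path, file_contents: bytes) -> bool:
            return True  # pretend the write succeeded

        def read(self, path: Path) -> typing.Optional[typing.IO]:
            return None  # behave as if the file is not present

        def delete(self, path: Path) -> bool:
            return True


    class NullStorageMeta:
        @staticmethod
        def ui_name() -> str:
            return "Null"

        @staticmethod
        def default_enabled() -> bool:
            return False

        @staticmethod
        def default_primary():
            return False

        @staticmethod
        def default_config() -> dict:
            return {}

        @staticmethod
        def web_config() -> dict:
            return {"options": []}

        @staticmethod
        def validate_credentials(config) -> bool:
            return True


    def register() -> None:
        storage_factory.register("nullstore", NullStorage, NullStorageMeta)

If such a module were added, Storage.register_targets() would insert a "nullstore" row with the default config on the next startup, and Storage.init_targets() would instantiate it once enabled.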
-------------------------------------------------------------------------------- /res/static/webfonts/fa-brands-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-brands-400.woff -------------------------------------------------------------------------------- /res/static/webfonts/fa-brands-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-brands-400.woff2 -------------------------------------------------------------------------------- /res/static/webfonts/fa-regular-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-regular-400.eot -------------------------------------------------------------------------------- /res/static/webfonts/fa-regular-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-regular-400.ttf -------------------------------------------------------------------------------- /res/static/webfonts/fa-regular-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-regular-400.woff -------------------------------------------------------------------------------- /res/static/webfonts/fa-regular-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-regular-400.woff2 -------------------------------------------------------------------------------- /res/static/webfonts/fa-solid-900.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-solid-900.eot -------------------------------------------------------------------------------- /res/static/webfonts/fa-solid-900.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-solid-900.ttf -------------------------------------------------------------------------------- /res/static/webfonts/fa-solid-900.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-solid-900.woff -------------------------------------------------------------------------------- /res/static/webfonts/fa-solid-900.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/res/static/webfonts/fa-solid-900.woff2 -------------------------------------------------------------------------------- /res/templates/app/create.html: 
-------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | {% block content %} 4 |

Create Project

5 |
6 | 7 |
8 |
9 |

{{ _("Each project represents a piece of software capable of uploading minidump files after they crash.") }}

10 | 11 | {{ form.csrf_token }} 12 |
13 | {{ form.title.label(class="form-label") }} 14 | {{ form.title(class="form-control") }} 15 |
16 |
17 |
18 | {{ form.project_type.label(class="form-label") }} 19 |

{{ _("Project type determines symbol storage requirements, and minidump annotation recommendations.") }}

20 |
    21 |
  • 22 | {{ _("Simple") }}: 23 | {{ _("A version number is not required to upload symbols. All symbols uploaded will be stored in their correct location to allow for symbolication, though this won't relate an application version.")}} 24 |
  • 25 |
  • 26 | {{ _("Versioned") }}: 27 | {{ _("This will require a version number to be uploaded with the symbol file. Symbol files uploaded without a version number will be rejected from the server.") }} 28 |
  • 29 |
30 | {{ form.project_type(class="form-control") }} 31 |
32 |
33 | 34 |
35 |
36 |
37 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/app/dashboard.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | {% block content %} 4 |

{{ project.project_name }}

5 |
6 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/app/home.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | 4 | {% macro project_entry(project) %} 5 |
6 |
7 |
{{ project.project_name }}
8 |
9 |
{{ project.symbol_count }} Symbols
10 |
{{ project.minidump_count }} Crashes
11 |
{{ project.unprocessed_count }} Unprocessed
12 |
13 | {# #} 14 |
15 |
16 | {% if project.minidump_count != 0 %} 17 |
18 | 19 |
20 | {% else %} 21 |
22 |

23 | 24 | {{ _("No crashes have been uploaded for this project.") }} 25 |

26 |
27 | {% endif %} 28 |
29 |
30 | {% endmacro %} 31 | 32 | {% block content %} 33 |

{{ _("Symbol Storage and Minidump Decode") }}

34 |
35 | {% if apps|count != 0 %} 36 |
37 | {% for app in apps %} 38 | {{ project_entry(app) }} 39 | {% endfor %} 40 |
41 | {% else %} 42 |
43 |
44 |
45 |

46 |

{{ _("No projects have been created yet.") }}

47 |
48 |
49 |
50 | {% endif %} 51 | 52 | 62 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/app/settings.macros.html: -------------------------------------------------------------------------------- 1 | {% macro info_line(title, value) %} 2 |
3 |
{{ title }}
4 |
{{ value }}
5 |
6 | {% endmacro %} 7 | 8 | {% macro api_key(title, key) %} 9 |
10 |
{{ title }}
11 | 14 |
15 | {% endmacro %} 16 | 17 | {% macro generate_project_row(p) %} 18 |
19 |
20 |

{{ p.project_name }}

21 | | {{ p.project_type.value|capitalize }} | {{ p.symbol_count }} {{ _("Symbols") }} | {{ p.minidump_count }} {{ _("Minidumps") }} 22 |
23 |
24 |
25 |
26 |
API Keys
27 | {{ api_key(_("Symbol key"), p.symbol_api_key) }} 28 | {{ api_key(_("Minidump key"), p.minidump_api_key) }} 29 |
30 |
31 | 32 | {# MOBILE BUTTONS #} 33 |
34 |
35 | 36 | 37 | 38 |
39 |
40 | 41 | {# DESKTOP FULLSCREEN BUTTONS #} 42 |
43 |
44 | 47 | 50 | 53 |
54 |
55 |
56 | 57 | 58 | {# BUTTON MODALS #} 59 | {# RENAME MODALS TODO: Is this generating too many modals for what is needed? #} 60 | 83 |
84 | {% endmacro %} 85 | 86 | {% macro generate_form_input(form_item) %} 87 |
88 |
{{ form_item.label(class="col-form-label") }}
89 |
{{ form_item(class="col-sm-10 form-control", **kwargs) }}
90 |
91 | {% endmacro %} -------------------------------------------------------------------------------- /res/templates/app/upload.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | {% block content %} 4 |

{{ _("Upload") }} Minidump

5 |
6 |
7 | {{ form.csrf_token }} 8 | 9 | 10 |
11 |
12 | 19 |
20 |
21 |
22 | 23 | 24 |
25 | 28 | 29 |
30 |
31 | {{ form.minidump.label(class="form-label") }} 32 | {{ form.minidump(class="form-control", disabled="true", id="minidump-upload") }} 33 |
34 |
35 | 36 |
37 |
38 |
39 |
40 | 65 | {% endblock %} 66 | -------------------------------------------------------------------------------- /res/templates/auth/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | 4 | {% block content %} 5 |

Login

6 |
7 |
8 |
9 | {{ form.csrf_token }} 10 |
11 | {{ form.email.label(class="form-label") }} 12 | {{ form.email(class="form-control") }} 13 |
14 |
15 | {{ form.password.label(class="form-label") }} 16 | {{ form.password(class="form-control") }} 17 |
18 |
19 | {{ form.remember_me(class="form-check-input") }} 20 | {{ form.remember_me.label(class="form-check-label") }} 21 |
22 |
23 | 24 |
25 |
26 |
27 |
28 | {% endblock %} 29 | -------------------------------------------------------------------------------- /res/templates/base/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | {% block title %}Crash Server{% endblock %} 7 | 8 | 9 | 10 | 11 | 12 | 15 | 16 | 17 | 18 | 19 | 20 |
21 |
22 | 25 |
26 | 29 |
30 | {% include 'base/flash.html' %} 31 | {% block content %} 32 | {% endblock %} 33 |
34 |
35 |
36 |
37 | 38 | {##} 39 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /res/templates/base/flash.html: -------------------------------------------------------------------------------- 1 | {% macro flash_message(category, message) %} 2 | 5 | {% endmacro %} 6 | 7 | {% with messages = get_flashed_messages(with_categories=true) %} 8 | {% if messages %} 9 | {% for category, message in messages %} 10 | {% if category == "error" %} 11 | {{ flash_message("danger", message) }} 12 | {% elif category == "message" %} 13 | {{ flash_message("primary", message) }} 14 | {% else %} 15 | {{ flash_message(category, message) }} 16 | {% endif %} 17 | {% endfor %} 18 | {% endif %} 19 | {% endwith %} -------------------------------------------------------------------------------- /res/templates/base/navbar.html: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /res/templates/base/sidebar.html: -------------------------------------------------------------------------------- 1 | {% macro single_navbar_item(text, icon, link) %} 2 | 8 | {% endmacro %} 9 | 10 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /res/templates/crash/crash.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | {% macro GenDesktopRow(dump, project) %} 4 | 5 | {{ project }} 6 | 7 |
8 | {% if dump.json %} 9 | 10 | {{ _("View") }} 11 | {% if not dump.symbolicated %} 12 | {{ _("Missing Symbols") }} 13 | {% endif %} 14 | {{ dump.json.crash_reason.crash_type }} 15 | {% else %} 16 | 17 | {{ _("View") }} 18 | {% if not dump.decode_task_complete %} 19 | {{ _("Processing...")}} 20 | 21 | {% endif %} 22 | {% endif %} 23 |
24 | 25 | 26 | 27 | {{ humantime(dump.date_created) }} 28 | 29 | 30 | {% if current_user.is_authenticated %} 31 | 32 | 33 | 54 | 55 | {% endif %} 56 | 57 | {% endmacro %} 58 | 59 | {% macro GenMobileRow(dump, project) %} 60 |
61 |
62 | {% if dump.json %} 63 | 64 | {% else %} 65 | 66 | {% endif %} 67 | {{ project }} 68 | {{ _("View") }} 69 |
70 |
71 | {% if not dump.json %} 72 | {{ _("Processing...") }} 73 | 74 | {% else %} 75 | {% if not dump.symbolicated %} 76 | {{ _("Missing Symbols") }} 77 | {% endif %} 78 | {{ dump.json.crash_reason.crash_type }} 79 | {% endif %} 80 |
81 |
82 | {{ humantime(dump.date_created) }} 83 |
84 |
85 | {% endmacro %} 86 | 87 | {% block content %} 88 |

{{ _("Crash Reports") }}

89 |
90 | {% if dumps.items %} 91 |
92 |
93 | 94 |
95 |
96 | 97 | 101 |
102 |
103 |
104 | 105 | 106 |
107 | {% for row in dumps.items %} 108 | {{ GenMobileRow(row[0], row[1]) }} 109 | {% endfor %} 110 |
111 | 112 | 113 |
114 | 115 | 116 | 117 | 118 | 119 | 120 | {% if current_user.is_authenticated %} 121 | 122 | {% endif %} 123 | 124 | 125 | 126 | {% for row in dumps.items %} 127 | {{ GenDesktopRow(row[0], row[1]) }} 128 | {% endfor %} 129 | 130 |
{{ _("App Name") }}Minidump{{ _("Date Uploaded") }}{{ _("Actions") }}
131 |
132 | 133 | {# PAGINATION TABS #} 134 | 154 | 155 | 158 | {% else %} 159 |
160 |
161 |
162 |

163 |

{{ _("No minidumps have been uploaded yet.") }}

164 |
165 |
166 |
167 | {% endif %} 168 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/errors/404.html: -------------------------------------------------------------------------------- 1 | {% extends 'base/base.html' %} 2 | 3 | {% block content %} 4 |
5 |

404

6 |

{{ _("This page isn't available. Sorry about that.") }}

7 |
8 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/errors/500.html: -------------------------------------------------------------------------------- 1 | {% extends 'base/base.html' %} 2 | 3 | {% block content %} 4 |
5 |

500

6 |

{{ _("Sorry, something went wrong.") }}

7 |
8 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/symbols/symbol-list-no-syms.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 |

5 |

{{ _("No symbols have been uploaded for this project.") }}

6 |
7 |
8 |
-------------------------------------------------------------------------------- /res/templates/symbols/symbol-list-simple.html: -------------------------------------------------------------------------------- 1 | {% macro symbol_count_list(os, icon) %} 2 |
  • {{ stats["sym_count"][os] }} {{ os|capitalize }} {{ _("Symbols") }}
  • 3 | {% endmacro %} 4 | 5 | {% macro desktop_row(symbol) %} 6 | 7 | 8 | {{ symbol.build.module_id }} 9 | {{ symbol.build.build_id }} 10 | {{ symbol.arch }} 11 | {{ humanize.naturalsize(symbol.file_size_bytes) }} 12 | {{ symbol.date_created.strftime("%F %T") }} 13 | 14 | {% endmacro %} 15 | 16 | {% macro mobile_row(symbol) %} 17 |
    18 |
    19 | 20 | {{ symbol.build.module_id }} | {{ symbol.arch }} 21 |
    22 |

    {{ _("File Size") }}: {{ humanize.naturalsize(symbol.file_size_bytes) }}

    23 |

    {{ _("Uploaded on") }}: {{ symbol.date_created.strftime("%c") }}

    24 |
    25 | {% endmacro %} 26 | 27 | 28 |
    29 | 30 |
    31 |

    {{ project.project_name }}

    32 |
      33 | {{ symbol_count_list("mac", "fa-apple") }} 34 | {{ symbol_count_list("linux", "fa-linux") }} 35 | {{ symbol_count_list("windows", "fa-windows") }} 36 |
    37 | 38 | 39 |

    40 |
    41 | 42 |
    43 | {% for symbol in symbols %} 44 | {{ mobile_row(symbol) }} 45 | {% endfor %} 46 |
    47 | 48 | {# Destop Table #} 49 |
    50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | {% for symbol in symbols %} 63 | {{ desktop_row(symbol) }} 64 | {% endfor %} 65 | 66 |
    {{ _("Module ID") }}Build ID{{ _("Architecture") }}{{ _("File Size") }}{{ _("Date Uploaded") }}
    67 |
    68 |
    -------------------------------------------------------------------------------- /res/templates/symbols/symbol-list-versioned.html: -------------------------------------------------------------------------------- 1 | {% macro symbol_count_list(os, icon) %} 2 |
  • {{ stats["sym_count"][os] }} {{ os|capitalize }} {{ _("Symbols") }}
  • 3 | {% endmacro %} 4 | 5 | {% macro mobile_sym_item(symbol) %} 6 |
    7 |
    8 | 9 | {{ symbol.build.module_id }} | {{ symbol.arch }} 10 |
    11 |
      12 |
    • {{ _("File Size") }}: {{ humanize.naturalsize(symbol.file_size_bytes) }}
    • 13 |
    • {{ _("Uploaded") }}: {{ symbol.date_created.strftime("%c") }}
    • 14 | {#
    • {{ _("Uploaded") }}: {{ symbol.date_created.strftime("%F %T %Z") }}
    • #} 15 |
    16 |
    17 | {% endmacro %} 18 | 19 | {% macro desktop_sym_item(symbol) %} 20 | 21 | 22 | {{ symbol.build.module_id }} 23 | {{ symbol.build.build_id }} 24 | {{ symbol.arch }} 25 | {{ humanize.naturalsize(symbol.file_size_bytes) }} 26 | {{ symbol.date_created.strftime("%F %T") }} 27 | 28 | {% endmacro %} 29 | 30 |
    31 | 32 |
    33 |

    {{ project.project_name }}

    34 |
      35 |
    • {{ sym_dict.keys()|length }} {{ _("Different Versions") }}
    • 36 | {{ symbol_count_list("mac", "fa-apple") }} 37 | {{ symbol_count_list("linux", "fa-linux") }} 38 | {{ symbol_count_list("windows", "fa-windows") }} 39 |
    40 | 41 | 42 |

    43 |
    44 | 45 |
    46 |
    47 | {% for version in sym_dict %} 48 | {% set toggle_id = version|replace(".", "_") %} 49 |
    50 |

    51 | 54 |

    55 |
    56 |
    57 |
    58 |
    59 | 60 | 61 |
    62 | {% for symbol in sym_dict[version] %} 63 | {{ mobile_sym_item(symbol) }} 64 | {% endfor %} 65 |
    66 | 67 | 68 |
    69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | {% for symbol in sym_dict[version] %} 82 | {{ desktop_sym_item(symbol) }} 83 | {% endfor %} 84 | 85 |
    {{ _("Module ID") }}Build ID{{ _("Architecture") }}{{ _("File Size") }}{{ _("Date Uploaded") }}
    86 |
    87 |
    88 | 89 |
    90 |
    91 | {% endfor %} 92 |
    93 |
    94 |
    95 | -------------------------------------------------------------------------------- /res/templates/symbols/symbol-upload.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | {% block content %} 4 |

    {{ _("Symbol Upload") }}

    5 |
    6 | 7 |
    8 |
    9 |
    10 | {{ form.csrf_token }} 11 | 12 | {# Project selector #} 13 |
    14 | 15 | 22 |
    23 |
    24 | 25 | {# Version selector #} 26 |
    27 | 28 | 29 | 30 | 31 |
    32 | 33 | {# File Upload #} 34 |
    35 | {{ form.symbol.label(class="form-label") }} 36 | 39 | {{ form.symbol(class="form-control", id="symbol-upload") }} 40 |
    41 | 42 | {# File details #} 43 |
    44 |
      45 |
    • {{ _("File Size") }}:
    • 46 |
    • {{ _("Operating System") }}:
    • 47 |
    • {{ _("Build") }}:
    • 48 |
    • {{ _("Module ID") }}:
    • 49 |
    50 |
    51 | 52 | {# Submit Button #} 53 |
    54 | 55 |
    56 |
    57 |
    58 |
    59 | 60 | 139 | 140 | {% endblock %} -------------------------------------------------------------------------------- /res/templates/symbols/symbols.html: -------------------------------------------------------------------------------- 1 | {% extends "base/base.html" %} 2 | 3 | {% macro project_selector() %} 4 |
    5 |
    6 | {% if not projects %} 7 |

    8 |

    {{ _("No projects have been created yet.") }}

    9 |
    10 | {% if current_user.is_authenticated %} 11 | {% autoescape false %} 12 |

    {{ _("Click %(open)shere%(close)s to create a new project.", open='' % url_for("views.project_create"), close="")|safe }}

    13 | {% endautoescape %} 14 | {% else %} 15 |

    Administrators can log in to create a project.

    16 | {% endif %} 17 | 18 | {% else %} 19 | 25 | {% endif %} 26 | 27 |
    28 |
    29 | {% endmacro %} 30 | 31 | {% block content %} 32 |

    {{ _("Symbols") }}

    33 |
    34 | {{ project_selector() }} 35 |
    36 | 37 | {# Contents of symbols/symbol-list.html, populated by javascript api query #} 38 |
    39 |
    40 | {% endblock %} -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameskr97/CrashServer/5ca7d6000789791f4fd1dff9bde6eefc43abe09f/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import flask_migrate 2 | import pytest 3 | 4 | from crashserver.config import settings 5 | from crashserver.server import create_app, db 6 | from crashserver.server.models import User, Storage 7 | 8 | 9 | @pytest.fixture(scope="session", autouse=True) 10 | def setup_session(client): 11 | # Create Database 12 | flask_migrate.upgrade() 13 | 14 | # Create default user 15 | user = User(email=settings.login.email) 16 | user.set_password(settings.login.passwd) 17 | db.session.add(user) 18 | db.session.commit() 19 | 20 | yield # Do the tests 21 | 22 | # Delete default user 23 | db.session.delete(User.query.filter_by(email=settings.login.email).first()) 24 | db.session.commit() 25 | 26 | 27 | @pytest.fixture(scope="session") 28 | def client(): 29 | """Create a flask test client with database access""" 30 | settings.configure(FORCE_ENV_FOR_DYNACONF="testing") 31 | flask_app = create_app() 32 | flask_app.config.configure(FORCE_ENV_FOR_DYNACONF="testing") 33 | 34 | with flask_app.test_client() as testing_client: # Create a test client using the Flask application configured for testing 35 | with flask_app.app_context(): # Establish an application context 36 | yield testing_client # this is where the testing happens! 37 | -------------------------------------------------------------------------------- /tests/functional/test_auth.py: -------------------------------------------------------------------------------- 1 | from tests.util import login, logout 2 | from crashserver.config import settings 3 | 4 | 5 | class TestLogin: 6 | def test_user_login(self, client): 7 | res = login(client, settings.login.email, settings.login.passwd) 8 | assert b"Logged in" in res.data 9 | 10 | def test_user_logout(self, client): 11 | login(client, settings.login.email, settings.login.passwd) 12 | res = logout(client) 13 | assert b"Logged out" in res.data 14 | 15 | def test_user_login_bad_user(self, client): 16 | res = login(client, f"{settings.login.email}x", settings.login.passwd) 17 | assert b"Invalid email or password" in res.data 18 | 19 | def test_user_login_bad_pass(self, client): 20 | res = login(client, settings.login.email, f"{settings.login.passwd}x") 21 | assert b"Invalid email or password" in res.data 22 | -------------------------------------------------------------------------------- /tests/functional/test_storage.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | from minio import Minio 5 | 6 | from crashserver.server import db 7 | from crashserver.server.models import Storage 8 | 9 | 10 | def init_s3generic_config(bucket_name): 11 | # Store default credentials in database 12 | s3 = db.session.query(Storage).filter_by(key="s3generic").first() 13 | s3.config = { 14 | "aws_access_key_id": "cs-access-key", 15 | "aws_secret_access_key": "cs-secret-key", 16 | "endpoint_url": "http://minio:9000", 17 | "bucket_name": bucket_name, 18 | "region_name": "", 19 | } 20 | db.session.commit() 21 | 22 | 23 | def 
init_s3generic_client(bucket_name): 24 | client = Minio("minio:9000", "cs-access-key", "cs-secret-key", secure=False) 25 | 26 | # Create bucket if it doesn't exist 27 | if not client.bucket_exists(bucket_name): 28 | client.make_bucket(bucket_name) 29 | 30 | # Delete all objects 31 | for obj in client.list_objects(bucket_name): 32 | client.remove_object(bucket_name, obj.object_name) 33 | 34 | return client 35 | 36 | 37 | class StorageData: 38 | def setup_class(self): 39 | class SampleFile: 40 | def __init__(self, filename, file_content): 41 | self.path = Path(filename) 42 | self.content = file_content 43 | 44 | self.file_all = SampleFile("all_platforms.txt", b"ThisIsTheSavedFileContent!\n") 45 | self.file_s3 = SampleFile("s3only.txt", b"ThisFileIsOnS3Only!\n") 46 | self.file_fs = SampleFile("fsonly.txt", b"ThisFileIsOnFilesystemOnly!\n") 47 | 48 | @staticmethod 49 | def disable_backends_except(backends: list): 50 | # Disable all backends except filesystem 51 | res = db.session.query(Storage).all() 52 | for r in res: 53 | r.is_enabled = True if r.key in backends else False 54 | 55 | @staticmethod 56 | def get_config_data(backend): 57 | res = db.session.query(Storage).filter_by(key=backend).first() 58 | return res.config 59 | 60 | 61 | class TestFilesystem(StorageData): 62 | def setup_class(self): 63 | super().setup_class(self) 64 | StorageData.disable_backends_except(["filesystem"]) 65 | self.storage_config = StorageData.get_config_data("filesystem") 66 | 67 | # Init storage modules 68 | Storage.register_targets() 69 | Storage.init_targets() 70 | 71 | def test_fileio_save(self): 72 | # Create file 73 | Storage.create(self.file_all.path, self.file_all.content) 74 | 75 | # Ensure file exists 76 | assert Path(self.storage_config.get("path"), self.file_all.path).exists() 77 | 78 | def test_fileio_read(self): 79 | # Read file 80 | with Storage.retrieve(self.file_all.path) as file: 81 | data = file.read() 82 | assert data == self.file_all.content 83 | 84 | def test_fileio_delete(self): 85 | Storage.delete(self.file_all.path) 86 | 87 | # Ensure file deleted 88 | assert not Path(self.storage_config.get("path"), self.file_all.path).exists() 89 | 90 | 91 | class TestS3(StorageData): 92 | def setup_class(self): 93 | super().setup_class(self) 94 | self.bucket_name = "crashserver" 95 | 96 | StorageData.disable_backends_except(["s3generic"]) 97 | init_s3generic_config(self.bucket_name) 98 | 99 | # Init storage modules 100 | Storage.register_targets() 101 | Storage.init_targets() 102 | 103 | # Create bucket if not existent 104 | self.client = init_s3generic_client(self.bucket_name) 105 | 106 | def test_fileio_create(self): 107 | Storage.create(self.file_s3.path, self.file_s3.content) # Create file 108 | assert self.client.stat_object(self.bucket_name, str(self.file_s3.path)) # Ensure file exists 109 | 110 | def test_fileio_retrieve(self): 111 | data = Storage.retrieve(self.file_s3.path).read() 112 | assert data == self.file_s3.content 113 | 114 | def test_fileio_removed(self): 115 | Storage.delete(self.file_s3.path) 116 | 117 | pytest.raises(FileNotFoundError, Storage.retrieve, self.file_s3.path) 118 | 119 | 120 | class TestMultiBackend(StorageData): 121 | def setup_class(self): 122 | super().setup_class(self) 123 | self.testing_backends = ["s3generic", "filesystem"] 124 | self.bucket_name = "crashserver" 125 | self.fs_config = StorageData.get_config_data("filesystem") 126 | 127 | StorageData.disable_backends_except(self.testing_backends) 128 | init_s3generic_config(self.bucket_name) 129 | 130 | # Init 
storage modules 131 | Storage.register_targets() 132 | Storage.init_targets() 133 | 134 | # Create bucket if not existent 135 | self.client = init_s3generic_client(self.bucket_name) 136 | 137 | def test_multi_create(self): 138 | # Create file 139 | Storage.create(self.file_all.path, self.file_all.content) 140 | Storage.create(self.file_fs.path, self.file_fs.content, "filesystem") 141 | Storage.create(self.file_s3.path, self.file_s3.content, "s3generic") 142 | 143 | # Ensure files exists 144 | assert self.client.stat_object(self.bucket_name, str(self.file_all.path)) 145 | assert self.client.stat_object(self.bucket_name, str(self.file_s3.path)) 146 | assert Path(self.fs_config.get("path"), self.file_all.path).exists() 147 | assert Path(self.fs_config.get("path"), self.file_fs.path).exists() 148 | 149 | def test_multi_retrieve(self): 150 | # Attempt reading all stored files 151 | f_all = Storage.retrieve(self.file_all.path).read() 152 | f_ffs = Storage.retrieve(self.file_fs.path).read() 153 | f_fs3 = Storage.retrieve(self.file_s3.path).read() 154 | 155 | # Ensure Content Matches 156 | assert f_all == self.file_all.content 157 | assert f_ffs == self.file_fs.content 158 | assert f_fs3 == self.file_s3.content 159 | 160 | def test_multi_retrieve_expect_fail(self): 161 | # Ensure files are only available where they were stored 162 | pytest.raises(FileNotFoundError, Storage.retrieve_from_backend, self.file_s3.path, "filesystem") 163 | pytest.raises(FileNotFoundError, Storage.retrieve_from_backend, self.file_fs.path, "s3generic") 164 | 165 | def test_multi_removed(self): 166 | Storage.delete(self.file_all.path) 167 | Storage.delete(self.file_s3.path) 168 | Storage.delete(self.file_fs.path) 169 | 170 | pytest.raises(FileNotFoundError, Storage.retrieve, self.file_all.path) 171 | pytest.raises(FileNotFoundError, Storage.retrieve, self.file_s3.path) 172 | pytest.raises(FileNotFoundError, Storage.retrieve, self.file_fs.path) 173 | -------------------------------------------------------------------------------- /tests/functional/test_webviews.py: -------------------------------------------------------------------------------- 1 | from tests.util import login 2 | from crashserver.config import settings 3 | 4 | 5 | class TestEmptyProject: 6 | def test_path_root(self, client): 7 | response = client.get("/") 8 | assert response.status_code == 200 9 | assert b"No projects have been created yet." in response.data 10 | 11 | def test_path_crash_reports(self, client): 12 | response = client.get("/crash-reports") 13 | assert response.status_code == 200 14 | assert b"No minidumps have been uploaded yet." 
in response.data 15 | 16 | def test_settings_unauthenticated(self, client): 17 | response = client.get("/settings") 18 | assert response.status_code == 302 19 | assert "/auth/login" in response.location 20 | 21 | def test_settings_authenticated(self, client): 22 | login(client, settings.login.email, settings.login.passwd) 23 | response = client.get("/settings", follow_redirects=True) 24 | assert response.status_code == 200 25 | assert b"Crash Server supports collecting symbols and minidumps for multiple applications" in response.data 26 | -------------------------------------------------------------------------------- /tests/util.py: -------------------------------------------------------------------------------- 1 | def login(client, email, password): 2 | return client.post( 3 | "/auth/login", 4 | data=dict(email=email, password=password), 5 | follow_redirects=True, 6 | ) 7 | 8 | 9 | def logout(client): 10 | return client.get( 11 | "/auth/logout", 12 | follow_redirects=True, 13 | ) 14 | --------------------------------------------------------------------------------
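The helpers in tests/util.py pair with the session-scoped client fixture from tests/conftest.py, so additional page-level checks are cheap to add. As a minimal sketch, assuming the symbols overview from res/templates/symbols/symbols.html is served at a /symbols route (the route path and the new test module name are assumptions, not confirmed by this listing), a further functional test could look like:

# tests/functional/test_symbols.py -- illustrative sketch only; not a file in this repository.
# Assumes the symbols overview page is routed at "/symbols" and renders the
# "No projects have been created yet." copy from res/templates/symbols/symbols.html.
from tests.util import login
from crashserver.config import settings


class TestSymbolsPage:
    def test_symbols_empty(self, client):
        # With no projects in the test database, the project selector should
        # fall back to the empty-state message.
        response = client.get("/symbols")
        assert response.status_code == 200
        assert b"No projects have been created yet." in response.data

    def test_symbols_authenticated(self, client):
        # Logging in as the default administrator should still return the page.
        login(client, settings.login.email, settings.login.passwd)
        response = client.get("/symbols", follow_redirects=True)
        assert response.status_code == 200

Like the existing functional tests, this sketch relies on the clean database created by the setup_session fixture, so the empty-state assertion holds without any extra fixtures.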