├── .gitattributes ├── .gitignore ├── .gitlab-ci.yml ├── Attributions.md ├── LICENSE ├── README.md ├── SECURITY.md ├── backend ├── .gitignore ├── app │ ├── .flake8 │ ├── .gitignore │ ├── .python-version │ ├── alembic.ini │ ├── alembic │ │ ├── README │ │ ├── env.py │ │ ├── script.py.mako │ │ └── versions │ │ │ ├── .keep │ │ │ ├── 03d8017f5307_add_target_recall_setting.py │ │ │ ├── 139df0683d97_remove_unnecessary_columns_is_test_and_studyset_deck_id.py │ │ │ ├── 166024720669_add_test_history_and_convert_test_to_.py │ │ │ ├── 19a191ff920c_create_creation_date_col_in_studyset.py │ │ │ ├── 23e1f44f4f62_add_log_value_post_test.py │ │ │ ├── 250c2f90374e_add_models.py │ │ │ ├── 2ee368bbc6f6_add_test_mode_column.py │ │ │ ├── 36fb79f5f33e_json_to_jsonb.py │ │ │ ├── 47a0d502a95a_rename_study_set_to_studyset_and_create_session_deck.py │ │ │ ├── 47fe14c79175_rename_correct_to_response_for_.py │ │ │ ├── 491383f70589_add_separate_reported_and_deleted_tables.py │ │ │ ├── 4afdb8d415dd_add_log_types_for_test_mode.py │ │ │ ├── 4c116a99c4ee_add_log_update_user.py │ │ │ ├── 4c4de690e32f_add_test_mode_column_to_fact.py │ │ │ ├── 4cb1bd466ce1_add_show_tip.py │ │ │ ├── 5226e60d1987_remove_unnecessary_index.py │ │ │ ├── 57667899ac6f_change_test_mode_to_int.py │ │ │ ├── 61dba6b62053_add_is_test_to_deck_partial_index.py │ │ │ ├── 641d695f446e_add_other_reason_columns_to_mnemonic_.py │ │ │ ├── 66eb96210d4a_add_marked_table.py │ │ │ ├── 6872aa796f4a_add_rationale_column.py │ │ │ ├── 6af85811ee88_convert_bool_to_enum.py │ │ │ ├── 6c00659b79c1_rename_test_mode_to_new_test_mode.py │ │ │ ├── 7a19b60fa20c_add_mnemonic_logging_enums_to_history.py │ │ │ ├── 7a86599a211c_add_deleted_deck_type_and_fix_column_defaults_names.py │ │ │ ├── 7c432afcbe9e_add_show_mnemonic_help_column_to_user_.py │ │ │ ├── 7cf86cd78103_remove_viewed_mnemonic_column.py │ │ │ ├── 7e169b59629a_is_correct_should_be_nullable_create_.py │ │ │ ├── 7e18d411320e_add_repetition_model_studyset_type_enum_.py │ │ │ ├── 
94625a55cb65_remove_next_test_mode_and_last_test_.py │ │ │ ├── 957c6698b8e2_add_retired_column.py │ │ │ ├── 98309991d026_add_beta_user_and_creation_date_to_user.py │ │ │ ├── 9afc4e3a9bf3_add_suggestion_and_comment_to_suspended.py │ │ │ ├── ab7e7e37b4cf_add_public_mnemonic_to_decktype_enum.py │ │ │ ├── ae31c23bebdf_remove_repetition_model_from_history_.py │ │ │ ├── b7b8acac9d4f_history_fact_id_should_be_optional.py │ │ │ ├── bcd5eab43566_rename_session_to_studyset.py │ │ │ ├── c15cf3a0a3f8_create_indices_session_and_session_fact.py │ │ │ ├── c8bcaad53b52_add_indices.py │ │ │ ├── ca02ff820178_karl_karl100.py │ │ │ ├── d30ad91b8772_add_sanity_check_to_decktype_enum.py │ │ │ ├── d39b49c62b4f_add_dark_mode_setting.py │ │ │ ├── df7ea91f1589_edit_debug_id_to_string.py │ │ │ ├── e0fa43c0f38d_add_column_last_test_date_to_user.py │ │ │ ├── e1488e104d80_create_mnemonic_table.py │ │ │ ├── e3a211187158_add_history_log.py │ │ │ ├── edb59940cef6_is_test_shouldn_t_be_int_convert_to_bool.py │ │ │ ├── f007a4240a14_add_enum_values.py │ │ │ ├── fdb5d50f8331_add_enum_values_to_log_and_repetition.py │ │ │ ├── fdcf07aac389_add_show_help_to_user.py │ │ │ └── ffdf470fe0ba_add_gin_indices.py │ ├── app │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ ├── api_v1 │ │ │ │ ├── __init__.py │ │ │ │ ├── api.py │ │ │ │ └── endpoints │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── decks.py │ │ │ │ │ ├── facts.py │ │ │ │ │ ├── login.py │ │ │ │ │ ├── mnemonics.py │ │ │ │ │ ├── statistics.py │ │ │ │ │ ├── study.py │ │ │ │ │ ├── users.py │ │ │ │ │ └── utils.py │ │ │ └── deps.py │ │ ├── backend_pre_start.py │ │ ├── celeryworker_pre_start.py │ │ ├── core │ │ │ ├── __init__.py │ │ │ ├── celery_app.py │ │ │ ├── config.py │ │ │ └── security.py │ │ ├── crud │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── crud_deck.py │ │ │ ├── crud_fact.py │ │ │ ├── crud_history.py │ │ │ ├── crud_mnemonic.py │ │ │ ├── crud_studyset.py │ │ │ ├── crud_test_history.py │ │ │ ├── crud_user.py │ │ │ └── sqlalchemy_helper.py │ │ 
├── data │ │ │ ├── formatted.train.clues.json │ │ │ ├── jeopardy.json │ │ │ ├── tfidf.pkl │ │ │ ├── train_tfidf.py │ │ │ ├── train_tfidf_vectorizer.py │ │ │ └── vocab_rlhf_testing.json │ │ ├── db │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── base_class.py │ │ │ ├── init_db.py │ │ │ └── session.py │ │ ├── email-templates │ │ │ ├── build │ │ │ │ ├── new_account.html │ │ │ │ ├── reset_password.html │ │ │ │ ├── test_email.html │ │ │ │ ├── test_mode_reminder.html │ │ │ │ └── vocab_reminder.html │ │ │ └── src │ │ │ │ ├── new_account.mjml │ │ │ │ ├── reset_password.mjml │ │ │ │ └── test_email.mjml │ │ ├── initial_data.py │ │ ├── interface │ │ │ ├── __init__.py │ │ │ ├── reassignment.py │ │ │ ├── scheduler.py │ │ │ └── statistics.py │ │ ├── main.py │ │ ├── models │ │ │ ├── __init__.py │ │ │ ├── deck.py │ │ │ ├── deleted.py │ │ │ ├── fact.py │ │ │ ├── history.py │ │ │ ├── marked.py │ │ │ ├── mnemonic.py │ │ │ ├── reported.py │ │ │ ├── session_deck.py │ │ │ ├── session_fact.py │ │ │ ├── studyset.py │ │ │ ├── suspended.py │ │ │ ├── test_history.py │ │ │ ├── user.py │ │ │ └── user_deck.py │ │ ├── schemas │ │ │ ├── __init__.py │ │ │ ├── deck.py │ │ │ ├── deck_type.py │ │ │ ├── fact.py │ │ │ ├── field.py │ │ │ ├── file_props.py │ │ │ ├── history.py │ │ │ ├── leaderboard.py │ │ │ ├── log.py │ │ │ ├── mnemonic.py │ │ │ ├── msg.py │ │ │ ├── permission.py │ │ │ ├── rank_type.py │ │ │ ├── repetition.py │ │ │ ├── schedule.py │ │ │ ├── set_parameters_schema.py │ │ │ ├── set_type.py │ │ │ ├── statistics.py │ │ │ ├── studyset.py │ │ │ ├── suspend_type.py │ │ │ ├── target_window.py │ │ │ ├── token.py │ │ │ └── user.py │ │ ├── tests │ │ │ ├── .gitignore │ │ │ ├── __init__.py │ │ │ ├── api │ │ │ │ ├── __init__.py │ │ │ │ └── api_v1 │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── test_all_facts.py │ │ │ │ │ ├── test_all_study.py │ │ │ │ │ ├── test_all_users.py │ │ │ │ │ ├── test_celery.py │ │ │ │ │ ├── test_decks.py │ │ │ │ │ └── test_login.py │ │ │ ├── conftest.py │ │ │ ├── crud │ │ │ │ ├── 
__init__.py │ │ │ │ ├── test_deck.py │ │ │ │ ├── test_fact.py │ │ │ │ ├── test_study.py │ │ │ │ └── test_user.py │ │ │ └── utils │ │ │ │ ├── __init__.py │ │ │ │ ├── deck.py │ │ │ │ ├── fact.py │ │ │ │ ├── user.py │ │ │ │ └── utils.py │ │ ├── tests_pre_start.py │ │ ├── utils │ │ │ ├── __init__.py │ │ │ ├── evaluate.py │ │ │ └── utils.py │ │ └── worker.py │ ├── mypy.ini │ ├── poetry.lock │ ├── prestart.sh │ ├── pyproject.toml │ ├── scripts │ │ ├── format-imports.sh │ │ ├── format.sh │ │ ├── lint.sh │ │ ├── reformat_test.py │ │ ├── test-cov-html.sh │ │ └── test.sh │ ├── tests-start.sh │ └── worker-start.sh ├── backend.dockerfile └── celeryworker.dockerfile ├── docker-compose.override.yml ├── docker-compose.yml ├── frontend ├── .browserslistrc ├── .dockerignore ├── .env ├── .eslintrc.js ├── .gitignore ├── .prettierrc.js ├── Dockerfile ├── README.md ├── babel.config.js ├── jest.config.js ├── nginx-backend-not-found.conf ├── package-lock.json ├── package.json ├── public │ ├── .well-known │ │ └── apple-app-site-association │ ├── apple-app-site-association │ ├── favicon.ico │ ├── img │ │ └── icons │ │ │ ├── android-chrome-192x192.png │ │ │ ├── android-chrome-512x512.png │ │ │ ├── apple-touch-icon.png │ │ │ ├── favicon-16x16.png │ │ │ ├── favicon-32x32.png │ │ │ ├── mstile-150x150.png │ │ │ └── safari-pinned-tab.svg │ ├── index.html │ ├── manifest.json │ └── robots.txt ├── src │ ├── App.vue │ ├── api.ts │ ├── assets │ │ ├── background-desktop-blue.png │ │ ├── background-desktop.png │ │ ├── ios_button.jpg │ │ ├── ios_screen.jpg │ │ ├── karl_install.png │ │ ├── logo.png │ │ ├── mnemonic-examples-wide.svg │ │ ├── neural-net-blue.svg │ │ ├── neural-net.svg │ │ ├── spaced-repetition-blue.svg │ │ ├── spaced-repetition.svg │ │ └── umd-horizontal.png │ ├── component-hooks.ts │ ├── components │ │ ├── ConnectionError.vue │ │ ├── NotificationsManager.vue │ │ ├── RouterComponent.vue │ │ ├── UpdateAvailable.vue │ │ └── UploadButton.vue │ ├── env.ts │ ├── interfaces │ │ └── index.ts │ 
├── main.ts │ ├── plugins │ │ └── vuetify.ts │ ├── registerServiceWorker.ts │ ├── router │ │ └── index.ts │ ├── shims-tsx.d.ts │ ├── shims-vue.d.ts │ ├── store │ │ ├── index.ts │ │ └── modules │ │ │ ├── admin.ts │ │ │ ├── main.ts │ │ │ └── study.ts │ ├── utils │ │ ├── index.ts │ │ └── store-accessor.ts │ └── views │ │ ├── ConnectionPopup.vue │ │ ├── IRB.vue │ │ ├── Index.vue │ │ ├── Login.vue │ │ ├── MnemonicOnboard.vue │ │ ├── MnemonicStudy.vue │ │ ├── Onboard.vue │ │ ├── PWA.vue │ │ ├── PasswordRecovery.vue │ │ ├── ResetPassword.vue │ │ ├── Signup.vue │ │ └── main │ │ ├── Browser.vue │ │ ├── Contact.vue │ │ ├── Dashboard.vue │ │ ├── EditFact.vue │ │ ├── Leaderboards.vue │ │ ├── Main.vue │ │ ├── RecallPopup.vue │ │ ├── Start.vue │ │ ├── Statistics.vue │ │ ├── StudySet.vue │ │ ├── TestPopup.vue │ │ ├── add │ │ ├── AddDeck.vue │ │ ├── AddFact.vue │ │ ├── ChooseDecks.vue │ │ └── UploadFacts.vue │ │ ├── admin │ │ ├── Admin.vue │ │ ├── AdminUsers.vue │ │ ├── CreateUser.vue │ │ └── EditUser.vue │ │ ├── profile │ │ ├── UserProfile.vue │ │ ├── UserProfileEdit.vue │ │ └── UserProfileEditPassword.vue │ │ └── study │ │ ├── Decks.vue │ │ └── Learn.vue ├── tests │ └── unit │ │ └── upload-button.spec.ts ├── tsconfig.json └── vue.config.js └── scripts ├── backup.sh ├── backup_app.sh ├── build-and-deploy.sh ├── build-push.sh ├── build.sh ├── deploy.sh ├── load-backup.sh ├── test-local.sh ├── test.sh └── update.sh /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | **/data/** filter=lfs diff=lfs merge=lfs -text 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .env* 2 | .DS_Store 3 | docker-stack.yml 4 | db-backups/ 5 | cookiecutter-config-file.yml 6 | .git.bfg-report/ 7 | .idea/ 8 | backend/3.8/ 9 | *.code-workspace 10 | 
*test_mode*.json 11 | *.gz 12 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | image: tiangolo/docker-with-compose 2 | 3 | before_script: 4 | - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY 5 | - pip install docker-auto-labels 6 | 7 | stages: 8 | - test 9 | - build 10 | - deploy 11 | 12 | tests: 13 | stage: test 14 | script: 15 | - sh ./scripts/test.sh 16 | tags: 17 | - build 18 | - test 19 | 20 | build-stag: 21 | stage: build 22 | script: 23 | - TAG=stag FRONTEND_ENV=staging sh ./scripts/build-push.sh 24 | only: 25 | - master 26 | tags: 27 | - build 28 | - test 29 | 30 | build-prod: 31 | stage: build 32 | script: 33 | - TAG=prod FRONTEND_ENV=production sh ./scripts/build-push.sh 34 | only: 35 | - production 36 | tags: 37 | - build 38 | - test 39 | 40 | deploy-stag: 41 | stage: deploy 42 | script: 43 | - > 44 | DOMAIN=stag.karl.qanta.org 45 | TRAEFIK_TAG=stag.karl 46 | STACK_NAME=stag-karl 47 | TAG=stag 48 | sh ./scripts/deploy.sh 49 | environment: 50 | name: staging 51 | url: https://stag.karl.qanta.org 52 | only: 53 | - master 54 | tags: 55 | - swarm 56 | - stag 57 | 58 | deploy-prod: 59 | stage: deploy 60 | script: 61 | - > 62 | DOMAIN=karl.qanta.org 63 | TRAEFIK_TAG=karl 64 | STACK_NAME=karl 65 | TAG=prod 66 | sh ./scripts/deploy.sh 67 | environment: 68 | name: production 69 | url: https://karl.qanta.org 70 | only: 71 | - production 72 | tags: 73 | - swarm 74 | - prod 75 | -------------------------------------------------------------------------------- /Attributions.md: -------------------------------------------------------------------------------- 1 | KAR³L Flashcards is built with the help of open-sourced projects attributed below. 
2 | 3 | Full-Stack-FastAPI-PostgreSQL 4 | Copyright (c) 2019 Sebastián Ramírez 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 23 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | Please report security vulnerabilities using one of the methods listed in KARL's Contact Us page or the [following](https://hsquizbowl.org/forums/viewtopic.php?f=123&p=379140&sid=8ae602e914bc1e56736a07030176c718) forum post. 
6 | -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | app.egg-info 3 | -------------------------------------------------------------------------------- /backend/app/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache 4 | -------------------------------------------------------------------------------- /backend/app/.gitignore: -------------------------------------------------------------------------------- 1 | .mypy_cache 2 | .coverage 3 | htmlcov 4 | .idea/ 5 | -------------------------------------------------------------------------------- /backend/app/.python-version: -------------------------------------------------------------------------------- 1 | 3.10.4 2 | -------------------------------------------------------------------------------- /backend/app/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = alembic 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # timezone to use when rendering the date 11 | # within the migration file as well as the filename. 
12 | # string value is passed to dateutil.tz.gettz() 13 | # leave blank for localtime 14 | # timezone = 15 | 16 | # max length of characters to apply to the 17 | # "slug" field 18 | #truncate_slug_length = 40 19 | 20 | # set to 'true' to run the environment during 21 | # the 'revision' command, regardless of autogenerate 22 | # revision_environment = false 23 | 24 | # set to 'true' to allow .pyc and .pyo files without 25 | # a source .py file to be detected as revisions in the 26 | # versions/ directory 27 | # sourceless = false 28 | 29 | # version location specification; this defaults 30 | # to alembic/versions. When using multiple version 31 | # directories, initial revisions must be specified with --version-path 32 | # version_locations = %(here)s/bar %(here)s/bat alembic/versions 33 | 34 | # the output encoding used when revision files 35 | # are written from script.py.mako 36 | # output_encoding = utf-8 37 | 38 | # Logging configuration 39 | [loggers] 40 | keys = root,sqlalchemy,alembic 41 | 42 | [handlers] 43 | keys = console 44 | 45 | [formatters] 46 | keys = generic 47 | 48 | [logger_root] 49 | level = WARN 50 | handlers = console 51 | qualname = 52 | 53 | [logger_sqlalchemy] 54 | level = WARN 55 | handlers = 56 | qualname = sqlalchemy.engine 57 | 58 | [logger_alembic] 59 | level = INFO 60 | handlers = 61 | qualname = alembic 62 | 63 | [handler_console] 64 | class = StreamHandler 65 | args = (sys.stderr,) 66 | level = NOTSET 67 | formatter = generic 68 | 69 | [formatter_generic] 70 | format = %(levelname)-5.5s [%(name)s] %(message)s 71 | datefmt = %H:%M:%S 72 | -------------------------------------------------------------------------------- /backend/app/alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 
-------------------------------------------------------------------------------- /backend/app/alembic/env.py: -------------------------------------------------------------------------------- 1 | from __future__ import with_statement 2 | 3 | import os 4 | 5 | from alembic import context 6 | from sqlalchemy import engine_from_config, pool 7 | from logging.config import fileConfig 8 | 9 | # this is the Alembic Config object, which provides 10 | # access to the values within the .ini file in use. 11 | config = context.config 12 | 13 | # Interpret the config file for Python logging. 14 | # This line sets up loggers basically. 15 | fileConfig(config.config_file_name) 16 | 17 | # add your model's MetaData object here 18 | # for 'autogenerate' support 19 | # from myapp import mymodel 20 | # target_metadata = mymodel.Base.metadata 21 | # target_metadata = None 22 | 23 | from app.db.base import Base # noqa 24 | 25 | target_metadata = Base.metadata 26 | 27 | # other values from the config, defined by the needs of env.py, 28 | # can be acquired: 29 | # my_important_option = config.get_main_option("my_important_option") 30 | # ... etc. 31 | 32 | 33 | def get_url(): 34 | user = os.getenv("POSTGRES_USER", "postgres") 35 | password = os.getenv("POSTGRES_PASSWORD", "") 36 | server = os.getenv("POSTGRES_SERVER", "db") 37 | db = os.getenv("POSTGRES_DB", "app") 38 | return f"postgresql://{user}:{password}@{server}/{db}" 39 | 40 | 41 | def run_migrations_offline(): 42 | """Run migrations in 'offline' mode. 43 | 44 | This configures the context with just a URL 45 | and not an Engine, though an Engine is acceptable 46 | here as well. By skipping the Engine creation 47 | we don't even need a DBAPI to be available. 48 | 49 | Calls to context.execute() here emit the given string to the 50 | script output. 
51 | 52 | """ 53 | url = get_url() 54 | context.configure( 55 | url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True 56 | ) 57 | 58 | with context.begin_transaction(): 59 | context.run_migrations() 60 | 61 | 62 | def run_migrations_online(): 63 | """Run migrations in 'online' mode. 64 | 65 | In this scenario we need to create an Engine 66 | and associate a connection with the context. 67 | 68 | """ 69 | configuration = config.get_section(config.config_ini_section) 70 | configuration["sqlalchemy.url"] = get_url() 71 | connectable = engine_from_config( 72 | configuration, prefix="sqlalchemy.", poolclass=pool.NullPool, 73 | ) 74 | 75 | with connectable.connect() as connection: 76 | context.configure( 77 | connection=connection, target_metadata=target_metadata, compare_type=True 78 | ) 79 | 80 | with context.begin_transaction(): 81 | context.run_migrations() 82 | 83 | 84 | if context.is_offline_mode(): 85 | run_migrations_offline() 86 | else: 87 | run_migrations_online() 88 | -------------------------------------------------------------------------------- /backend/app/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/backend/app/alembic/versions/.keep -------------------------------------------------------------------------------- /backend/app/alembic/versions/03d8017f5307_add_target_recall_setting.py: -------------------------------------------------------------------------------- 1 | """Add target recall setting 2 | 3 | Revision ID: 03d8017f5307 4 | Revises: ca02ff820178 5 | Create Date: 2021-06-11 20:02:58.118767 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '03d8017f5307' 13 | down_revision = 'ca02ff820178' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.add_column('user', sa.Column('recall_target', sa.SmallInteger(), nullable=False, server_default='-1')) 21 | # ### end Alembic commands ### 22 | 23 | 24 | def downgrade(): 25 | # ### commands auto generated by Alembic - please adjust! 
### 26 | op.drop_column('user', 'recall_target') 27 | # ### end Alembic commands ### 28 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/139df0683d97_remove_unnecessary_columns_is_test_and_studyset_deck_id.py: -------------------------------------------------------------------------------- 1 | """remove unnecessary columns is_test and studyset deck_id 2 | 3 | Revision ID: 139df0683d97 4 | Revises: 61dba6b62053 5 | Create Date: 2022-05-16 01:35:10.802805 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '139df0683d97' 13 | down_revision = '61dba6b62053' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.drop_index('ix_deck_is_test', table_name='deck') 21 | op.drop_column('deck', 'is_test') 22 | op.drop_column('fact', 'test_mode') 23 | op.drop_index('ix_studyset_deck_id', table_name='studyset') 24 | op.drop_constraint('studyset_deck_id_fkey', 'studyset', type_='foreignkey') 25 | op.drop_column('studyset', 'deck_id') 26 | # ### end Alembic commands ### 27 | 28 | 29 | def downgrade(): 30 | # ### commands auto generated by Alembic - please adjust! 
### 31 | op.add_column('studyset', sa.Column('deck_id', sa.INTEGER(), autoincrement=False, nullable=True)) 32 | op.create_foreign_key('studyset_deck_id_fkey', 'studyset', 'deck', ['deck_id'], ['id']) 33 | op.create_index('ix_studyset_deck_id', 'studyset', ['deck_id'], unique=False) 34 | op.add_column('fact', sa.Column('test_mode', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, 35 | nullable=False)) 36 | op.add_column('deck', sa.Column('is_test', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, 37 | nullable=False)) 38 | op.create_index('ix_deck_is_test', 'deck', ['is_test'], unique=True) 39 | # ### end Alembic commands ### 40 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/166024720669_add_test_history_and_convert_test_to_.py: -------------------------------------------------------------------------------- 1 | """add test_history and convert test to boolean 2 | 3 | Revision ID: 166024720669 4 | Revises: e0fa43c0f38d 5 | Create Date: 2022-05-12 17:47:49.727732 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '166024720669' 14 | down_revision = 'e0fa43c0f38d' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('test_history', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('time', sa.TIMESTAMP(timezone=True), nullable=False), 24 | sa.Column('user_id', sa.Integer(), nullable=False), 25 | sa.Column('fact_id', sa.Integer(), nullable=True), 26 | sa.Column('correct', sa.Boolean(), nullable=False), 27 | sa.Column('details', postgresql.JSONB(astext_type=sa.Text()), nullable=True), 28 | sa.ForeignKeyConstraint(['fact_id'], ['fact.fact_id'], ), 29 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), 30 | sa.PrimaryKeyConstraint('id') 31 | ) 32 | op.create_index(op.f('ix_test_history_id'), 'test_history', ['id'], unique=False) 33 | op.drop_column('fact', 'test_mode') 34 | op.add_column('fact', sa.Column('test_mode', sa.Boolean(), nullable=False, server_default='false')) 35 | # ### end Alembic commands ### 36 | 37 | 38 | def downgrade(): 39 | # ### commands auto generated by Alembic - please adjust! ### 40 | op.drop_column('fact', 'test_mode') 41 | op.add_column('fact', sa.Column('test_mode', sa.SMALLINT(), nullable=False, server_default='0')) 42 | op.drop_index(op.f('ix_test_history_id'), table_name='test_history') 43 | op.drop_table('test_history') 44 | # ### end Alembic commands ### 45 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/19a191ff920c_create_creation_date_col_in_studyset.py: -------------------------------------------------------------------------------- 1 | """Create creation_date col in studyset 2 | 3 | Revision ID: 19a191ff920c 4 | Revises: 5226e60d1987 5 | Create Date: 2022-05-27 23:19:32.861669 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '19a191ff920c' 14 | down_revision = '5226e60d1987' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.add_column('studyset', sa.Column('create_date', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('studyset', 'create_date') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/23e1f44f4f62_add_log_value_post_test.py: -------------------------------------------------------------------------------- 1 | """add log value post test 2 | 3 | Revision ID: 23e1f44f4f62 4 | Revises: 7a86599a211c 5 | Create Date: 2023-10-30 02:37:31.719564 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '23e1f44f4f62' 14 | down_revision = '7a86599a211c' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.execute("COMMIT") 22 | op.execute("ALTER TYPE log ADD VALUE 'get_post_test_facts'") 23 | op.execute("ALTER TYPE log ADD VALUE 'post_test_study'") 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade(): 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | pass 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/2ee368bbc6f6_add_test_mode_column.py: -------------------------------------------------------------------------------- 1 | """Add test mode column 2 | 3 | Revision ID: 2ee368bbc6f6 4 | Revises: 03d8017f5307 5 | Create Date: 2021-06-17 17:19:09.428462 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = '2ee368bbc6f6' 13 | down_revision = '03d8017f5307' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.add_column('user', sa.Column('test_mode', sa.Boolean(), nullable=False, server_default='false')) 21 | # ### end Alembic commands ### 22 | 23 | 24 | def downgrade(): 25 | # ### commands auto generated by Alembic - please adjust! ### 26 | op.drop_column('user', 'test_mode') 27 | # ### end Alembic commands ### 28 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/36fb79f5f33e_json_to_jsonb.py: -------------------------------------------------------------------------------- 1 | """json to jsonb 2 | 3 | Revision ID: 36fb79f5f33e 4 | Revises: 66eb96210d4a 5 | Create Date: 2020-05-31 18:36:03.622213 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '36fb79f5f33e' 14 | down_revision = '66eb96210d4a' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.alter_column('fact', 'extra', 22 | existing_type=postgresql.JSON(astext_type=sa.Text()), 23 | type_=postgresql.JSONB(astext_type=sa.Text()), 24 | existing_nullable=True) 25 | op.alter_column('history', 'details', 26 | existing_type=postgresql.JSON(astext_type=sa.Text()), 27 | type_=postgresql.JSONB(astext_type=sa.Text()), 28 | existing_nullable=True) 29 | # ### end Alembic commands ### 30 | 31 | 32 | def downgrade(): 33 | # ### commands auto generated by Alembic - please adjust! 
### 34 | op.alter_column('history', 'details', 35 | existing_type=postgresql.JSONB(astext_type=sa.Text()), 36 | type_=postgresql.JSON(astext_type=sa.Text()), 37 | existing_nullable=True) 38 | op.alter_column('fact', 'extra', 39 | existing_type=postgresql.JSONB(astext_type=sa.Text()), 40 | type_=postgresql.JSON(astext_type=sa.Text()), 41 | existing_nullable=True) 42 | # ### end Alembic commands ### 43 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/47a0d502a95a_rename_study_set_to_studyset_and_create_session_deck.py: -------------------------------------------------------------------------------- 1 | """rename study_set to studyset and create session_deck 2 | 3 | Revision ID: 47a0d502a95a 4 | Revises: 139df0683d97 5 | Create Date: 2022-05-16 02:11:11.782600 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '47a0d502a95a' 13 | down_revision = '139df0683d97' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.create_table('session_deck', 21 | sa.Column('studyset_id', sa.Integer(), nullable=False), 22 | sa.Column('deck_id', sa.Integer(), nullable=False), 23 | sa.ForeignKeyConstraint(['deck_id'], ['deck.id'], ), 24 | sa.ForeignKeyConstraint(['studyset_id'], ['studyset.id'], ), 25 | sa.PrimaryKeyConstraint('studyset_id', 'deck_id') 26 | ) 27 | # ### end Alembic commands ### 28 | 29 | 30 | def downgrade(): 31 | # ### commands auto generated by Alembic - please adjust! 
### 32 | op.drop_table('session_deck') 33 | # ### end Alembic commands ### 34 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/47fe14c79175_rename_correct_to_response_for_.py: -------------------------------------------------------------------------------- 1 | """rename correct to response for consistency 2 | 3 | Revision ID: 47fe14c79175 4 | Revises: 166024720669 5 | Create Date: 2022-05-14 01:11:52.755163 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '47fe14c79175' 13 | down_revision = '166024720669' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.add_column('test_history', sa.Column('response', sa.Boolean(), nullable=False)) 21 | op.drop_column('test_history', 'correct') 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.add_column('test_history', sa.Column('correct', sa.BOOLEAN(), autoincrement=False, nullable=False)) 28 | op.drop_column('test_history', 'response') 29 | # ### end Alembic commands ### 30 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/491383f70589_add_separate_reported_and_deleted_tables.py: -------------------------------------------------------------------------------- 1 | """add separate reported and deleted tables 2 | 3 | Revision ID: 491383f70589 4 | Revises: 9afc4e3a9bf3 5 | Create Date: 2020-06-26 05:23:30.267933 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = '491383f70589' 14 | down_revision = '9afc4e3a9bf3' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('deleted', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('fact_id', sa.Integer(), nullable=False), 24 | sa.Column('user_id', sa.Integer(), nullable=False), 25 | sa.Column('date_deleted', sa.TIMESTAMP(timezone=True), nullable=False), 26 | sa.ForeignKeyConstraint(['fact_id'], ['fact.fact_id'], ), 27 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), 28 | sa.PrimaryKeyConstraint('id') 29 | ) 30 | op.create_index(op.f('ix_deleted_id'), 'deleted', ['id'], unique=False) 31 | op.create_table('reported', 32 | sa.Column('id', sa.Integer(), nullable=False), 33 | sa.Column('fact_id', sa.Integer(), nullable=False), 34 | sa.Column('user_id', sa.Integer(), nullable=False), 35 | sa.Column('date_reported', sa.TIMESTAMP(timezone=True), nullable=False), 36 | sa.Column('suggestion', postgresql.JSONB(astext_type=sa.Text()), nullable=True), 37 | sa.ForeignKeyConstraint(['fact_id'], ['fact.fact_id'], ), 38 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), 39 | sa.PrimaryKeyConstraint('id') 40 | ) 41 | op.create_index(op.f('ix_reported_id'), 'reported', ['id'], unique=False) 42 | op.drop_column('suspended', 'comment') 43 | op.drop_column('suspended', 'suspend_type') 44 | op.drop_column('suspended', 'suggestion') 45 | # ### end Alembic commands ### 46 | 47 | 48 | def downgrade(): 49 | # ### commands auto generated by Alembic - please adjust! 
### 50 | op.add_column('suspended', sa.Column('suggestion', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True)) 51 | op.add_column('suspended', sa.Column('suspend_type', postgresql.ENUM('delete', 'suspend', 'report', name='suspendtype'), autoincrement=False, nullable=False)) 52 | op.add_column('suspended', sa.Column('comment', sa.VARCHAR(), autoincrement=False, nullable=True)) 53 | op.drop_index(op.f('ix_reported_id'), table_name='reported') 54 | op.drop_table('reported') 55 | op.drop_index(op.f('ix_deleted_id'), table_name='deleted') 56 | op.drop_table('deleted') 57 | # ### end Alembic commands ### 58 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/4afdb8d415dd_add_log_types_for_test_mode.py: -------------------------------------------------------------------------------- 1 | """add log types for test mode 2 | 3 | Revision ID: 4afdb8d415dd 4 | Revises: 4c4de690e32f 5 | Create Date: 2021-06-18 17:59:17.091617 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '4afdb8d415dd' 13 | down_revision = '4c4de690e32f' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.execute("COMMIT") 21 | op.execute("ALTER TYPE log ADD VALUE 'test_study'") 22 | op.execute("ALTER TYPE log ADD VALUE 'get_test_facts'") 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade(): 27 | # ### commands auto generated by Alembic - please adjust! 
### 28 | pass 29 | # ### end Alembic commands ### 30 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/4c116a99c4ee_add_log_update_user.py: -------------------------------------------------------------------------------- 1 | """add log update_user 2 | 3 | Revision ID: 4c116a99c4ee 4 | Revises: df7ea91f1589 5 | Create Date: 2022-07-08 17:11:30.328345 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '4c116a99c4ee' 14 | down_revision = 'df7ea91f1589' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | op.execute("ALTER TYPE log ADD VALUE 'update_user'") 21 | 22 | 23 | def downgrade(): 24 | # ### commands auto generated by Alembic - please adjust! ### 25 | pass 26 | # ### end Alembic commands ### 27 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/4c4de690e32f_add_test_mode_column_to_fact.py: -------------------------------------------------------------------------------- 1 | """add test_mode column to fact 2 | 3 | Revision ID: 4c4de690e32f 4 | Revises: 57667899ac6f 5 | Create Date: 2021-06-18 17:55:18.970840 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '4c4de690e32f' 14 | down_revision = '57667899ac6f' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('fact', sa.Column('test_mode', sa.SmallInteger(), nullable=True)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! 
### 27 | op.drop_column('fact', 'test_mode') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/4cb1bd466ce1_add_show_tip.py: -------------------------------------------------------------------------------- 1 | """add show tip 2 | 3 | Revision ID: 4cb1bd466ce1 4 | Revises: d39b49c62b4f 5 | Create Date: 2020-07-21 02:05:38.737593 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '4cb1bd466ce1' 14 | down_revision = 'd39b49c62b4f' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('user', sa.Column('pwa_tip', sa.Boolean(), nullable=False, server_default='true')) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('user', 'pwa_tip') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/5226e60d1987_remove_unnecessary_index.py: -------------------------------------------------------------------------------- 1 | """remove unnecessary index 2 | 3 | Revision ID: 5226e60d1987 4 | Revises: 957c6698b8e2 5 | Create Date: 2022-05-24 18:46:10.123696 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '5226e60d1987' 14 | down_revision = '957c6698b8e2' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.drop_index('ix_deck_title', table_name='deck') 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! 
### 27 | op.create_index('ix_deck_title', 'deck', ['title'], unique=False) 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/57667899ac6f_change_test_mode_to_int.py: -------------------------------------------------------------------------------- 1 | """change test mode to int 2 | 3 | Revision ID: 57667899ac6f 4 | Revises: 2ee368bbc6f6 5 | Create Date: 2021-06-18 17:04:17.860490 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '57667899ac6f' 13 | down_revision = '2ee368bbc6f6' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.drop_column('user', 'test_mode') 21 | op.add_column('user', sa.Column('test_mode', sa.SmallInteger(), nullable=False, server_default='0')) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('user', 'test_mode') 28 | op.add_column('user', sa.Column('test_mode', sa.Boolean(), nullable=False, server_default='false')) 29 | # ### end Alembic commands ### 30 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/61dba6b62053_add_is_test_to_deck_partial_index.py: -------------------------------------------------------------------------------- 1 | """add is_test to deck, partial index 2 | 3 | Revision ID: 61dba6b62053 4 | Revises: bcd5eab43566 5 | Create Date: 2022-05-14 18:45:16.313435 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '61dba6b62053' 13 | down_revision = 'bcd5eab43566' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! 
### 20 | op.add_column('deck', sa.Column('is_test', sa.Boolean(), nullable=False, server_default='false')) 21 | op.create_index('ix_deck_is_test', 'deck', ['is_test'], unique=True, postgresql_where=sa.text('is_test = true')) 22 | op.drop_constraint('studyset_deck_id_fkey', 'studyset', type_='foreignkey') 23 | op.create_foreign_key(None, 'studyset', 'deck', ['deck_id'], ['id']) 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade(): 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | op.drop_constraint(None, 'studyset', type_='foreignkey') 30 | op.create_foreign_key('studyset_deck_id_fkey', 'studyset', 'fact', ['deck_id'], ['fact_id']) 31 | op.drop_index('ix_deck_is_test', table_name='deck') 32 | op.drop_column('deck', 'is_test') 33 | # ### end Alembic commands ### 34 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/641d695f446e_add_other_reason_columns_to_mnemonic_.py: -------------------------------------------------------------------------------- 1 | """Add other reason columns to Mnemonic table 2 | 3 | Revision ID: 641d695f446e 4 | Revises: e1488e104d80 5 | Create Date: 2023-12-20 20:29:17.475127 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '641d695f446e' 14 | down_revision = 'e1488e104d80' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.add_column('mnemonic', sa.Column('is_bad_for_other_reason', sa.Boolean(), nullable=True)) 22 | op.add_column('mnemonic', sa.Column('other_reason_text', sa.String(), nullable=True)) 23 | op.alter_column('mnemonic', 'study_id', 24 | existing_type=sa.INTEGER(), 25 | nullable=True) 26 | op.alter_column('mnemonic', 'fact_id', 27 | existing_type=sa.INTEGER(), 28 | nullable=True) 29 | op.alter_column('mnemonic', 'user_id', 30 | existing_type=sa.INTEGER(), 31 | nullable=True) 32 | op.create_foreign_key(None, 'mnemonic', 'user', ['user_id'], ['id']) 33 | # ### end Alembic commands ### 34 | 35 | 36 | def downgrade(): 37 | # ### commands auto generated by Alembic - please adjust! ### 38 | op.drop_constraint(None, 'mnemonic', type_='foreignkey') 39 | op.alter_column('mnemonic', 'user_id', 40 | existing_type=sa.INTEGER(), 41 | nullable=False) 42 | op.alter_column('mnemonic', 'fact_id', 43 | existing_type=sa.INTEGER(), 44 | nullable=False) 45 | op.alter_column('mnemonic', 'study_id', 46 | existing_type=sa.INTEGER(), 47 | nullable=False) 48 | op.drop_column('mnemonic', 'other_reason_text') 49 | op.drop_column('mnemonic', 'is_bad_for_other_reason') 50 | # ### end Alembic commands ### 51 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/66eb96210d4a_add_marked_table.py: -------------------------------------------------------------------------------- 1 | """add marked table 2 | 3 | Revision ID: 66eb96210d4a 4 | Revises: ae31c23bebdf 5 | Create Date: 2020-05-31 00:05:21.489564 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '66eb96210d4a' 14 | down_revision = 'ae31c23bebdf' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('marked', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('fact_id', sa.Integer(), nullable=False), 24 | sa.Column('user_id', sa.Integer(), nullable=False), 25 | sa.Column('date_marked', sa.TIMESTAMP(timezone=True), nullable=False), 26 | sa.ForeignKeyConstraint(['fact_id'], ['fact.fact_id'], ), 27 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), 28 | sa.PrimaryKeyConstraint('id') 29 | ) 30 | op.create_index(op.f('ix_marked_id'), 'marked', ['id'], unique=False) 31 | # ### end Alembic commands ### 32 | 33 | 34 | def downgrade(): 35 | # ### commands auto generated by Alembic - please adjust! ### 36 | op.drop_index(op.f('ix_marked_id'), table_name='marked') 37 | op.drop_table('marked') 38 | # ### end Alembic commands ### 39 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/6872aa796f4a_add_rationale_column.py: -------------------------------------------------------------------------------- 1 | """add rationale column 2 | 3 | Revision ID: 6872aa796f4a 4 | Revises: 6af85811ee88 5 | Create Date: 2022-05-22 21:11:44.142967 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = '6872aa796f4a' 13 | down_revision = '6af85811ee88' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.create_index(op.f('ix_deck_deck_type'), 'deck', ['deck_type'], unique=False) 21 | op.drop_index('ix_deck_public', table_name='deck') 22 | op.add_column('session_fact', sa.Column('rationale', sa.String(), nullable=True)) 23 | op.add_column('studyset', sa.Column('debug_id', sa.Integer(), nullable=True)) 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade(): 28 | # ### commands auto generated by Alembic - please adjust! 
### 29 | op.drop_column('studyset', 'debug_id') 30 | op.drop_column('session_fact', 'rationale') 31 | op.create_index('ix_deck_public', 'deck', ['deck_type'], unique=False) 32 | op.drop_index(op.f('ix_deck_deck_type'), table_name='deck') 33 | # ### end Alembic commands ### 34 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/6af85811ee88_convert_bool_to_enum.py: -------------------------------------------------------------------------------- 1 | """convert bool to enum 2 | 3 | Revision ID: 6af85811ee88 4 | Revises: edb59940cef6 5 | Create Date: 2022-05-19 14:20:06.609946 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 12 | from sqlalchemy.dialects import postgresql 13 | 14 | revision = '6af85811ee88' 15 | down_revision = 'edb59940cef6' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | decktype_enum = postgresql.ENUM('default', 'public', 'hidden', name='decktype') 20 | 21 | 22 | # using clause: https://stackoverflow.com/questions/29069506/alembic-alter-column-type-with-using 23 | # https://github.com/sqlalchemy/alembic/issues/278 24 | # https://www.munderwood.ca/index.php/2015/05/28/altering-postgresql-columns-from-one-enum-to-another/ 25 | def upgrade(): 26 | # ### commands auto generated by Alembic - please adjust! 
### 27 | decktype_enum.create(op.get_bind()) 28 | op.alter_column('deck', 'public', 29 | existing_type=sa.BOOLEAN(), 30 | type_=decktype_enum, 31 | existing_nullable=False, 32 | new_column_name='deck_type', 33 | postgresql_using="(CASE public::text WHEN 'true' then 'public' WHEN 'false' then 'default' ELSE public::text END)::decktype") 34 | op.create_index(op.f('ix_deck_public'), 'deck', ['deck_type'], unique=False) 35 | op.create_index(op.f('ix_studyset_is_test'), 'studyset', ['is_test'], unique=False) 36 | # ### end Alembic commands ### 37 | 38 | 39 | def downgrade(): 40 | # ### commands auto generated by Alembic - please adjust! ### 41 | op.drop_index(op.f('ix_studyset_is_test'), table_name='studyset') 42 | op.drop_index(op.f('ix_deck_public'), table_name='deck') 43 | op.alter_column('deck', 'deck_type', 44 | existing_type=decktype_enum, 45 | type_=sa.BOOLEAN(), 46 | new_column_name='public', 47 | existing_nullable=False, 48 | postgresql_using="(CASE deck_type::text WHEN 'public' then 'true' WHEN 'default' then 'false' WHEN 'hidden' then 'false' ELSE deck_type::text END)::boolean") 49 | # ### end Alembic commands ### 50 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/6c00659b79c1_rename_test_mode_to_new_test_mode.py: -------------------------------------------------------------------------------- 1 | """rename test_mode to new_test_mode 2 | 3 | Revision ID: 6c00659b79c1 4 | Revises: 4afdb8d415dd 5 | Create Date: 2021-06-19 17:05:45.039087 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '6c00659b79c1' 14 | down_revision = '4afdb8d415dd' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.add_column('user', sa.Column('next_test_mode', sa.SmallInteger(), nullable=False, server_default=sa.text("'0'::smallint"))) 22 | op.drop_column('user', 'test_mode') 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade(): 27 | # ### commands auto generated by Alembic - please adjust! ### 28 | op.add_column('user', sa.Column('test_mode', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=False)) 29 | op.drop_column('user', 'next_test_mode') 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/7a19b60fa20c_add_mnemonic_logging_enums_to_history.py: -------------------------------------------------------------------------------- 1 | """Add mnemonic logging enums to history 2 | 3 | Revision ID: 7a19b60fa20c 4 | Revises: 7cf86cd78103 5 | Create Date: 2024-01-16 19:50:47.878556 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '7a19b60fa20c' 14 | down_revision = '7cf86cd78103' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.execute("ALTER TYPE log ADD VALUE 'mnemonic_learning_feedback'") 22 | op.execute("ALTER TYPE log ADD VALUE 'mnemonic_comparison_feedback'") 23 | pass 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade(): 28 | # ### commands auto generated by Alembic - please adjust! 
### 29 | pass 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/7a86599a211c_add_deleted_deck_type_and_fix_column_defaults_names.py: -------------------------------------------------------------------------------- 1 | """add deleted deck type and fix column defaults names 2 | 3 | Revision ID: 7a86599a211c 4 | Revises: 7e18d411320e 5 | Create Date: 2023-10-29 18:29:05.978179 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '7a86599a211c' 14 | down_revision = '7e18d411320e' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.execute("ALTER TYPE decktype ADD VALUE 'deleted'") 22 | op.alter_column('studyset', 'set_type', 23 | existing_type=postgresql.ENUM('test', 'post_test', 'normal', name='settype'), 24 | nullable=False, 25 | existing_server_default=sa.text("'normal'::settype")) 26 | op.alter_column('studyset', 'repetition_model', 27 | existing_type=postgresql.ENUM('leitner', 'sm2', 'karl100', 'karl50', 'karl85', 'karl', 'settles', 'fsrs', 'karlAblation', name='repetition'), 28 | nullable=False) 29 | op.alter_column('user_deck', 'temp_repetition_model_override', new_column_name='repetition_model_override') 30 | 31 | # ### end Alembic commands ### 32 | 33 | 34 | def downgrade(): 35 | # ### commands auto generated by Alembic - please adjust! 
### 36 | op.alter_column('user_deck', 'repetition_model_override', new_column_name='temp_repetition_model_override') 37 | op.alter_column('studyset', 'repetition_model', 38 | existing_type=postgresql.ENUM('leitner', 'sm2', 'karl100', 'karl50', 'karl85', 'karl', 'settles', 'fsrs', 'karlAblation', name='repetition'), 39 | nullable=True) 40 | op.alter_column('studyset', 'set_type', 41 | existing_type=postgresql.ENUM('test', 'post_test', 'normal', name='settype'), 42 | nullable=True, 43 | existing_server_default=sa.text("'normal'::settype")) 44 | # ### end Alembic commands ### 45 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/7c432afcbe9e_add_show_mnemonic_help_column_to_user_.py: -------------------------------------------------------------------------------- 1 | """Add show_mnemonic_help column to user table 2 | 3 | Revision ID: 7c432afcbe9e 4 | Revises: d30ad91b8772 5 | Create Date: 2024-02-28 01:22:43.397472 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '7c432afcbe9e' 14 | down_revision = 'd30ad91b8772' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('user', sa.Column('show_mnemonic_help', sa.Boolean(), nullable=True, server_default='true')) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! 
### 27 | op.drop_column('user', 'show_mnemonic_help') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/7cf86cd78103_remove_viewed_mnemonic_column.py: -------------------------------------------------------------------------------- 1 | """Remove viewed_mnemonic column 2 | 3 | Revision ID: 7cf86cd78103 4 | Revises: ab7e7e37b4cf 5 | Create Date: 2023-12-26 22:03:52.005680 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '7cf86cd78103' 14 | down_revision = 'ab7e7e37b4cf' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | op.drop_column('mnemonic', 'viewed_mnemonic') 21 | 22 | 23 | 24 | def downgrade(): 25 | op.add_column('mnemonic', sa.Column("viewed_mnemonic", sa.Boolean(), nullable=True)) 26 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/7e169b59629a_is_correct_should_be_nullable_create_.py: -------------------------------------------------------------------------------- 1 | """is_correct should be nullable, create new correct column 2 | 3 | Revision ID: 7e169b59629a 4 | Revises: 47a0d502a95a 5 | Create Date: 2022-05-16 04:32:56.617688 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '7e169b59629a' 14 | down_revision = '47a0d502a95a' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.add_column('history', sa.Column('correct', sa.Boolean(), nullable=True)) 22 | op.create_index(op.f('ix_history_correct'), 'history', ['correct'], unique=False) 23 | op.drop_index('ix_history_is_correct', table_name='history') 24 | op.drop_column('history', 'is_correct') 25 | # ### end Alembic commands ### 26 | 27 | 28 | def downgrade(): 29 | # ### commands auto generated by Alembic - please adjust! ### 30 | op.add_column('history', sa.Column('is_correct', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=False)) 31 | op.create_index('ix_history_is_correct', 'history', ['is_correct'], unique=False) 32 | op.drop_index(op.f('ix_history_correct'), table_name='history') 33 | op.drop_column('history', 'correct') 34 | # ### end Alembic commands ### 35 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/94625a55cb65_remove_next_test_mode_and_last_test_.py: -------------------------------------------------------------------------------- 1 | """remove next_test_mode and last_test_date columns 2 | 3 | Revision ID: 94625a55cb65 4 | Revises: 19a191ff920c 5 | Create Date: 2022-06-15 20:04:19.400986 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '94625a55cb65' 14 | down_revision = '19a191ff920c' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.drop_column('user', 'next_test_mode') 22 | op.drop_column('user', 'last_test_date') 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade(): 27 | # ### commands auto generated by Alembic - please adjust! 
### 28 | op.add_column('user', sa.Column('last_test_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)) 29 | op.add_column('user', sa.Column('next_test_mode', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=False)) 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/957c6698b8e2_add_retired_column.py: -------------------------------------------------------------------------------- 1 | """add retired column 2 | 3 | Revision ID: 957c6698b8e2 4 | Revises: 6872aa796f4a 5 | Create Date: 2022-05-23 15:49:05.045968 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '957c6698b8e2' 14 | down_revision = '6872aa796f4a' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('studyset', sa.Column('retired', sa.Boolean(), nullable=True)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('studyset', 'retired') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/98309991d026_add_beta_user_and_creation_date_to_user.py: -------------------------------------------------------------------------------- 1 | """Add beta user and creation date to user 2 | 3 | Revision ID: 98309991d026 4 | Revises: fdb5d50f8331 5 | Create Date: 2020-08-27 17:32:52.541227 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = '98309991d026' 13 | down_revision = 'fdb5d50f8331' 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade(): 19 | # ### commands auto generated by Alembic - please adjust! ### 20 | op.add_column('user', sa.Column('beta_user', sa.Boolean(), nullable=False, server_default='false')) 21 | op.add_column('user', sa.Column('create_date', sa.TIMESTAMP(timezone=True), nullable=True)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('user', 'create_date') 28 | op.drop_column('user', 'beta_user') 29 | # ### end Alembic commands ### 30 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/9afc4e3a9bf3_add_suggestion_and_comment_to_suspended.py: -------------------------------------------------------------------------------- 1 | """add suggestion and comment to suspended 2 | 3 | Revision ID: 9afc4e3a9bf3 4 | Revises: e3a211187158 5 | Create Date: 2020-06-25 21:56:53.066974 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '9afc4e3a9bf3' 14 | down_revision = 'e3a211187158' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('suspended', sa.Column('comment', sa.String(), nullable=True)) 22 | op.add_column('suspended', sa.Column('suggestion', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade(): 27 | # ### commands auto generated by Alembic - please adjust! 
### 28 | op.drop_column('suspended', 'suggestion') 29 | op.drop_column('suspended', 'comment') 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/ab7e7e37b4cf_add_public_mnemonic_to_decktype_enum.py: -------------------------------------------------------------------------------- 1 | """Add public_mnemonic to DeckType enum 2 | 3 | Revision ID: ab7e7e37b4cf 4 | Revises: 641d695f446e 5 | Create Date: 2023-12-26 18:36:28.043481 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'ab7e7e37b4cf' 14 | down_revision = '641d695f446e' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # Add 'public_mnemonic' to the ENUM type in the database 21 | op.execute("ALTER TYPE decktype ADD VALUE 'public_mnemonic'") 22 | 23 | def downgrade(): 24 | # PostgreSQL doesn't support removing a value from an ENUM directly. 25 | # Downgrade logic is complex and depends on your requirements. 26 | # You might need to create a new ENUM without the 'public_mnemonic' value, 27 | # update the column to the new ENUM, and then drop the old ENUM. 28 | pass -------------------------------------------------------------------------------- /backend/app/alembic/versions/ae31c23bebdf_remove_repetition_model_from_history_.py: -------------------------------------------------------------------------------- 1 | """remove repetition_model from history (put in details) 2 | 3 | Revision ID: ae31c23bebdf 4 | Revises: 250c2f90374e 5 | Create Date: 2020-04-28 20:01:09.686488 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = 'ae31c23bebdf' 14 | down_revision = '250c2f90374e' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.drop_column('history', 'repetition_model') 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.add_column('history', sa.Column('repetition_model', postgresql.ENUM('leitner', 'sm2', 'karl', name='repetition'), autoincrement=False, nullable=False)) 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/b7b8acac9d4f_history_fact_id_should_be_optional.py: -------------------------------------------------------------------------------- 1 | """history fact.id should be optional 2 | 3 | Revision ID: b7b8acac9d4f 4 | Revises: ffdf470fe0ba 5 | Create Date: 2020-06-17 19:31:18.958679 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'b7b8acac9d4f' 14 | down_revision = 'ffdf470fe0ba' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.alter_column('history', 'fact_id', 22 | existing_type=sa.INTEGER(), 23 | nullable=True) 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade(): 28 | # ### commands auto generated by Alembic - please adjust! 
def upgrade():
    # Index the fact columns that are filtered/search on most often.
    indexed_columns = ('category', 'deck_id', 'identifier', 'user_id')
    for column in indexed_columns:
        op.create_index(op.f(f'ix_fact_{column}'), 'fact', [column], unique=False)
"""karl -> karl100

Revision ID: ca02ff820178
Revises: 98309991d026
Create Date: 2020-10-07 05:25:05.042850

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'ca02ff820178'
down_revision = '98309991d026'
branch_labels = None
depends_on = None


def _rename_repetition_value(old: str, new: str) -> None:
    """Rename one value of the `repetition` enum, outside Alembic's transaction."""
    # Closing the implicit transaction first matches the project's other
    # enum migrations; enum DDL is restricted inside transactions on
    # older PostgreSQL versions.
    op.execute("COMMIT")
    op.execute(f"ALTER TYPE repetition RENAME VALUE '{old}' TO '{new}'")


def upgrade():
    _rename_repetition_value('karl', 'karl100')


def downgrade():
    _rename_repetition_value('karl100', 'karl')
# revision identifiers, used by Alembic.
revision = 'd30ad91b8772'
down_revision = '7a19b60fa20c'
branch_labels = None
depends_on = None


def upgrade():
    # Close Alembic's implicit transaction first: ALTER TYPE ... ADD VALUE
    # is rejected inside a transaction block on PostgreSQL < 12. Every
    # other enum migration in this project (ca02ff820178, e3a211187158,
    # f007a4240a14, fdb5d50f8331) does the same; this one was missing it.
    op.execute("COMMIT")
    op.execute("ALTER TYPE decktype ADD VALUE 'sanity_check' AFTER 'public_mnemonic'")


def downgrade():
    # PostgreSQL has no ALTER TYPE ... DROP VALUE; intentionally a no-op.
    pass
"""edit debug_id to string

Revision ID: df7ea91f1589
Revises: 94625a55cb65
Create Date: 2022-07-05 20:55:56.509990

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'df7ea91f1589'
down_revision = '94625a55cb65'
branch_labels = None
depends_on = None


def upgrade():
    # Widen studyset.debug_id from INTEGER to free-form text.
    op.alter_column(
        'studyset', 'debug_id',
        existing_type=sa.INTEGER(), type_=sa.String(), existing_nullable=True,
    )


def downgrade():
    # Narrow back to INTEGER. NOTE(review): presumably existing values are
    # all numeric at downgrade time; a non-numeric debug_id would make the
    # cast fail — confirm before running.
    op.alter_column(
        'studyset', 'debug_id',
        existing_type=sa.String(), type_=sa.INTEGER(), existing_nullable=True,
    )
# revision identifiers, used by Alembic.
revision = 'e0fa43c0f38d'
down_revision = '6c00659b79c1'
branch_labels = None
depends_on = None


def upgrade():
    # Timezone-aware timestamp of the user's most recent test; nullable
    # because existing users have no recorded test date.
    last_test_col = sa.Column('last_test_date', sa.TIMESTAMP(timezone=True), nullable=True)
    op.add_column('user', last_test_col)


def downgrade():
    op.drop_column('user', 'last_test_date')
def upgrade():
    # One row of mnemonic feedback per (user, fact, studyset) interaction.
    op.create_table(
        "mnemonic",
        sa.Column("id", sa.Integer(), nullable=False, index=True),
        sa.Column("study_id", sa.Integer(), nullable=False),
        sa.Column("fact_id", sa.Integer(), nullable=False),
        # NOTE(review): user_id has no ForeignKeyConstraint, unlike
        # study_id/fact_id — confirm whether that is intentional.
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("viewed_mnemonic", sa.Boolean(), nullable=True),
        sa.Column("user_rating", sa.Integer(), nullable=True),
        sa.Column("is_offensive", sa.Boolean(), nullable=True),
        sa.Column("is_incorrect_definition", sa.Boolean(), nullable=True),
        sa.Column("is_difficult_to_understand", sa.Boolean(), nullable=True),
        sa.Column("is_bad_keyword_link", sa.Boolean(), nullable=True),
        sa.Column("create_date", sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column("correct", sa.Boolean(), index=True),
        sa.ForeignKeyConstraint(["fact_id"], ["fact.fact_id"]),
        sa.ForeignKeyConstraint(["study_id"], ["studyset.id"]),
        sa.PrimaryKeyConstraint("id"),
    )


def downgrade():
    op.drop_table("mnemonic")
# revision identifiers, used by Alembic.
revision = 'e3a211187158'
down_revision = 'b7b8acac9d4f'
branch_labels = None
depends_on = None

# New entries for the `log` enum, added in this exact order.
_NEW_LOG_VALUES = (
    'browser',
    'get_facts',
    'update_fact',
    'mark',
    'undo_suspend',
    'undo_report',
    'undo_mark',
    'clear_report_or_suspend',
    'assign_viewer',
)


def upgrade():
    # Close the implicit transaction before enum DDL (same pattern as the
    # project's other ALTER TYPE migrations).
    op.execute("COMMIT")
    for value in _NEW_LOG_VALUES:
        op.execute(f"ALTER TYPE log ADD VALUE '{value}'")


def downgrade():
    # PostgreSQL cannot drop enum values in place; intentionally a no-op.
    pass
"""add enum values

Revision ID: f007a4240a14
Revises: 4c116a99c4ee
Create Date: 2022-08-04 21:57:35.267113

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'f007a4240a14'
down_revision = '4c116a99c4ee'
branch_labels = None
depends_on = None


def upgrade():
    # COMMIT closes Alembic's implicit transaction before the enum DDL,
    # matching the project's other ALTER TYPE migrations.
    op.execute("COMMIT")
    for value in ('karl', 'settles'):
        op.execute(f"ALTER TYPE repetition ADD VALUE '{value}'")


def downgrade():
    # Enum values cannot be dropped in place on PostgreSQL; no-op.
    pass
# revision identifiers, used by Alembic.
revision = 'fdcf07aac389'
down_revision = '491383f70589'
branch_labels = None
depends_on = None


def upgrade():
    # New per-user preference; defaults to true for existing rows via the
    # server default.
    show_help_col = sa.Column('show_help', sa.Boolean(), nullable=True, server_default='true')
    op.add_column('user', show_help_col)


def downgrade():
    op.drop_column('user', 'show_help')
# revision identifiers, used by Alembic.
revision = 'ffdf470fe0ba'
down_revision = 'c8bcaad53b52'
branch_labels = None
depends_on = None


def upgrade():
    # GIN index over a tsvector built from the searchable fact fields,
    # enabling full-text search on facts.
    fts_expression = sa.text(
        "to_tsvector('english'::regconfig, "
        "text || ' ' || answer || ' ' || category || ' ' || identifier)"
    )
    op.create_index(
        op.f('idx_fact_fts'),
        'fact',
        [fts_expression],
        postgresql_using='gin',
    )


def downgrade():
    op.drop_index(op.f('idx_fact_fts'), table_name='fact')
api_router.include_router(utils.router, prefix="/utils", tags=["utils"]) 8 | api_router.include_router(decks.router, prefix="/decks", tags=["decks"]) 9 | api_router.include_router(facts.router, prefix="/facts", tags=["facts"]) 10 | api_router.include_router(mnemonics.router, prefix="/mnemonics", tags=["mnemonics"]) 11 | api_router.include_router(study.router, prefix="/study", tags=["study"]) 12 | api_router.include_router(statistics.router, prefix="/statistics", tags=["statistics"]) 13 | -------------------------------------------------------------------------------- /backend/app/app/api/api_v1/endpoints/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/backend/app/app/api/api_v1/endpoints/__init__.py -------------------------------------------------------------------------------- /backend/app/app/api/api_v1/endpoints/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from app import models, schemas 4 | from app.api import deps 5 | from app.core.celery_app import celery_app 6 | from app.utils.utils import send_test_email 7 | from fastapi import APIRouter, Depends 8 | from pydantic.networks import EmailStr 9 | 10 | router = APIRouter() 11 | 12 | 13 | @router.post("/test-celery/", response_model=schemas.Msg, status_code=201) 14 | def test_celery( 15 | msg: schemas.Msg, 16 | current_user: models.User = Depends(deps.get_current_active_superuser), 17 | ) -> Any: 18 | """ 19 | Test Celery worker. 
import logging

from app.db.session import SessionLocal
from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

max_tries = 60 * 5  # keep retrying for up to 5 minutes
wait_seconds = 1


@retry(
    stop=stop_after_attempt(max_tries),
    wait=wait_fixed(wait_seconds),
    before=before_log(logger, logging.INFO),
    after=after_log(logger, logging.WARN),
)
def init() -> None:
    """Block (with retries) until the database answers a trivial query."""
    try:
        session = SessionLocal()
        # A cheap round-trip proves the DB is reachable and accepting queries.
        session.execute("SELECT 1")
    except Exception as exc:
        logger.error(exc)
        raise exc


def main() -> None:
    """Entry point: wait for the DB, logging start and finish."""
    logger.info("Initializing service")
    init()
    logger.info("Service finished initializing")


if __name__ == "__main__":
    main()
logging.getLogger(__name__) 8 | 9 | max_tries = 60 * 5 # 5 minutes 10 | wait_seconds = 1 11 | 12 | 13 | @retry( 14 | stop=stop_after_attempt(max_tries), 15 | wait=wait_fixed(wait_seconds), 16 | before=before_log(logger, logging.INFO), 17 | after=after_log(logger, logging.WARN), 18 | ) 19 | def init() -> None: 20 | try: 21 | # Try to create session to check if DB is awake 22 | db = SessionLocal() 23 | db.execute("SELECT 1") 24 | except Exception as e: 25 | logger.error(e) 26 | raise e 27 | 28 | 29 | def main() -> None: 30 | logger.info("Initializing service") 31 | init() 32 | logger.info("Service finished initializing") 33 | 34 | 35 | if __name__ == "__main__": 36 | main() 37 | -------------------------------------------------------------------------------- /backend/app/app/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/backend/app/app/core/__init__.py -------------------------------------------------------------------------------- /backend/app/app/core/celery_app.py: -------------------------------------------------------------------------------- 1 | from celery import Celery 2 | 3 | celery_app = Celery("worker", broker="amqp://guest@queue//") 4 | 5 | celery_app.conf.task_routes = {"app.worker.*": "main-queue"} 6 | -------------------------------------------------------------------------------- /backend/app/app/core/security.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from typing import Any, Union, Optional 3 | 4 | from app.core.config import settings 5 | from jose import jwt 6 | from passlib.context import CryptContext 7 | 8 | pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") 9 | 10 | ALGORITHM = "HS256" 11 | 12 | 13 | def create_access_token( 14 | subject: Union[str, Any], expires_delta: 
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True if *plain_password* matches *hashed_password* under the
    module's passlib context (bcrypt scheme)."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password: str) -> str:
    """Hash *password* with the module's passlib CryptContext (bcrypt)."""
    return pwd_context.hash(password)
class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    def __init__(self, model: Type[ModelType]):
        """
        CRUD object with default methods to Create, Read, Update, Delete (CRUD).

        **Parameters**

        * `model`: A SQLAlchemy model class
        * `schema`: A Pydantic model (schema) class
        """
        self.model = model

    def get(self, db: Session, id: Any) -> Optional[ModelType]:
        """Return the row whose primary key equals *id*, or None if absent."""
        db_obj = db.query(self.model).filter(self.model.id == id).first()
        return db_obj

    def get_multi(
        self, db: Session, *, skip: Optional[int] = None, limit: Optional[int] = None
    ) -> List[ModelType]:
        """Return up to *limit* rows, skipping the first *skip* rows.

        Bug fix: the previous implementation applied *limit* with
        ``query.offset(limit)``, which skipped rows instead of capping the
        result size (and, combined with *skip*, produced a double offset).
        """
        query = db.query(self.model)
        if skip:
            query = query.offset(skip)
        if limit:
            query = query.limit(limit)  # was .offset(limit) — wrong method
        return query.all()

    def create(self, db: Session, *, obj_in: CreateSchemaType) -> ModelType:
        """Insert a new row built from *obj_in* and return the refreshed object."""
        obj_in_data = jsonable_encoder(obj_in)
        db_obj = self.model(**obj_in_data)
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def update(
        self,
        db: Session,
        *,
        db_obj: ModelType,
        obj_in: Union[UpdateSchemaType, Dict[str, Any]]
    ) -> ModelType:
        """Apply the fields of *obj_in* (schema or plain dict) onto *db_obj*
        and persist the change.

        Only attributes that already exist on *db_obj* are set; for schema
        input, unset fields are excluded so partial updates work.
        """
        # https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/430#issue-1028952561
        if isinstance(obj_in, dict):
            update_data = obj_in
        else:
            update_data = obj_in.dict(exclude_unset=True)
        for field in update_data:
            if hasattr(db_obj, field):
                setattr(db_obj, field, update_data[field])
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj
class CRUDHistory(
    CRUDBase[models.History, schemas.HistoryCreate, schemas.HistoryUpdate]
):
    def get_with_debug(self, db: Session, debug_id: str):
        """Return the (unevaluated) query for history rows whose JSONB
        ``details.debug_id`` equals *debug_id*."""
        debug = db.query(self.model).filter(
            self.model.details["debug_id"].astext == debug_id
        )
        return debug

    # TODO: Add relearning count/better distinction between initial and subsequent relearning
    def get_user_study_count(self, user: models.User):
        """Count the user's history entries with log_type == "study".

        NOTE(review): iterates the loaded ``user.history`` relationship in
        Python rather than issuing a COUNT query — fine for small histories,
        potentially heavy for large ones.
        """
        return len(
            [
                history_item
                for history_item in user.history
                if history_item.log_type == "study"
            ]
        )

    def get_user_test_study_count(self, user: models.User):
        """Count the user's history entries with log_type == "test_study"."""
        return len(
            [
                history_item
                for history_item in user.history
                if history_item.log_type == "test_study"
            ]
        )

    def get_test_mode_counts(self, db: Session):
        """Return (User, num_test_modes_completed, last_study_date) tuples for
        users who completed at least 3 test modes, most-completed first.

        A "completed test mode" is derived as count-of-qualifying-history-rows
        divided by 10 (presumably 10 responses per test mode — confirm).
        Qualifying rows have details.response == "true" and details.set_type
        in ("test", "post_test"); rows are joined to StudySet via the JSONB
        details.studyset_id cast to Integer.
        """
        subquery = (
            db.query(
                models.History.user_id,
                (func.count(models.History.id) / 10).label("num_test_modes_completed"),
                func.max(models.StudySet.create_date).label(
                    "last_study_date"
                ),  # Getting the max date from the studyset table
            )
            .join(
                models.StudySet,
                cast(models.History.details["studyset_id"].astext, Integer)
                == models.StudySet.id,
            )
            .filter(models.History.details["response"].astext == "true")
            .filter(
                models.History.details["set_type"].astext.in_(["test", "post_test"])
            )
            .group_by(models.History.user_id)
            .subquery()
        )

        data = (
            db.query(
                models.User,
                subquery.c.num_test_modes_completed,
                subquery.c.last_study_date,
            )
            .join(models.User, models.User.id == subquery.c.user_id)
            .filter(subquery.c.num_test_modes_completed >= 3)
            .order_by(desc(subquery.c.num_test_modes_completed))
        )

        return data.all()


history = CRUDHistory(models.History)
4 | -------------------------------------------------------------------------------- /backend/app/app/data/train_tfidf.py: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:37cb10a32c06df525f8e827991cf314a5e7d053103509262323fb72e26b89065 3 | size 476 4 | -------------------------------------------------------------------------------- /backend/app/app/data/train_tfidf_vectorizer.py: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:37cb10a32c06df525f8e827991cf314a5e7d053103509262323fb72e26b89065 3 | size 476 4 | -------------------------------------------------------------------------------- /backend/app/app/data/vocab_rlhf_testing.json: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:008a180e578d188ef63304dc5556e69e72ac4799f82d5c707cad8306ea04bd9e 3 | size 349526 4 | -------------------------------------------------------------------------------- /backend/app/app/db/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/backend/app/app/db/__init__.py -------------------------------------------------------------------------------- /backend/app/app/db/base.py: -------------------------------------------------------------------------------- 1 | # Import all the models, so that Base has them before being 2 | # imported by Alembic 3 | from app.db.base_class import Base # noqa 4 | from app.models.fact import Fact # noqa 5 | from app.models.user import User # noqa 6 | -------------------------------------------------------------------------------- /backend/app/app/db/base_class.py: 
from typing import Any

from sqlalchemy.ext.declarative import as_declarative, declared_attr


@as_declarative()
class Base:
    """Declarative base shared by all ORM models.

    Concrete models inherit an automatically derived ``__tablename__``
    (the lowercased class name), so they never declare it themselves.
    """

    # Populated by each concrete model's primary-key Column.
    id: Any
    __name__: str

    # Generate __tablename__ automatically from the class name.
    @declared_attr
    def __tablename__(cls) -> str:
        return cls.__name__.lower()
crud.user.super_user_create(db, obj_in=user_in) # noqa: F841 40 | 41 | crud.deck.assign_owner(db, db_obj=deck, user=user) 42 | logger.info("Sending celery task") 43 | celery_app.send_task("app.worker.load_jeopardy_facts") 44 | celery_app.send_task("app.worker.load_quizbowl_facts") 45 | celery_app.send_task("app.worker.create_test_mode_facts", kwargs={"filename": settings.TEST_MODE_FILE}) 46 | -------------------------------------------------------------------------------- /backend/app/app/db/session.py: -------------------------------------------------------------------------------- 1 | from app.core.config import settings 2 | from sqlalchemy import create_engine 3 | from sqlalchemy.orm import sessionmaker 4 | 5 | engine = create_engine(settings.SQLALCHEMY_DATABASE_URI, pool_pre_ping=True) 6 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) 7 | -------------------------------------------------------------------------------- /backend/app/app/email-templates/src/reset_password.mjml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | {{ project_name }} - Password Recovery 7 | We received a request to recover the password for user {{ username }} 8 | with email {{ email }} 9 | Reset your password by clicking the button below: 10 | Reset Password 11 | Or open the following link: 12 | {{ link }} 13 | 14 | The reset password link / button will expire in {{ valid_hours }} hours. 15 | If you didn't request a password recovery you can disregard this email. 
import logging

from app.db.init_db import init_db
from app.db.session import SessionLocal

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def init() -> None:
    """Open a database session and seed it with the initial data."""
    init_db(SessionLocal())


def main() -> None:
    """Script entry point: seed the database, logging progress."""
    logger.info("Creating initial data")
    init()
    logger.info("Initial data created")


if __name__ == "__main__":
    main()
import json
from typing import Union

import requests
from sentry_sdk import capture_exception

from app import models, schemas, crud
from app.core.config import settings
from sqlalchemy.orm import Session
from app.utils.utils import logger, log_time, time_it


def change_assignment(user: models.User, repetition_model: schemas.Repetition) -> Union[
        int, requests.exceptions.RequestException]:
    """Ask the KARL scheduler service to switch *user* to *repetition_model*.

    Returns the HTTP status code on success. On a network/HTTP-level
    failure the exception is reported to Sentry and returned (not raised)
    so callers can inspect it.
    """
    parameters = {'user_id': user.id, 'env': settings.ENVIRONMENT, 'repetition_model': repetition_model}
    try:
        request = requests.put(f"{settings.INTERFACE}api/karl/set_repetition_model/", params=parameters)
        logger.info(request.url)
        return request.status_code
    except requests.exceptions.RequestException as e:
        # Best-effort: report and hand the error back instead of crashing.
        capture_exception(e)
        return e
    # NOTE: the previous json.decoder.JSONDecodeError handler was removed —
    # nothing here parses JSON, so that branch was unreachable.
def set_user_settings(user: models.User, new_settings: schemas.UserUpdate) -> Union[
        int, requests.exceptions.RequestException]:
    """Push the user's updated scheduler parameters to the KARL service.

    ``recall_target`` arrives as a percentage (0-100) and is converted to a
    fraction before sending. Returns the HTTP status code on success; on a
    network failure the exception is captured to Sentry and returned.
    """
    params = schemas.SetParametersSchema(
        env=settings.ENVIRONMENT,
        recall_target=new_settings.recall_target / 100,
        repetition_model=new_settings.repetition_model,
    )
    try:
        # user_id travels in the query string, the parameters in the JSON body.
        request = requests.put(f"{settings.INTERFACE}api/karl/set_params?user_id={user.id}", json=params.dict())
        logger.info(request.url)
        return request.status_code
    except requests.exceptions.RequestException as e:
        capture_exception(e)
        return e
    # NOTE: removed an unused local `parameters` dict (built but never sent)
    # and an unreachable json.decoder.JSONDecodeError handler (no JSON is
    # ever parsed in this function).
def use_route_names_as_operation_ids(app: FastAPI) -> None:
    """
    Simplify operation IDs so that generated API clients have simpler function
    names.

    Should be called only after all routes have been added.
    """
    api_routes = (r for r in app.routes if isinstance(r, APIRoute))
    for api_route in api_routes:
        api_route.operation_id = api_route.name


use_route_names_as_operation_ids(app)
class Deleted(Base):
    """Audit record: which user deleted which fact, and when."""

    id = Column(Integer, primary_key=True, index=True)
    fact_id = Column(Integer, ForeignKey("fact.fact_id"), nullable=False)
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    # Timezone-aware timestamp of the deletion.
    date_deleted = Column(TIMESTAMP(timezone=True), nullable=False)

    # delete-orphan cascade: removing the user or the fact removes these rows.
    deleter = relationship("User", backref=backref("deletions", cascade="all, delete-orphan"))
    deleted_fact = relationship("Fact", backref=backref("deletions", cascade="all, delete-orphan"))

    def __init__(self, deleter: User, deleted_fact: Fact, date_deleted: datetime):
        self.deleted_fact = deleted_fact
        self.deleter = deleter
        self.date_deleted = date_deleted
class History(Base):
    """Log of user activity events.

    Each row ties a user (and optionally a fact) to a ``Log`` event type at
    a point in time, with free-form JSONB ``details``.
    """

    id = Column(Integer, primary_key=True, index=True)
    time = Column(TIMESTAMP(timezone=True), nullable=False)
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False, index=True)
    # Nullable: some log types are not tied to a specific fact.
    fact_id = Column(Integer, ForeignKey("fact.fact_id"), index=True)
    log_type = Column(Enum(Log), nullable=False)
    # Nullable — presumably only meaningful for study events; TODO confirm.
    correct = Column(Boolean(), index=True)
    details = Column(JSONB)

    fact = relationship("Fact", back_populates="history")
    user = relationship("User", back_populates="history")
class Mnemonic(Base):
    """User feedback about a mnemonic, keyed by study set, fact, and user."""

    id = Column(Integer, primary_key=True, index=True)
    study_id = Column(Integer, ForeignKey("studyset.id"))
    fact_id = Column(Integer, ForeignKey("fact.fact_id"))
    user_id = Column(Integer, ForeignKey("user.id"))

    # Optional numeric rating given by the user.
    user_rating = Column(Integer, nullable=True)

    # Optional flags for why a mnemonic was considered bad.
    is_offensive = Column(Boolean, nullable=True)
    is_incorrect_definition = Column(Boolean, nullable=True)
    is_difficult_to_understand = Column(Boolean, nullable=True)
    is_bad_keyword_link = Column(Boolean, nullable=True)
    is_bad_for_other_reason = Column(Boolean, nullable=True)
    other_reason_text = Column(String, nullable=True)

    # NOTE(review): presumably whether the associated answer was correct —
    # confirm against the endpoint that writes this row.
    correct = Column(Boolean(), index=True)

    create_date = Column(TIMESTAMP(timezone=True), nullable=True)
class Reported(Base):
    """A user's report about a fact, with an optional suggested correction."""

    id = Column(Integer, primary_key=True, index=True)
    fact_id = Column(Integer, ForeignKey("fact.fact_id"), nullable=False)
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    date_reported = Column(TIMESTAMP(timezone=True), nullable=False)
    # Suggested replacement content (a serialized FactToReport).
    suggestion = Column(JSONB)

    # delete-orphan cascade: removing the user or the fact removes reports.
    reporter = relationship("User", backref=backref("reporteds", cascade="all, delete-orphan"))
    reported_fact = relationship("Fact", backref=backref("reporteds", cascade="all, delete-orphan"))

    def __init__(self, reporter: User, reported_fact: Fact, date_reported: datetime,
                 suggestion: FactToReport):
        self.reported_fact = reported_fact
        self.reporter = reporter
        self.date_reported = date_reported
        # Pydantic model -> plain dict so it can be stored in the JSONB column.
        self.suggestion = suggestion.dict()
class Suspended(Base):
    """Record that a user suspended a fact from their study rotation, and when."""

    id = Column(Integer, primary_key=True, index=True)
    fact_id = Column(Integer, ForeignKey("fact.fact_id"), nullable=False)
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    date_suspended = Column(TIMESTAMP(timezone=True), nullable=False)

    # delete-orphan cascade: removing the user or the fact removes these rows.
    suspender = relationship("User", backref=backref("suspensions", cascade="all, delete-orphan"))
    suspended_fact = relationship("Fact", backref=backref("suspensions", cascade="all, delete-orphan"))

    def __init__(self, suspender: User, suspended_fact: Fact, date_suspended: datetime):
        self.suspended_fact = suspended_fact
        self.suspender = suspender
        self.date_suspended = date_suspended
# user_deck = Table("user_deck", Base.metadata,
#                   Column("user_id", Integer, ForeignKey("user.id"), primary_key=True),
#                   Column("deck_id", Integer, ForeignKey("deck.id"), primary_key=True)
#                   )
class User_Deck(Base):
    """Association object linking a user to a deck with per-link metadata.

    Composite primary key (deck_id, owner_id); carries the user's
    permission level, completion state, and an optional per-deck
    repetition-model override.
    """

    deck_id = Column(Integer, ForeignKey("deck.id"), primary_key=True)
    owner_id = Column(Integer, ForeignKey("user.id"), primary_key=True)
    permissions = Column(Enum(Permission), nullable=False, default=Permission.viewer)
    completed = Column(Boolean)
    # When set, overrides the user's global repetition model for this deck.
    repetition_model_override = Column(Enum(Repetition))

    user = relationship("User", back_populates="user_decks")
    deck = relationship("Deck", back_populates="user_decks")

    def __init__(self, deck: Deck, user: User, permissions: Permission, repetition_model_override: Optional[Repetition] = None):
        self.deck = deck
        self.user = user
        self.permissions = permissions
        self.repetition_model_override = repetition_model_override
-------------------------------------------------------------------------------- 1 | from .deck_type import DeckType 2 | from .set_type import SetType 3 | from .fact import Fact, FactCreate, FactInDB, FactUpdate, KarlFact, KarlFactUpdate, FactSearch, FactBrowse, FactReported, FactToReport, KarlFactV2, SchedulerQuery, UpdateRequestV2 4 | from .msg import Msg 5 | from .token import Token, TokenPayload 6 | from .user import User, UserCreate, UserInDB, UserUpdate, SuperUserCreate, SuperUserUpdate, UserWithStudySet 7 | from .deck import Deck, DeckCreate, DeckUpdate, DeckInDB, SuperDeckCreate, SuperDeckUpdate 8 | from .history import History, HistoryCreate, HistoryUpdate, TestHistoryCreate, TestHistoryUpdate 9 | from .repetition import Repetition 10 | from .suspend_type import SuspendType 11 | from .permission import Permission 12 | from .log import Log 13 | from .mnemonic import MnemonicLearningFeedbackLog, MnemonicComparisonFeedbackLog, MnemonicFeedback, MnemonicFeedbackDetailed, MnemonicStatistics 14 | from .schedule import Schedule, ScheduleResponse 15 | from .statistics import Statistics 16 | from .field import Field 17 | from .file_props import FileProps 18 | from .rank_type import RankType 19 | from .leaderboard import LeaderboardUser, Leaderboard, DataTypeHeader 20 | from .studyset import StudySet, StudySetCreate, StudySetUpdate 21 | from .set_parameters_schema import SetParametersSchema 22 | from .target_window import TargetWindow 23 | -------------------------------------------------------------------------------- /backend/app/app/schemas/deck.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from fastapi import Query 4 | from pydantic import BaseModel, validator 5 | 6 | from app.schemas import DeckType 7 | 8 | 9 | class DeckBase(BaseModel): 10 | title: Optional[str] = None 11 | 12 | 13 | # Properties to receive on deck creation 14 | class DeckCreate(DeckBase): 15 | title: str 16 | 17 | 
# Properties to receive on creation from super users
class SuperDeckCreate(DeckCreate):
    deck_type: DeckType = DeckType.default
    # hidden: bool = False


# Properties to receive on deck update
class DeckUpdate(DeckBase):
    pass


# Properties to receive on deck update from super users
class SuperDeckUpdate(DeckUpdate):
    deck_type: Optional[DeckType] = None
    # hidden: Optional[bool] = None


# Properties shared by models stored in DB
class DeckInDBBase(DeckBase):
    id: int
    title: str
    deck_type: DeckType
    # hidden: bool


# Properties to return to client
class Deck(DeckInDBBase):
    class Config:
        orm_mode = True  # allow construction directly from ORM objects


# Properties stored in DB
class DeckInDB(DeckInDBBase):
    pass
class BothHistoryBase(BaseModel):
    """Fields shared by regular and test-mode history entries."""

    time: datetime
    user_id: int
    # Optional: some log entries are not tied to a specific fact.
    fact_id: Optional[int] = None
    details: dict


class HistoryBase(BothHistoryBase):
    """Regular study history entry."""

    log_type: Log
    # None when correctness does not apply to the event — TODO confirm.
    correct: Optional[bool] = None


class TestHistoryBase(BothHistoryBase):
    """Test-mode history entry; a response is always required."""

    response: bool


# Properties to receive on history creation
class HistoryCreate(HistoryBase):
    pass


class TestHistoryCreate(TestHistoryBase):
    pass


# Properties to receive on history update
class HistoryUpdate(HistoryBase):
    pass


class TestHistoryUpdate(TestHistoryBase):
    pass


class HistoryInDBBase(HistoryBase):
    id: int


class History(HistoryInDBBase):
    """History entry returned to clients, with the related fact and user."""

    fact: Fact
    user: User

    class Config:
        orm_mode = True
class Log(str, Enum):
    """Event types recorded in the History table."""

    browser = "browser"
    get_facts = "get_facts"
    update_fact = "update_fact"
    update_user = "update_user"
    study = "study"
    suspend = "suspend"
    delete = "delete"
    report = "report"
    mark = "mark"
    # Undo counterparts of the actions above.
    undo_suspend = "undo_suspend"
    undo_delete = "undo_delete"
    undo_report = "undo_report"
    undo_study = "undo_study"  # currently unimplemented
    undo_mark = "undo_mark"
    resolve_report = "resolve_report"
    clear_report_or_suspend = "clear_report_or_suspend"
    assign_viewer = "assign_viewer"
    reassign_model = "reassign_model"
    # Test-mode events.
    test_study = "test_study"
    get_test_facts = "get_test_facts"
    # Mnemonic-study events.
    update_mnemonic = "update_mnemonic"
    get_post_test_facts = "get_post_test_facts"
    post_test_study = "post_test_study"
    mnemonic_learning_feedback = "mnemonic_learning_feedback"
    mnemonic_comparison_feedback = "mnemonic_comparison_feedback"
class MnemonicLearningFeedbackLog(BaseModel):
    """User feedback on a single mnemonic shown during the learning phase."""

    study_id: int
    fact_id: int
    user_id: int
    user_rating: int
    # Flags describing why the mnemonic was rated poorly.
    is_offensive: bool
    is_incorrect_definition: bool
    is_difficult_to_understand: bool
    is_not_memorable: bool
    is_bad_phonetic_keyword: bool
    is_bad_circular_keyword: bool
    is_bad_keyword_explanation: bool
    is_bad_for_other_reason: bool
    other_reason_text: str
    correct: bool
    mnemonic_used_id: str
    mnemonic_used_text: str


class MnemonicComparisonLog(str, Enum):
    """Outcome of an A/B comparison between two mnemonics."""

    a_better = 'a_better'
    b_better = 'b_better'
    equal = 'equal'


class MnemonicComparisonFeedbackLog(BaseModel):
    """User feedback comparing two candidate mnemonics for one fact."""

    study_id: int
    fact_id: int
    user_id: int
    mnemonic_a: str
    mnemonic_b: str
    comparison_rating: Optional[MnemonicComparisonLog] = None
    passed_sanity_check: Optional[bool] = None
    correct: bool


class MnemonicFeedback(BaseModel):
    """Fact ids paired with the user they belong to."""

    fact_ids: List[int]
    user_id: int


class MnemonicFeedbackDetailed(BaseModel):
    """Fact ids split by feedback type (learning vs. comparison)."""

    fact_ids_learning: List[int]
    fact_ids_comparison: List[int]
    user_id: int


class MnemonicStatistics(BaseModel):
    """Per-user mnemonic study statistics."""

    user_id: int
    num_vocab_studied: int
    num_mnemonics_rated: int
class Repetition(str, Enum):
    """Identifiers for the spaced-repetition scheduling models.

    Inherits from ``str`` so members serialize directly as their values.
    Most members are retained only so historical database rows remain
    decodable; only ``fsrs`` and ``karl`` are assigned to new users.
    """

    leitner = "leitner"  # deprecated
    karl = "karl"
    sm2 = "sm-2"  # deprecated
    karl100 = "karl100"  # deprecated
    karl50 = "karl50"  # deprecated
    karl85 = "karl85"  # deprecated
    settles = "settles"  # deprecated
    fsrs = "fsrs"
    karlAblation = "karl-ablation"

    @classmethod
    def select_model(cls):
        """Randomly pick one of the two active models (uniform choice)."""
        return choice([cls.fsrs, cls.karl])
class SetParametersSchema(BaseModel):
    """Tunable scheduler parameters a user may override.

    Every field is optional: omitted fields leave the corresponding
    server-side parameter unchanged.  The original declarations used the
    invalid annotation form ``float = None``; pydantic v1 tolerates it,
    but mypy rejects it, so the Optional wrapper is now explicit.  Runtime
    behavior is identical (pydantic v1 already treated these as optional
    with a None default).
    """

    repetition_model: Optional[str] = None
    card_embedding: Optional[float] = None
    recall: Optional[float] = None
    recall_target: Optional[float] = None
    category: Optional[float] = None
    answer: Optional[float] = None
    leitner: Optional[float] = None
    sm2: Optional[float] = None
    decay_qrep: Optional[float] = None
    cool_down: Optional[float] = None
    cool_down_time_correct: Optional[float] = None
    cool_down_time_wrong: Optional[float] = None
    max_recent_facts: Optional[int] = None
class StudySetBase(BaseModel):
    """Fields shared by every study-set payload."""

    user_id: int
    # Explicit None defaults; pydantic v1 already defaults Optional
    # fields to None, so this is purely declarative.
    debug_id: Optional[str] = None
    repetition_model: Optional[Repetition] = None
    set_type: Optional[SetType] = None


class StudySetCreate(StudySetBase):
    """Creation payload: the repetition model must be resolved up front."""

    repetition_model: Repetition
    set_type: SetType = SetType.normal


class StudySetUpdate(StudySetBase):
    """Update payload; everything optional (inherited from the base)."""

    pass


class StudySetInDBBase(StudySetBase):
    """Fields present once the study set has a database row."""

    id: int


class StudySet(StudySetInDBBase):
    """Full study-set representation returned to the client."""

    user: User
    all_decks: List[Deck]
    all_facts: List[Fact]
    unstudied_facts: List[Fact]
    completed: bool
    num_facts: int
    num_unstudied: int
    is_first_pass: bool
    short_description: str
    expanded_description: str
    # retired: bool
    set_type: SetType

    class Config:
        # Allow construction directly from ORM objects.
        orm_mode = True


class StudySetInDB(StudySetInDBBase):
    """Database-side representation (no extra fields at present)."""

    pass
class SuspendType(str, Enum):
    """Ways a fact can be taken out of a user's study rotation.

    Inherits from ``str`` so members serialize as their plain values.
    """

    delete = "delete"    # permanently removed by the owner
    suspend = "suspend"  # temporarily hidden from study
    report = "report"    # flagged for moderator review
class UserCreate(UserBase):
    """Payload for self-service signup; credentials are mandatory."""

    email: EmailStr
    username: str
    password: str
    repetition_model: Optional[Repetition] = None


class SuperUserCreate(UserCreate):
    """Creation payload available only to superusers."""

    is_superuser: bool = False
    beta_user: bool = False


class UserUpdate(UserBase):
    """Update payload; everything optional (inherited plus the two below)."""

    password: Optional[str] = None
    default_deck_id: Optional[int] = None


class SuperUserUpdate(UserUpdate):
    """Update payload that additionally lets superusers toggle privileges."""

    is_superuser: Optional[bool] = None


class UserInDBBase(UserBase):
    """Fields present once the user has a database row."""

    id: int
    email: EmailStr
    username: str
    is_active: bool
    is_superuser: bool
    show_help: bool
    show_mnemonic_help: bool
    dark_mode: bool
    pwa_tip: bool
    beta_user: bool
    recall_target: int

    class Config:
        orm_mode = True


class User(UserInDBBase):
    """User representation returned via the API, with deck relations."""

    default_deck: Deck
    decks: List[Deck] = []

    class Config:
        orm_mode = True

    # Association proxies return Collection objects that pydantic cannot
    # serialize directly, so "decks" is coerced to a plain list during
    # getter dispatch.
    # See: https://github.com/samuelcolvin/pydantic/issues/380#issuecomment-535112498
    class CustomGetterDict(GetterDict):
        def get(self, item: Any, default: Any) -> Any:
            value = getattr(self._obj, item, default)
            return list(value) if item == "decks" else value

    @classmethod
    def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict:
        return User.CustomGetterDict(obj)


class UserWithStudySet(User):
    """User plus the expiry date of their current study set.

    Could be refactored to embed a StudySet object, but circular schema
    references make that awkward.
    """

    study_set_expiry_date: Optional[datetime]
    # in_test_mode: bool


class UserInDB(UserInDBBase):
    """Database-side representation; includes the password hash."""

    hashed_password: str
def test_celery_worker_test(
    client: TestClient, superuser_token_headers: Dict[str, str]
) -> None:
    """Round-trip a message through the test-celery endpoint.

    Posts a payload as the superuser and checks the canned acknowledgement
    the endpoint returns once the task is queued.
    """
    data = {"msg": "test"}
    r = client.post(
        f"{settings.API_V1_STR}/utils/test-celery/",
        json=data,
        headers=superuser_token_headers,
    )
    response = r.json()
    assert response["msg"] == "Word received"


def test_fact_load(
    client: TestClient, superuser_token_headers: Dict[str, str]
) -> None:
    """Dispatch the background quizbowl fact-loading task.

    Fire-and-forget: the task executes on the worker, so this only
    verifies that dispatching does not raise.  (A large block of dead,
    commented-out inline-loading code was removed here.)
    """
    celery_app.send_task("app.worker.load_quizbowl_facts")
@pytest.fixture(scope="session")
def db() -> Generator:
    """Single SQLAlchemy session shared by the whole test session."""
    yield SessionLocal()


@pytest.fixture(scope="module")
def client() -> Generator:
    """FastAPI test client, recreated once per test module."""
    with TestClient(app) as test_client:
        yield test_client


@pytest.fixture(scope="module")
def superuser_token_headers(client: TestClient) -> Dict[str, str]:
    """Authorization headers for the configured first superuser."""
    return get_superuser_token_headers(client)


@pytest.fixture(scope="module")
def normal_user_token_headers(
    client: TestClient, db: Session
) -> Tuple[Dict[str, str], User]:
    """(headers, user) pair for the standard test user, created on demand."""
    return authentication_token_from_email(
        client=client, email=settings.EMAIL_TEST_USER, db=db
    )
https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/backend/app/app/tests/utils/__init__.py -------------------------------------------------------------------------------- /backend/app/app/tests/utils/deck.py: -------------------------------------------------------------------------------- 1 | from app import crud, models 2 | from app.schemas.deck import DeckCreate 3 | from app.tests.utils.utils import random_lower_string 4 | from sqlalchemy.orm import Session 5 | 6 | 7 | def create_random_deck(db: Session, user: models.User) -> models.Deck: 8 | title = random_lower_string() 9 | deck_in = DeckCreate(title=title) 10 | return crud.deck.create_with_owner(db=db, obj_in=deck_in, user=user) 11 | -------------------------------------------------------------------------------- /backend/app/app/tests/utils/fact.py: -------------------------------------------------------------------------------- 1 | from app import crud, models 2 | from app.schemas.fact import FactCreate 3 | from app.tests.utils.deck import create_random_deck 4 | from app.tests.utils.utils import random_lower_string 5 | from sqlalchemy.orm import Session 6 | 7 | 8 | def create_random_fact(db: Session, user: models.User) -> models.Fact: 9 | text = random_lower_string() 10 | identifier = random_lower_string() 11 | answer = random_lower_string() 12 | deck = create_random_deck(db=db, user=user) 13 | deck_id = deck.id 14 | answer_lines = [answer] 15 | extra = {"type": "Noodles"} 16 | 17 | fact_in = FactCreate(text=text, 18 | answer=answer, 19 | deck_id=deck_id, 20 | answer_lines=answer_lines, 21 | identifier=identifier, 22 | extra=extra) 23 | return crud.fact.create_with_owner(db=db, obj_in=fact_in, user=user) 24 | 25 | 26 | def create_random_fact_with_deck(db: Session, user: models.User, deck: models.Deck) -> models.Fact: 27 | text = random_lower_string() 28 | identifier = random_lower_string() 29 | answer = random_lower_string() 30 | deck_id = deck.id 31 
def user_authentication_headers(
    *, client: TestClient, email: str, password: str
) -> Dict[str, str]:
    """Log in through the access-token endpoint and build Bearer headers."""
    credentials = {"username": email, "password": password}

    response = client.post(
        f"{settings.API_V1_STR}/login/access-token", data=credentials
    )
    token = response.json()["access_token"]
    return {"Authorization": f"Bearer {token}"}


def create_random_user(db: Session) -> User:
    """Insert a user with a random email and password; return the model."""
    email = random_email()
    user_in = UserCreate(
        username=email, email=email, password=random_lower_string()
    )
    return crud.user.create(db=db, obj_in=user_in)
40 | """ 41 | password = random_lower_string() 42 | user = crud.user.get_by_email(db, email=email) 43 | if not user: 44 | user_in_create = UserCreate(username=email, email=email, password=password) 45 | user = crud.user.create(db, obj_in=user_in_create) 46 | else: 47 | user_in_update = UserUpdate(password=password) 48 | user = crud.user.update(db, db_obj=user, obj_in=user_in_update) 49 | 50 | return user_authentication_headers(client=client, email=email, password=password), user 51 | -------------------------------------------------------------------------------- /backend/app/app/tests/utils/utils.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | from typing import Dict 4 | 5 | from app.core.config import settings 6 | from fastapi.testclient import TestClient 7 | 8 | 9 | def random_lower_string() -> str: 10 | return "".join(random.choices(string.ascii_lowercase, k=32)) 11 | 12 | 13 | def random_email() -> str: 14 | return f"{random_lower_string()}@{random_lower_string()}.com" 15 | 16 | 17 | def get_superuser_token_headers(client: TestClient) -> Dict[str, str]: 18 | login_data = { 19 | "username": settings.FIRST_SUPERUSER, 20 | "password": settings.FIRST_SUPERUSER_PASSWORD, 21 | } 22 | r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data) 23 | tokens = r.json() 24 | a_token = tokens["access_token"] 25 | headers = {"Authorization": f"Bearer {a_token}"} 26 | return headers 27 | -------------------------------------------------------------------------------- /backend/app/app/tests_pre_start.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from app.db.session import SessionLocal 4 | from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | logger = logging.getLogger(__name__) 8 | 9 | max_tries = 60 * 5 # 5 minutes 10 | wait_seconds = 1 
def evaluate_answer_cutoff(max_score: float) -> bool:
    """Decide whether a similarity score counts as a correct answer.

    Scores strictly above 0.15 are accepted.  The threshold appears
    empirically tuned for the TF-IDF cosine-similarity scorer used by
    ``run_tfidf`` — confirm before changing.  Simplified from an if/else
    that returned literal True/False.
    """
    return max_score > 0.15
eval_fact.answer.lower().strip() 34 | cleaned_typed = typed.lower().strip() 35 | answer_lines = [answer_line.lower() for answer_line in eval_fact.answer_lines] + [cleaned_back] 36 | max_score = run_tfidf(cleaned_typed, answer_lines) 37 | return evaluate_answer_cutoff(max_score) 38 | -------------------------------------------------------------------------------- /backend/app/mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | plugins = pydantic.mypy, sqlmypy 3 | ignore_missing_imports = True 4 | disallow_untyped_defs = True 5 | -------------------------------------------------------------------------------- /backend/app/prestart.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | 3 | # Let the DB start 4 | python /app/app/backend_pre_start.py 5 | 6 | # Run migrations 7 | alembic upgrade head 8 | 9 | # Create initial data in DB 10 | python /app/app/initial_data.py 11 | -------------------------------------------------------------------------------- /backend/app/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "app" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Admin "] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.10" 9 | uvicorn = "^0.22.0" 10 | fastapi = "^0.100.0" 11 | python-multipart = "^0.0.6" 12 | email-validator = "^1.0.5" 13 | requests = "^2.23.0" 14 | celery = "^4.4.2" 15 | passlib = {extras = ["bcrypt"], version = "^1.7.2"} 16 | tenacity = "^6.1.0" 17 | pydantic = "^1.4" 18 | emails = "^0.6" 19 | gunicorn = "^20.1.0" 20 | jinja2 = "^2.11.2" 21 | psycopg2-binary = "^2.8.5" 22 | alembic = "^1.4.2" 23 | sqlalchemy = "^1.3.16" 24 | pytest = "^7.1.2" 25 | python-jose = {extras = ["cryptography"], version = "^3.1.0"} 26 | sentry_sdk = "^0.14.3" 27 | cython = "^0.29.30" 28 | # numpy = "^1.25.2" 29 | scikit-learn = "^1.3.0" 30 | pytest-timeout = "^1.3.4" 
import json
from collections import defaultdict
import os

# Group the flat test-mode question list by mode number and mark each
# group as a test deck.  Reads app/data/test_mode.json and writes
# app/data/test_mode_grouped.json alongside it.
script_dir = os.path.dirname(os.path.abspath(__file__))
data_folder_path = os.path.join(script_dir, '..', 'app/data')
json_file_path = os.path.join(data_folder_path, 'test_mode.json')
output_file_path = os.path.join(data_folder_path, 'test_mode_grouped.json')

with open(json_file_path, 'r') as infile:
    questions = json.load(infile)

# Bucket every question under its mode number.
by_mode = defaultdict(list)
for question in questions:
    by_mode[question['extra']['mode_num']].append(question)

result = [
    {'mode_num': mode_num, 'questions': grouped, 'is_test': True}
    for mode_num, grouped in by_mode.items()
]

with open(output_file_path, 'w') as outfile:
    json.dump(result, outfile, indent=4)
/usr/bin/env bash 2 | set -e 3 | 4 | python /app/app/celeryworker_pre_start.py 5 | 6 | celery worker -A app.worker -l info -Q main-queue -c 1 7 | -------------------------------------------------------------------------------- /backend/backend.dockerfile: -------------------------------------------------------------------------------- 1 | FROM tiangolo/uvicorn-gunicorn-fastapi:python3.10 2 | 3 | WORKDIR /app/ 4 | 5 | # Install Poetry 6 | RUN curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \ 7 | cd /usr/local/bin && \ 8 | ln -s /opt/poetry/bin/poetry && \ 9 | poetry config virtualenvs.create false 10 | 11 | # Copy poetry.lock* in case it doesn't exist in the repo 12 | COPY ./app/pyproject.toml ./app/poetry.lock* /app/ 13 | 14 | # Allow installing dev dependencies to run tests 15 | ARG INSTALL_DEV=false 16 | RUN bash -c "if [ $INSTALL_DEV == 'true' ] ; then poetry install --no-root ; else poetry install --no-root --no-dev ; fi" 17 | 18 | # For development, Jupyter remote kernel, Hydrogen 19 | # Using inside the container: 20 | # jupyter lab --ip=0.0.0.0 --allow-root --NotebookApp.custom_display_url=http://127.0.0.1:8888 21 | ARG INSTALL_JUPYTER=false 22 | RUN bash -c "if [ $INSTALL_JUPYTER == 'true' ] ; then pip install jupyterlab ; fi" 23 | 24 | COPY ./app /app 25 | ENV PYTHONPATH=/app 26 | -------------------------------------------------------------------------------- /backend/celeryworker.dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10 2 | 3 | WORKDIR /app/ 4 | 5 | # Install Poetry 6 | RUN curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \ 7 | cd /usr/local/bin && \ 8 | ln -s /opt/poetry/bin/poetry && \ 9 | poetry config virtualenvs.create false 10 | 11 | # Copy poetry.lock* in case it doesn't exist in the repo 12 | COPY ./app/pyproject.toml ./app/poetry.lock* /app/ 13 | 14 | # Allow installing dev dependencies to run tests 15 | ARG 
INSTALL_DEV=false 16 | RUN bash -c "if [ $INSTALL_DEV == 'true' ] ; then poetry install --no-root ; else poetry install --no-root --no-dev ; fi" 17 | 18 | # For development, Jupyter remote kernel, Hydrogen 19 | # Using inside the container: 20 | # jupyter lab --ip=0.0.0.0 --allow-root --NotebookApp.custom_display_url=http://127.0.0.1:8888 21 | ARG INSTALL_JUPYTER=false 22 | RUN bash -c "if [ $INSTALL_JUPYTER == 'true' ] ; then pip install jupyterlab ; fi" 23 | 24 | ENV C_FORCE_ROOT=1 25 | 26 | COPY ./app /app 27 | WORKDIR /app 28 | 29 | ENV PYTHONPATH=/app 30 | 31 | COPY ./app/worker-start.sh /worker-start.sh 32 | 33 | RUN chmod +x /worker-start.sh 34 | 35 | CMD ["bash", "/worker-start.sh"] 36 | -------------------------------------------------------------------------------- /frontend/.browserslistrc: -------------------------------------------------------------------------------- 1 | > 1% 2 | last 2 versions 3 | -------------------------------------------------------------------------------- /frontend/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /frontend/.env: -------------------------------------------------------------------------------- 1 | VUE_APP_DOMAIN_DEV=localhost 2 | # VUE_APP_DOMAIN_DEV=local.dockertoolbox.tiangolo.com 3 | # VUE_APP_DOMAIN_DEV=localhost.tiangolo.com 4 | # VUE_APP_DOMAIN_DEV=dev.karl.qanta.org 5 | VUE_APP_DOMAIN_STAG=stag.karl.qanta.org 6 | VUE_APP_DOMAIN_PROD=karl.qanta.org 7 | VUE_APP_NAME=KAR³L 8 | VUE_APP_ENV=development 9 | # VUE_APP_ENV=staging 10 | # VUE_APP_ENV=production 11 | VUE_APP_TEST_MODE_ENABLED=0 12 | VUE_APP_UNVISITED_PAGE_DIRECT=/mnemonic-study 13 | -------------------------------------------------------------------------------- /frontend/.eslintrc.js: -------------------------------------------------------------------------------- 1 | const OFF = 0, 2 | WARN = 1, 3 | 
ERROR = 2; 4 | 5 | module.exports = { 6 | root: true, 7 | env: { 8 | node: true, 9 | }, 10 | ignorePatterns: [ 11 | "!.eslintrc.js", 12 | "!.prettierrc.js", 13 | "node_modules/", 14 | "shims-tsx.d.ts", 15 | "shims-vue.d.ts", 16 | ], 17 | plugins: ["vuetify"], 18 | extends: [ 19 | "plugin:vue/recommended", 20 | "eslint:recommended", 21 | "@vue/typescript/recommended", 22 | "@vue/prettier", 23 | "@vue/prettier/@typescript-eslint", 24 | ], 25 | parserOptions: { 26 | ecmaVersion: 2020, 27 | }, 28 | rules: { 29 | "no-console": process.env.NODE_ENV === "production" ? WARN : OFF, 30 | "no-debugger": process.env.NODE_ENV === "production" ? ERROR : OFF, 31 | "@typescript-eslint/interface-name-prefix": [ 32 | WARN, 33 | { 34 | prefixWithI: "always", 35 | }, 36 | ], 37 | "@typescript-eslint/no-unused-vars": [ 38 | WARN, 39 | { 40 | argsIgnorePattern: "^_", 41 | varsIgnorePattern: "^_", 42 | }, 43 | ], 44 | "vuetify/no-deprecated-classes": WARN, 45 | "vuetify/grid-unknown-attributes": WARN, 46 | "vuetify/no-legacy-grid": WARN, 47 | "vue/no-unused-vars": WARN, 48 | "@typescript-eslint/camelcase": OFF, 49 | }, 50 | overrides: [ 51 | { 52 | files: ["**/__tests__/*.{j,t}s?(x)", "**/tests/unit/**/*.spec.{j,t}s?(x)"], 53 | env: { 54 | jest: true, 55 | }, 56 | }, 57 | ], 58 | }; 59 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | /dist 4 | 5 | # local env files 6 | .env.local 7 | .env.*.local 8 | 9 | # Log files 10 | npm-debug.log* 11 | yarn-debug.log* 12 | yarn-error.log* 13 | 14 | # Editor directories and files 15 | .idea 16 | .vscode 17 | *.suo 18 | *.ntvs* 19 | *.njsproj 20 | *.sln 21 | *.sw* 22 | -------------------------------------------------------------------------------- /frontend/.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | 
printWidth: 88, 3 | tabWidth: 2, 4 | tabs: false, 5 | semi: true, 6 | singleQuote: false, 7 | trailingComma: "all", 8 | arrowParens: "always", 9 | vueIndentScriptAndStyle: true, 10 | }; 11 | -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | # Stage 0, "build-stage", based on Node.js, to build and compile the frontend 2 | FROM tiangolo/node-frontend:10 as build-stage 3 | 4 | WORKDIR /app 5 | 6 | COPY package*.json /app/ 7 | 8 | RUN npm install 9 | 10 | COPY ./ /app/ 11 | 12 | ARG FRONTEND_ENV=production 13 | 14 | ENV VUE_APP_ENV=${FRONTEND_ENV} 15 | 16 | # Comment out the next line to disable tests 17 | RUN npm run test:unit 18 | 19 | RUN npm run build 20 | 21 | 22 | # Stage 1, based on Nginx, to have only the compiled app, ready for production with Nginx 23 | FROM nginx:1.15 24 | 25 | COPY --from=build-stage /app/dist/ /usr/share/nginx/html 26 | 27 | COPY --from=build-stage /nginx.conf /etc/nginx/conf.d/default.conf 28 | COPY ./nginx-backend-not-found.conf /etc/nginx/extra-conf.d/backend-not-found.conf 29 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # frontend 2 | 3 | ## Project setup 4 | ``` 5 | npm install 6 | ``` 7 | 8 | ### Compiles and hot-reloads for development 9 | ``` 10 | npm run serve 11 | ``` 12 | 13 | ### Compiles and minifies for production 14 | ``` 15 | npm run build 16 | ``` 17 | 18 | ### Run your tests 19 | ``` 20 | npm run test 21 | ``` 22 | 23 | ### Lints and fixes files 24 | ``` 25 | npm run lint 26 | ``` 27 | 28 | ### Run your unit tests 29 | ``` 30 | npm run test:unit 31 | ``` 32 | -------------------------------------------------------------------------------- /frontend/babel.config.js: -------------------------------------------------------------------------------- 
1 | module.exports = { 2 | presets: [ 3 | [ 4 | "@vue/cli-plugin-babel/preset", 5 | { 6 | useBuiltIns: "entry", 7 | }, 8 | ], 9 | ], 10 | }; 11 | -------------------------------------------------------------------------------- /frontend/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: "@vue/cli-plugin-unit-jest/presets/typescript-and-babel", 3 | transformIgnorePatterns: ["/node_modules/(?!@mdi|vuetify)"], 4 | }; 5 | -------------------------------------------------------------------------------- /frontend/nginx-backend-not-found.conf: -------------------------------------------------------------------------------- 1 | location /api { 2 | return 404; 3 | } 4 | location /docs { 5 | return 404; 6 | } 7 | location /redoc { 8 | return 404; 9 | } 10 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "KARL", 3 | "version": "0.1.1", 4 | "private": true, 5 | "scripts": { 6 | "serve": "vue-cli-service serve", 7 | "build": "vue-cli-service build", 8 | "test:unit": "vue-cli-service test:unit", 9 | "lint": "vue-cli-service lint" 10 | }, 11 | "dependencies": { 12 | "@mdi/font": "^3.6.95", 13 | "@mdi/js": "^5.7.55", 14 | "@sentry/browser": "^5.27.0", 15 | "@sentry/integrations": "^5.27.0", 16 | "@types/lodash.debounce": "^4.0.6", 17 | "axios": "^0.21.1", 18 | "core-js": "^3.6.4", 19 | "date-fns": "^2.16.1", 20 | "http-status-codes": "^1.4.0", 21 | "lodash.debounce": "^4.0.8", 22 | "register-service-worker": "^1.6.2", 23 | "roboto-fontface": "*", 24 | "vee-validate": "^3.4.2", 25 | "vue": "^2.6.12", 26 | "vue-class-component": "^7.2.6", 27 | "vue-gtag": "^1.9.1", 28 | "vue-property-decorator": "^8.5.1", 29 | "vue-router": "^3.4.7", 30 | "vuetify": "^2.3.15", 31 | "vuex": "^3.5.1" 32 | }, 33 | "devDependencies": { 34 | "@types/jest": "^24.0.19", 35 
| "@typescript-eslint/eslint-plugin": "^2.34.0", 36 | "@typescript-eslint/parser": "^2.34.0", 37 | "@vue/cli-plugin-babel": "~4.2.0", 38 | "@vue/cli-plugin-eslint": "~4.2.0", 39 | "@vue/cli-plugin-pwa": "~4.2.0", 40 | "@vue/cli-plugin-router": "~4.2.0", 41 | "@vue/cli-plugin-typescript": "~4.2.0", 42 | "@vue/cli-plugin-unit-jest": "~4.2.0", 43 | "@vue/cli-plugin-vuex": "~4.2.0", 44 | "@vue/cli-service": "~4.2.0", 45 | "@vue/eslint-config-prettier": "^6.0.0", 46 | "@vue/eslint-config-typescript": "^5.1.0", 47 | "@vue/test-utils": "1.0.0-beta.31", 48 | "eslint": "^6.7.2", 49 | "eslint-plugin-prettier": "^3.1.4", 50 | "eslint-plugin-vue": "^6.2.2", 51 | "eslint-plugin-vuetify": "^1.0.0-beta.7", 52 | "prettier": "^1.19.1", 53 | "sass": "^1.27.0", 54 | "sass-loader": "^8.0.2", 55 | "typescript": "^3.9.7", 56 | "vue-cli-plugin-vuetify": "^2.0.7", 57 | "vue-template-compiler": "^2.6.12", 58 | "vuetify-loader": "^1.6.0", 59 | "vuex-module-decorators": "^0.16.1", 60 | "webpack": "^4.44.2" 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /frontend/public/.well-known/apple-app-site-association: -------------------------------------------------------------------------------- 1 | { 2 | "webcredentials": { 3 | "apps": [ "SM7F898GQH.com.technaplex.KARL" ] 4 | } 5 | } -------------------------------------------------------------------------------- /frontend/public/apple-app-site-association: -------------------------------------------------------------------------------- 1 | { 2 | "webcredentials": { 3 | "apps": [ "SM7F898GQH.com.technaplex.KARL" ] 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /frontend/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/favicon.ico 
-------------------------------------------------------------------------------- /frontend/public/img/icons/android-chrome-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/img/icons/android-chrome-192x192.png -------------------------------------------------------------------------------- /frontend/public/img/icons/android-chrome-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/img/icons/android-chrome-512x512.png -------------------------------------------------------------------------------- /frontend/public/img/icons/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/img/icons/apple-touch-icon.png -------------------------------------------------------------------------------- /frontend/public/img/icons/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/img/icons/favicon-16x16.png -------------------------------------------------------------------------------- /frontend/public/img/icons/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/img/icons/favicon-32x32.png -------------------------------------------------------------------------------- /frontend/public/img/icons/mstile-150x150.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/public/img/icons/mstile-150x150.png -------------------------------------------------------------------------------- /frontend/public/img/icons/safari-pinned-tab.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 7 | 8 | Created by potrace 1.11, written by Peter Selinger 2001-2013 9 | 10 | 12 | 19 | 22 | 25 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /frontend/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | <%= htmlWebpackPlugin.options.title %> 10 | 11 | 12 | 18 |
19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /frontend/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "KAR³L Flashcards", 3 | "short_name": "KAR³L", 4 | "icons": [ 5 | { 6 | "src": "/img/icons/android-chrome-192x192.png", 7 | "sizes": "192x192", 8 | "type": "image/png" 9 | }, 10 | { 11 | "src": "/img/icons/android-chrome-512x512.png", 12 | "sizes": "512x512", 13 | "type": "image/png" 14 | } 15 | ], 16 | "start_url": "/", 17 | "display": "standalone", 18 | "background_color": "#000000", 19 | "theme_color": "#1D489B" 20 | } 21 | -------------------------------------------------------------------------------- /frontend/public/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | Disallow: 3 | -------------------------------------------------------------------------------- /frontend/src/App.vue: -------------------------------------------------------------------------------- 1 | 27 | 28 | 69 | -------------------------------------------------------------------------------- /frontend/src/assets/background-desktop-blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/background-desktop-blue.png -------------------------------------------------------------------------------- /frontend/src/assets/background-desktop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/background-desktop.png -------------------------------------------------------------------------------- /frontend/src/assets/ios_button.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/ios_button.jpg -------------------------------------------------------------------------------- /frontend/src/assets/ios_screen.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/ios_screen.jpg -------------------------------------------------------------------------------- /frontend/src/assets/karl_install.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/karl_install.png -------------------------------------------------------------------------------- /frontend/src/assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/logo.png -------------------------------------------------------------------------------- /frontend/src/assets/neural-net-blue.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /frontend/src/assets/neural-net.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /frontend/src/assets/umd-horizontal.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Pinafore/karl-flashcards-web-app/088817ecd23ebdeda57023243a864ecb5f0871b2/frontend/src/assets/umd-horizontal.png -------------------------------------------------------------------------------- /frontend/src/component-hooks.ts: -------------------------------------------------------------------------------- 1 | import Component from "vue-class-component"; 2 | 3 | // Register the router hooks with their names 4 | Component.registerHooks([ 5 | "beforeRouteEnter", 6 | "beforeRouteLeave", 7 | "beforeRouteUpdate", // for vue-router 2.2+ 8 | ]); 9 | -------------------------------------------------------------------------------- /frontend/src/components/ConnectionError.vue: -------------------------------------------------------------------------------- 1 | 19 | 20 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /frontend/src/components/NotificationsManager.vue: -------------------------------------------------------------------------------- 1 | 13 | 80 | -------------------------------------------------------------------------------- /frontend/src/components/RouterComponent.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 11 | -------------------------------------------------------------------------------- /frontend/src/components/UpdateAvailable.vue: -------------------------------------------------------------------------------- 1 | 9 | 10 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /frontend/src/components/UploadButton.vue: -------------------------------------------------------------------------------- 1 | 13 | 14 | 31 | 32 | 41 | -------------------------------------------------------------------------------- /frontend/src/env.ts: 
-------------------------------------------------------------------------------- 1 | const env = process.env.VUE_APP_ENV; 2 | 3 | let envApiUrl = ""; 4 | 5 | if (env === "production") { 6 | envApiUrl = `https://${process.env.VUE_APP_DOMAIN_PROD}`; 7 | } else if (env === "staging") { 8 | envApiUrl = `https://${process.env.VUE_APP_DOMAIN_STAG}`; 9 | } else { 10 | envApiUrl = `http://${process.env.VUE_APP_DOMAIN_DEV}`; 11 | } 12 | 13 | export const apiUrl = envApiUrl; 14 | export const appName = process.env.VUE_APP_NAME; 15 | -------------------------------------------------------------------------------- /frontend/src/main.ts: -------------------------------------------------------------------------------- 1 | import "./component-hooks"; 2 | import Vue from "vue"; 3 | import App from "./App.vue"; 4 | import "./registerServiceWorker"; 5 | import router from "./router"; 6 | import store from "./store"; 7 | import vuetify from "./plugins/vuetify"; 8 | import * as Sentry from "@sentry/browser"; 9 | import { Vue as VueIntegration } from "@sentry/integrations"; 10 | import VueGtag from "vue-gtag"; 11 | 12 | const IGNORE = [/ServiceWorker/, /service worker/, /newestWorker/, /focus/]; 13 | 14 | function shouldIgnoreException(s: string): boolean { 15 | return IGNORE.find((pattern) => pattern.test(s)) != null; 16 | } 17 | 18 | if (process.env.VUE_APP_ENV == "production") { 19 | Sentry.init({ 20 | dsn: "https://ac296d2d7e8c4115ab8f2713520612cf@o283930.ingest.sentry.io/5259730", 21 | integrations: [new VueIntegration({ Vue, attachProps: true, logErrors: true })], 22 | beforeSend: function(event, hint: Sentry.EventHint) { 23 | console.log("Processing before sending to Sentry"); 24 | console.log(event.message); 25 | console.log(hint.originalException); 26 | const error = hint.originalException; 27 | if (error instanceof Error) { 28 | console.log(error.message); 29 | } else { 30 | console.log("not instance of error"); 31 | } 32 | if (hint) { 33 | const error = 
hint.originalException; 34 | if (typeof error === "string") { 35 | if (shouldIgnoreException(error)) { 36 | console.log("ignoring error"); 37 | return null; 38 | } else { 39 | console.log("Not ignoring error 1"); 40 | } 41 | } else if (error instanceof Error) { 42 | if (shouldIgnoreException(error.message)) { 43 | console.log("ignoring error"); 44 | return null; 45 | } else { 46 | console.log("Not ignoring error 2"); 47 | } 48 | } 49 | } 50 | if (event.message) { 51 | if (shouldIgnoreException(event.message)) { 52 | console.log("ignoring error"); 53 | return null; 54 | } else { 55 | console.log("Not ignoring error 1"); 56 | } 57 | } 58 | console.log("Sending Sentry event"); 59 | return event; 60 | }, 61 | }); 62 | } 63 | 64 | Vue.config.productionTip = false; 65 | 66 | if (process.env.VUE_APP_ENV == "production") { 67 | Vue.use( 68 | VueGtag, 69 | { 70 | config: { id: "G-DTXRFTV9D2" }, 71 | }, 72 | router, 73 | ); 74 | } 75 | 76 | new Vue({ 77 | router, 78 | store, 79 | vuetify, 80 | render: (h) => h(App), 81 | }).$mount("#app"); 82 | -------------------------------------------------------------------------------- /frontend/src/plugins/vuetify.ts: -------------------------------------------------------------------------------- 1 | import "@mdi/font/css/materialdesignicons.css"; 2 | import Vue from "vue"; 3 | import Vuetify, { colors } from "vuetify/lib"; 4 | 5 | Vue.use(Vuetify); 6 | 7 | export default new Vuetify({ 8 | icons: { 9 | iconfont: "mdi", 10 | }, 11 | theme: { 12 | themes: { 13 | light: { 14 | primary: colors.blueGrey.darken2, 15 | navigation: colors.blueGrey.darken2, 16 | }, 17 | dark: { 18 | primary: colors.lightBlue.lighten3, 19 | navigation: colors.blue.darken4, 20 | }, 21 | }, 22 | }, 23 | }); 24 | -------------------------------------------------------------------------------- /frontend/src/registerServiceWorker.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | 3 | import { 
register } from "register-service-worker"; 4 | 5 | if (process.env.NODE_ENV === "production") { 6 | register(`${process.env.BASE_URL}service-worker.js`, { 7 | ready() { 8 | console.log( 9 | "App is being served from cache by a service worker.\n" + 10 | "For more details, visit https://goo.gl/AFskqB", 11 | ); 12 | }, 13 | registered() { 14 | console.log("Service worker has been registered."); 15 | }, 16 | cached() { 17 | console.log("Content has been cached for offline use."); 18 | }, 19 | updatefound() { 20 | console.log("New content is downloading."); 21 | }, 22 | updated(registration) { 23 | console.log("New content is available; please refresh."); 24 | document.dispatchEvent(new CustomEvent("swUpdated", { detail: registration })); 25 | }, 26 | offline() { 27 | console.log("No internet connection found. App is running in offline mode."); 28 | }, 29 | error(error) { 30 | console.error("Error during service worker registration:", error); 31 | }, 32 | }); 33 | } 34 | -------------------------------------------------------------------------------- /frontend/src/shims-tsx.d.ts: -------------------------------------------------------------------------------- 1 | import Vue, { VNode } from "vue"; 2 | 3 | declare global { 4 | namespace JSX { 5 | // eslint:disable no-empty-interface 6 | interface Element extends VNode {} 7 | // eslint:disable no-empty-interface 8 | interface ElementClass extends Vue {} 9 | interface IntrinsicElements { 10 | [elem: string]: any; 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /frontend/src/shims-vue.d.ts: -------------------------------------------------------------------------------- 1 | declare module "*.vue" { 2 | import Vue from "vue"; 3 | export default Vue; 4 | } 5 | -------------------------------------------------------------------------------- /frontend/src/store/index.ts: -------------------------------------------------------------------------------- 1 | import Vue from 
"vue"; 2 | import Vuex, { Store } from "vuex"; 3 | import { initializeStores, modules } from "@/utils/store-accessor"; 4 | 5 | Vue.use(Vuex); 6 | 7 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 8 | const initializer = (store: Store) => initializeStores(store); 9 | 10 | export const plugins = [initializer]; 11 | 12 | export * from "@/utils/store-accessor"; 13 | 14 | export default new Store({ 15 | plugins, 16 | modules, 17 | }); 18 | -------------------------------------------------------------------------------- /frontend/src/store/modules/admin.ts: -------------------------------------------------------------------------------- 1 | import { api } from "@/api"; 2 | import { VuexModule, Module, Mutation, Action } from "vuex-module-decorators"; 3 | import { IComponents } from "@/interfaces"; 4 | import { mainStore } from "@/utils/store-accessor"; 5 | 6 | @Module({ name: "admin" }) 7 | export default class AdminModule extends VuexModule { 8 | users: IComponents["User"][] = []; 9 | 10 | get adminOneUser() { 11 | return (userId: number) => { 12 | const filteredUsers = this.users.filter((user) => user.id === userId); 13 | if (filteredUsers.length > 0) { 14 | return { ...filteredUsers[0] }; 15 | } 16 | }; 17 | } 18 | 19 | @Mutation 20 | setUsers(payload: IComponents["User"][]) { 21 | this.users = payload; 22 | } 23 | 24 | @Mutation 25 | setUser(payload: IComponents["User"]) { 26 | const users = this.users.filter( 27 | (user: IComponents["User"]) => user.id !== payload.id, 28 | ); 29 | users.push(payload); 30 | this.users = users; 31 | } 32 | 33 | @Action 34 | async getUsers() { 35 | try { 36 | const response = await api.getUsers(mainStore.token); 37 | if (response) { 38 | this.setUsers(response.data); 39 | } 40 | } catch (error) { 41 | await mainStore.checkApiError(error); 42 | } 43 | } 44 | 45 | @Action 46 | async updateUser(payload: { id: number; user: IComponents["SuperUserUpdate"] }) { 47 | try { 48 | const loadingNotification = { content: "saving", 
showProgress: true }; 49 | mainStore.addNotification(loadingNotification); 50 | const response = ( 51 | await Promise.all([ 52 | api.updateUser(mainStore.token, payload.id, payload.user), 53 | await new Promise((resolve, _reject) => setTimeout(() => resolve(), 500)), 54 | ]) 55 | )[0]; 56 | mainStore.setUserProfile(response.data); 57 | mainStore.removeNotification(loadingNotification); 58 | mainStore.addNotification({ 59 | content: "User updated", 60 | color: "success", 61 | }); 62 | } catch (error) { 63 | await mainStore.checkApiError(error); 64 | } 65 | } 66 | 67 | @Action 68 | async createUser(payload: IComponents["SuperUserCreate"]) { 69 | try { 70 | const loadingNotification = { content: "saving", showProgress: true }; 71 | mainStore.addNotification(loadingNotification); 72 | const response = ( 73 | await Promise.all([ 74 | api.createUser(mainStore.token, payload), 75 | await new Promise((resolve, _reject) => setTimeout(() => resolve(), 500)), 76 | ]) 77 | )[0]; 78 | mainStore.removeNotification(loadingNotification); 79 | mainStore.addNotification({ 80 | content: "User successfully created", 81 | color: "success", 82 | }); 83 | } catch (error) { 84 | await mainStore.checkApiError(error); 85 | } 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /frontend/src/utils/index.ts: -------------------------------------------------------------------------------- 1 | export const getLocalToken = () => localStorage.getItem("token"); 2 | 3 | export const saveLocalToken = (token: string) => localStorage.setItem("token", token); 4 | 5 | export const removeLocalToken = () => localStorage.removeItem("token"); 6 | 7 | export const getVisited = () => localStorage.getItem("visited"); 8 | 9 | export const saveVisited = () => localStorage.setItem("visited", "true"); 10 | -------------------------------------------------------------------------------- /frontend/src/utils/store-accessor.ts: 
-------------------------------------------------------------------------------- 1 | import { Store } from "vuex"; 2 | import { getModule } from "vuex-module-decorators"; 3 | import MainModule from "@/store/modules/main"; 4 | import AdminModule from "@/store/modules/admin"; 5 | import StudyModule from "@/store/modules/study"; 6 | 7 | let mainStore: MainModule; 8 | let adminStore: AdminModule; 9 | let studyStore: StudyModule; 10 | 11 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 12 | function initializeStores(store: Store): void { 13 | mainStore = getModule(MainModule, store); 14 | adminStore = getModule(AdminModule, store); 15 | studyStore = getModule(StudyModule, store); 16 | } 17 | 18 | export const modules = { 19 | main: MainModule, 20 | admin: AdminModule, 21 | study: StudyModule, 22 | }; 23 | 24 | export { initializeStores, mainStore, adminStore, studyStore }; 25 | -------------------------------------------------------------------------------- /frontend/src/views/Login.vue: -------------------------------------------------------------------------------- 1 | 50 | 51 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /frontend/src/views/PasswordRecovery.vue: -------------------------------------------------------------------------------- 1 | 50 | 51 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /frontend/src/views/main/Contact.vue: -------------------------------------------------------------------------------- 1 | 39 | 40 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /frontend/src/views/main/Start.vue: -------------------------------------------------------------------------------- 1 | 9 | 10 | 71 | -------------------------------------------------------------------------------- /frontend/src/views/main/TestPopup.vue: 
-------------------------------------------------------------------------------- 1 | 31 | 32 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /frontend/src/views/main/add/AddDeck.vue: -------------------------------------------------------------------------------- 1 | 40 | 41 | 103 | -------------------------------------------------------------------------------- /frontend/src/views/main/add/ChooseDecks.vue: -------------------------------------------------------------------------------- 1 | 26 | 27 | 64 | -------------------------------------------------------------------------------- /frontend/src/views/main/admin/Admin.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 28 | -------------------------------------------------------------------------------- /frontend/src/views/main/admin/AdminUsers.vue: -------------------------------------------------------------------------------- 1 | 25 | 26 | 82 | -------------------------------------------------------------------------------- /frontend/tests/unit/upload-button.spec.ts: -------------------------------------------------------------------------------- 1 | import { shallowMount } from "@vue/test-utils"; 2 | import UploadButton from "@/components/UploadButton.vue"; 3 | import "@/plugins/vuetify"; 4 | import Vue from "vue"; 5 | import Vuetify from "vuetify"; 6 | 7 | Vue.use(Vuetify); 8 | 9 | describe("UploadButton.vue", () => { 10 | it("renders props.title when passed", () => { 11 | const title = "upload a file"; 12 | const wrapper = shallowMount(UploadButton, { 13 | slots: { 14 | default: title, 15 | }, 16 | }); 17 | expect(wrapper.text()).toMatch(title); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "esnext", 4 | 
"module": "esnext", 5 | "strict": true, 6 | "jsx": "preserve", 7 | "importHelpers": true, 8 | "noUnusedLocals": false, 9 | "noUnusedParameters": false, 10 | "noImplicitAny": false, 11 | "moduleResolution": "node", 12 | "experimentalDecorators": true, 13 | "esModuleInterop": true, 14 | "allowSyntheticDefaultImports": true, 15 | "sourceMap": true, 16 | "baseUrl": ".", 17 | "types": ["webpack-env", "jest", "vuetify"], 18 | "paths": { 19 | "@/*": ["src/*"] 20 | }, 21 | "lib": ["esnext", "dom", "dom.iterable", "scripthost"] 22 | }, 23 | "include": [ 24 | "src/**/*.ts", 25 | "src/**/*.tsx", 26 | "src/**/*.vue", 27 | "tests/**/*.ts", 28 | "tests/**/*.tsx" 29 | ], 30 | "exclude": ["node_modules"] 31 | } 32 | -------------------------------------------------------------------------------- /frontend/vue.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | transpileDependencies: ["vuetify"], 3 | chainWebpack: (config) => { 4 | config.module 5 | .rule("vue") 6 | .use("vue-loader") 7 | .loader("vue-loader") 8 | .tap((options) => 9 | Object.assign(options, { 10 | transformAssetUrls: { 11 | "v-img": ["src", "lazy-src"], 12 | "v-card": "src", 13 | "v-card-media": "src", 14 | "v-responsive": "src", 15 | }, 16 | }), 17 | ); 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /scripts/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Get the optional argument for the dump filename 4 | DUMP_NAME=$1 5 | 6 | # If no argument is provided, use the default name 7 | if [ -z "$DUMP_NAME" ]; then 8 | DUMP_NAME="dump_$(date +"%Y-%m-%d_%H_%M_%S").gz" 9 | else 10 | DUMP_NAME="${DUMP_NAME}_$(date +"%Y-%m-%d_%H_%M_%S").gz" 11 | fi 12 | 13 | # Get the container ID of the running PostgreSQL image 14 | POSTGRES_DOCKER_ID=$(docker ps | grep "postgres" | awk '{print $1}') 15 | 16 | # Check if the container ID was found 17 
| if [ -z "$POSTGRES_DOCKER_ID" ]; then 18 | echo "Error: PostgreSQL container not found!" 19 | exit 1 20 | fi 21 | 22 | # Run the backup command 23 | docker exec -t $POSTGRES_DOCKER_ID pg_dumpall -c -U postgres | gzip > "./db-backups/$DUMP_NAME" 24 | 25 | echo "Backup completed successfully!" 26 | exit 0 27 | -------------------------------------------------------------------------------- /scripts/backup_app.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Get the optional argument for the dump filename 4 | DUMP_NAME=$1 5 | 6 | # If no argument is provided, use the default name 7 | if [ -z "$DUMP_NAME" ]; then 8 | DUMP_NAME="dump_app_$(date +"%Y-%m-%d_%H_%M_%S").gz" 9 | else 10 | DUMP_NAME="${DUMP_NAME}_$(date +"%Y-%m-%d_%H_%M_%S").gz" 11 | fi 12 | 13 | # Get the container ID of the running PostgreSQL image 14 | POSTGRES_DOCKER_ID=$(docker ps | grep "postgres" | awk '{print $1}') 15 | 16 | # Check if the container ID was found 17 | if [ -z "$POSTGRES_DOCKER_ID" ]; then 18 | echo "Error: PostgreSQL container not found!" 19 | exit 1 20 | fi 21 | 22 | # Run the backup command for the 'app' database 23 | docker exec -t $POSTGRES_DOCKER_ID pg_dump -c -U postgres app | gzip > "./db-backups/$DUMP_NAME" 24 | 25 | echo "Backup completed successfully!" 
26 | exit 0 27 | -------------------------------------------------------------------------------- /scripts/build-and-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Get the current highest tag number from docker stack ps for karl services 4 | # Assuming that the services follow the naming convention karl/SERVICE:TAG and that TAG is numerical 5 | highest_tag=$(docker stack ps karl | grep -oP 'karl/\S+:\K\d+' | sort -nr | head -n 1) # requires GNU grep (-P) 6 | 7 | # Increment the tag number ('let' treats an empty highest_tag as 0, so the very first deploy gets tag 1) 8 | let new_tag=highest_tag+1 9 | 10 | # Export the TAG and FRONTEND_ENV variables 11 | export TAG=$new_tag 12 | export FRONTEND_ENV=production 13 | 14 | # Run the build script 15 | bash ./scripts/build.sh && 16 | 17 | # Set other variables and run the deploy script (chained to a SUCCESSFUL build by the trailing '&&' above; blank/comment lines between the two commands are legal shell) 18 | DOMAIN=karl.qanta.org \ 19 | TRAEFIK_TAG=karl \ 20 | STACK_NAME=karl \ 21 | TAG=$new_tag \ 22 | bash ./scripts/deploy.sh -------------------------------------------------------------------------------- /scripts/build-push.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env sh 2 | 3 | # Exit in case of error 4 | set -e 5 | 6 | TAG=${TAG} \ 7 | FRONTEND_ENV=${FRONTEND_ENV-production} \ 8 | sh ./scripts/build.sh 9 | 10 | docker-compose -f docker-compose.yml push 11 | -------------------------------------------------------------------------------- /scripts/build.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env sh 2 | 3 | # Exit in case of error 4 | set -e 5 | 6 | TAG=${TAG} \ 7 | FRONTEND_ENV=${FRONTEND_ENV-production} \ 8 | docker-compose \ 9 | -f docker-compose.yml \ 10 | build 11 | -------------------------------------------------------------------------------- /scripts/deploy.sh: -------------------------------------------------------------------------------- 1 | #!
/usr/bin/env sh 2 | 3 | # Exit in case of error 4 | set -e 5 | 6 | DOMAIN=${DOMAIN} \ 7 | TRAEFIK_TAG=${TRAEFIK_TAG} \ 8 | STACK_NAME=${STACK_NAME} \ 9 | TAG=${TAG} \ 10 | docker-compose \ 11 | -f docker-compose.yml \ 12 | config > docker-stack.yml 13 | 14 | docker-auto-labels docker-stack.yml 15 | 16 | docker stack deploy -c docker-stack.yml --with-registry-auth "${STACK_NAME}" 17 | -------------------------------------------------------------------------------- /scripts/load-backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Function to handle errors: print the message and abort 4 | handle_error() { 5 | echo "Error: $1" 6 | exit 1 7 | } 8 | 9 | # Check the number of arguments provided 10 | if [ "$#" -lt 1 ]; then 11 | handle_error "No arguments provided. Usage: $0 <backup_file.gz> [path_to_.env_file]" 12 | elif [ "$#" -gt 2 ]; then 13 | handle_error "Too many arguments provided. Usage: $0 <backup_file.gz> [path_to_.env_file]" 14 | fi 15 | 16 | BACKUP_FILE=$1 17 | 18 | # Check if the .env file argument is provided, if not default to .env in the current directory 19 | if [ -z "$2" ]; then 20 | ENV_FILE="./.env" 21 | else 22 | ENV_FILE=$2 23 | fi 24 | 25 | 26 | # Extract POSTGRES_PASSWORD from the .env file (path quoted so names with spaces survive word splitting) 27 | POSTGRES_PASSWORD=$(grep POSTGRES_PASSWORD "$ENV_FILE" | cut -d '=' -f2) 28 | 29 | # Run the backup.sh script first 30 | ./scripts/backup.sh 31 | if [ $? -ne 0 ]; then 32 | handle_error "Backup failed. Aborting restore." 33 | fi 34 | 35 | # Extract the base name without the .gz extension for the cat command later 36 | BASE_NAME=$(basename "$BACKUP_FILE" .gz) 37 | DIR_NAME=$(dirname "$BACKUP_FILE") 38 | FULL_PATH="$DIR_NAME/$BASE_NAME" 39 | 40 | # Unzip the backup file (paths quoted so names with spaces survive) 41 | gunzip -c "$BACKUP_FILE" > "$FULL_PATH" 42 | if [ $? -ne 0 ]; then 43 | handle_error "Failed to unzip the backup file. Aborting restore."
44 | fi 45 | 46 | # Get the container ID of the running PostgreSQL image 47 | POSTGRES_DOCKER_ID=$(docker ps | grep "postgres" | awk '{print $1}') 48 | 49 | # Check if the container ID was found 50 | if [ -z "$POSTGRES_DOCKER_ID" ]; then 51 | handle_error "PostgreSQL container not found! Aborting restore." 52 | fi 53 | 54 | # Execute commands inside the PostgreSQL container 55 | docker exec -ti $POSTGRES_DOCKER_ID bash -c " 56 | psql -h localhost postgres postgres -c \"UPDATE pg_database SET datallowconn = 'false' WHERE datname = 'app';\" 57 | psql -h localhost postgres postgres -c \"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = 'app';\" 58 | psql -h localhost postgres postgres -c \"DROP DATABASE app;\" 59 | " 60 | 61 | if [ $? -ne 0 ]; then 62 | handle_error "Failed to execute SQL commands inside the container. Aborting restore." 63 | fi 64 | 65 | # Load the backup into the PostgreSQL container 66 | cat $FULL_PATH | docker exec -i $POSTGRES_DOCKER_ID psql -U postgres 67 | if [ $? -ne 0 ]; then 68 | handle_error "Failed to load the backup into the PostgreSQL container." 69 | fi 70 | 71 | # Reset the postgres user's password using the extracted POSTGRES_PASSWORD 72 | docker exec -it $POSTGRES_DOCKER_ID bash -c "psql -U postgres < docker-stack.yml 13 | 14 | docker-compose -f docker-stack.yml build 15 | docker-compose -f docker-stack.yml down -v --remove-orphans # Remove possibly previous broken stacks left hanging after an error 16 | docker-compose -f docker-stack.yml up -d 17 | docker-compose -f docker-stack.yml exec -T backend bash /app/tests-start.sh "$@" 18 | docker-compose -f docker-stack.yml down -v --remove-orphans 19 | -------------------------------------------------------------------------------- /scripts/update.sh: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env sh 2 | 3 | # Exit in case of error 4 | set -e 5 | 6 | export TAG=$1 # exported so the docker-compose build below actually sees it (a plain 'TAG=$1' assignment was invisible to docker-compose, which then built images with an empty tag) 7 | FRONTEND_ENV=production \ 8 | docker-compose \ 9 | -f docker-compose.yml \ 10 | build 11 | 12 | DOMAIN=karl.qanta.org \ 13 | TRAEFIK_TAG=karl \ 14 | STACK_NAME=karl \ 15 | TAG=$1 \ 16 | docker-compose \ 17 | -f docker-compose.yml \ 18 | config > docker-stack.yml 19 | # STACK_NAME above is set only for the 'config' command, so "${STACK_NAME}" would expand empty here — use the literal stack name 20 | docker stack deploy -c docker-stack.yml --with-registry-auth karl --------------------------------------------------------------------------------