├── packages.txt
├── install_log.txt
├── temp_translated.txt
├── translations
│   ├── .keep
│   ├── de
│   │   └── LC_MESSAGES
│   │       └── .keep
│   ├── fi
│   │   └── LC_MESSAGES
│   │       └── .keep
│   ├── fr
│   │   └── LC_MESSAGES
│   │       └── .keep
│   ├── it
│   │   └── LC_MESSAGES
│   │       └── .keep
│   └── nl
│       └── LC_MESSAGES
│           └── .keep
├── docs
│   ├── features
│   │   ├── CSV_EXPORT_ENHANCED.md
│   │   ├── KEYBOARD_SHORTCUTS_README.md
│   │   └── RUN_BLACK_FORMATTING.md
│   ├── QUICK_WINS_IMPLEMENTATION.md
│   ├── reports
│   │   └── README.md
│   ├── guides
│   │   ├── README.md
│   │   └── QUICK_START_LOCAL_DEVELOPMENT.md
│   ├── api
│   │   └── README.md
│   ├── development
│   │   └── README.md
│   ├── admin
│   │   └── README.md
│   ├── QUICK_WINS_UI.md
│   ├── implementation-notes
│   │   └── IMPLEMENTATION_STATUS.md
│   ├── bugfixes
│   │   └── template_application_fix.md
│   └── AVATAR_PERSISTENCE_SUMMARY.md
├── tests
│   ├── test_services
│   │   └── __init__.py
│   ├── test_repositories
│   │   └── __init__.py
│   ├── __pycache__
│   │   ├── conftest.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_api_v1.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_basic.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_email.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_routes.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_utils.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_analytics.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_expenses.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_invoices.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_overtime.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_security.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_telemetry.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_timezone.cpython-312-pytest-7.4.3.pyc
│   │   ├── smoke_test_email.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_admin_users.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_enhanced_ui.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_new_features.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_oidc_logout.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_pdf_layout.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_permissions.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_weekly_goals.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_delete_actions.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_overtime_smoke.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_payment_model.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_payment_routes.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_payment_smoke.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_profile_avatar.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_project_costs.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_time_rounding.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_ui_quick_wins.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_api_comprehensive.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_calendar_routes.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_client_note_model.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_extra_good_model.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_favorite_projects.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_invoice_expenses.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_models_extended.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_project_archiving.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_task_edit_project.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_tasks_filters_ui.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_tasks_templates.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_version_reading.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_admin_email_routes.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_admin_settings_logo.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_client_notes_routes.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_installation_config.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_keyboard_shortcuts.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_permissions_routes.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_uploads_persistence.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_calendar_event_model.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_comprehensive_tracking.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_invoice_currency_fix.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_invoice_currency_smoke.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_models_comprehensive.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_time_entry_duplication.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_time_entry_templates.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_project_archiving_models.cpython-312-pytest-7.4.3.pyc
│   │   ├── test_project_inactive_status.cpython-312-pytest-7.4.3.pyc
│   │   └── test_keyboard_shortcuts_input_fix.cpython-312-pytest-7.4.3.pyc
│   ├── test_service_worker.py
│   ├── test_system_ui_flags.py
│   ├── test_ui_quick_wins.py
│   ├── test_time_rounding_param.py
│   ├── test_api_audit_activities_v1.py
│   ├── test_api_invoice_templates_api_v1.py
│   ├── test_time_entry_freeze.py
│   ├── test_tasks_filters_ui.py
│   ├── smoke_test_prepaid_hours.py
│   ├── test_client_prepaid_model.py
│   └── test_api_kanban_v1.py
├── app
│   ├── static
│   │   ├── test.txt
│   │   ├── uploads
│   │   │   └── logos
│   │   │       └── .gitkeep
│   │   ├── images
│   │   │   ├── avatar-default.svg
│   │   │   └── timetracker-logo.svg
│   │   └── src
│   │       └── input.css
│   ├── integrations
│   │   ├── __init__.py
│   │   └── registry.py
│   ├── routes
│   │   ├── api
│   │   │   ├── v1
│   │   │   │   └── __init__.py
│   │   │   └── __init__.py
│   │   └── settings.py
│   ├── templates
│   │   ├── components
│   │   │   └── cards.html
│   │   ├── admin
│   │   │   └── system_info.html
│   │   ├── errors
│   │   │   ├── 500.html
│   │   │   ├── 404.html
│   │   │   ├── 400.html
│   │   │   └── 403.html
│   │   └── reports
│   │       └── summary.html
│   ├── utils
│   │   ├── decorators.py
│   │   └── rate_limiting.py
│   ├── models
│   │   ├── invoice_template.py
│   │   ├── task_activity.py
│   │   ├── user_favorite_project.py
│   │   ├── tax_rule.py
│   │   ├── saved_filter.py
│   │   ├── currency.py
│   │   └── client_prepaid_consumption.py
│   ├── repositories
│   │   ├── __init__.py
│   │   ├── client_repository.py
│   │   └── user_repository.py
│   ├── services
│   │   └── __init__.py
│   ├── schemas
│   │   ├── __init__.py
│   │   ├── comment_schema.py
│   │   ├── user_schema.py
│   │   ├── expense_schema.py
│   │   └── task_schema.py
│   └── config
│       └── __init__.py
├── .bandit
├── assets
│   ├── screenshots
│   │   ├── About.png
│   │   ├── Help.png
│   │   ├── Kanban.png
│   │   ├── Login.png
│   │   ├── OIDC.png
│   │   ├── Tasks.png
│   │   ├── Clients.png
│   │   ├── Invoices.png
│   │   ├── LogTime.png
│   │   ├── Profile.png
│   │   ├── Projects.png
│   │   ├── Reports.png
│   │   ├── CreateTask.png
│   │   ├── Dashboard.png
│   │   ├── UserReports.png
│   │   ├── CreateClient.png
│   │   ├── CreateProject.png
│   │   ├── AdminDashboard.png
│   │   └── TimeEntryTemplates.png
│   ├── c__Users_dries_AppData_Roaming_Cursor_User_workspaceStorage_fbcaabc1224787c3bc6f9b84afe0b9ee_images_522379712-b3773e3a-495d-4de0-a740-05a615728363-c67cfd5c-1375-4a72-8291-839696c9e051.png
│   └── README.md
├── babel.cfg
├── postcss.config.js
├── logs
│   └── .gitkeep
├── temp_migration.sql
├── run_tests.sh
├── .gitattributes
├── .flake8
├── docker
│   ├── start-minimal.sh
│   ├── Dockerfile.mkcert
│   ├── start-simple.sh
│   ├── supervisord.conf
│   ├── init-db.sh
│   ├── init.sh
│   ├── entrypoint-local-test-simple.sh
│   ├── fix-invoices-now.py
│   ├── test-startup.sh
│   ├── migrate-add-tasks.py
│   ├── entrypoint-local-test.sh
│   ├── start.py
│   ├── fix-schema.py
│   ├── start.sh
│   ├── test-db.py
│   ├── fix-upload-permissions.sh
│   ├── generate-mkcert-certs.sh
│   └── start-fixed.sh
├── .editorconfig
├── grafana
│   └── provisioning
│       └── datasources
│           └── prometheus.yml
├── .coveragerc
├── setup.py
├── migrations
│   ├── versions
│   │   ├── 002_add_user_full_name.py
│   │   ├── 003_add_user_theme_preference.py
│   │   ├── 053_add_quote_payment_terms.py
│   │   ├── 031_add_standard_hours_per_day.py
│   │   ├── 020_add_user_avatar.py
│   │   ├── 049_add_client_password_setup_token.py
│   │   ├── 036_add_pdf_design_json.py
│   │   ├── 011_add_user_preferred_language.py
│   │   ├── 088_add_salesman_splitting_to_reports.py
│   │   ├── 048_add_client_portal_credentials.py
│   │   ├── 047_add_client_portal_fields.py
│   │   ├── 054_add_quote_comments.py
│   │   ├── 064_add_kiosk_mode_settings.py
│   │   ├── 022_add_project_code_field.py
│   │   ├── 052_add_quote_discount_fields.py
│   │   ├── 012_add_pdf_template_fields.py
│   │   ├── 068_add_user_password_hash.py
│   │   ├── 074_add_password_change_required.py
│   │   ├── 006_add_logo_and_task_timestamps.py
│   │   ├── 004_add_task_activities_table.py
│   │   ├── 058_add_quote_versions.py
│   │   ├── 055_add_quote_attachments.py
│   │   ├── 082_add_global_integrations.py
│   │   ├── 085_add_project_custom_fields.py
│   │   └── 087_add_salesman_email_mapping.py
│   ├── script.py.mako
│   ├── add_analytics_column.sql
│   └── add_project_costs.sql
├── scripts
│   ├── validate-setup.bat
│   ├── start-local-test.sh
│   ├── start-local-test.bat
│   ├── validate-setup.sh
│   ├── version-manager.bat
│   ├── version-manager.sh
│   ├── start-local-test.ps1
│   ├── extract_translations.py
│   └── version-manager.ps1
├── run_model_tests.py
├── check_routes.py
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── feature_request.md
│   │   └── bug_report.md
│   ├── FUNDING.yml
│   └── workflows
│       └── static.yml
├── run_tests_script.py
├── tailwind.config.js
├── package.json
├── env.local-test.example
├── .env.local-test
├── docker-compose.https-auto.yml
├── prometheus
│   └── prometheus.yml
├── promtail
│   └── promtail-config.yml
├── loki
│   └── loki-config.yml
├── requirements-test.txt
├── docker-compose.https-mkcert.yml
├── run_tests_individually.py
├── requirements.txt
├── quick_test_summary.py
└── pyproject.toml
/packages.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/install_log.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/temp_translated.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/translations/.keep:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/features/CSV_EXPORT_ENHANCED.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/translations/de/LC_MESSAGES/.keep:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/translations/fi/LC_MESSAGES/.keep:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/translations/fr/LC_MESSAGES/.keep:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/translations/it/LC_MESSAGES/.keep:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/translations/nl/LC_MESSAGES/.keep:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/tests/test_services/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for service layer.
3 | """
4 |
--------------------------------------------------------------------------------
/tests/test_repositories/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for repository layer.
3 | """
4 |
--------------------------------------------------------------------------------
/app/static/test.txt:
--------------------------------------------------------------------------------
1 | This is a test file to verify static file serving is working.
2 |
--------------------------------------------------------------------------------
/.bandit:
--------------------------------------------------------------------------------
1 | [bandit]
2 | exclude_dirs = tests,migrations,venv,.venv,htmlcov
3 | skips = B101,B601
4 |
5 |
--------------------------------------------------------------------------------
/assets/screenshots/About.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/About.png
--------------------------------------------------------------------------------
/assets/screenshots/Help.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Help.png
--------------------------------------------------------------------------------
/assets/screenshots/Kanban.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Kanban.png
--------------------------------------------------------------------------------
/assets/screenshots/Login.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Login.png
--------------------------------------------------------------------------------
/assets/screenshots/OIDC.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/OIDC.png
--------------------------------------------------------------------------------
/assets/screenshots/Tasks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Tasks.png
--------------------------------------------------------------------------------
/assets/screenshots/Clients.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Clients.png
--------------------------------------------------------------------------------
/assets/screenshots/Invoices.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Invoices.png
--------------------------------------------------------------------------------
/assets/screenshots/LogTime.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/LogTime.png
--------------------------------------------------------------------------------
/assets/screenshots/Profile.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Profile.png
--------------------------------------------------------------------------------
/assets/screenshots/Projects.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Projects.png
--------------------------------------------------------------------------------
/assets/screenshots/Reports.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Reports.png
--------------------------------------------------------------------------------
/babel.cfg:
--------------------------------------------------------------------------------
1 | [python: app/**.py]
2 | [python: *.py]
3 | [jinja2: app/templates/**.html]
4 | encoding = utf-8
5 |
6 |
--------------------------------------------------------------------------------
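
These extraction patterns are what pybabel reads when building the messages.pot catalog that feeds the translations/ directory above. A minimal sketch of that extraction step, assuming the standard Flask-Babel workflow; the repository's own scripts/extract_translations.py most likely wraps something equivalent, but its contents are not shown in this dump:

import subprocess

# Collect translatable strings from the paths listed in babel.cfg into messages.pot.
subprocess.run(
    ["pybabel", "extract", "-F", "babel.cfg", "-o", "messages.pot", "."],
    check=True,
)

# The resulting .pot file is then merged into the per-language catalogs under
# translations/<lang>/LC_MESSAGES/, e.g. with
# `pybabel update -i messages.pot -d translations`.
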
/assets/screenshots/CreateTask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/CreateTask.png
--------------------------------------------------------------------------------
/assets/screenshots/Dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/Dashboard.png
--------------------------------------------------------------------------------
/assets/screenshots/UserReports.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/UserReports.png
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | plugins: {
3 | tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
--------------------------------------------------------------------------------
/assets/screenshots/CreateClient.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/CreateClient.png
--------------------------------------------------------------------------------
/assets/screenshots/CreateProject.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/CreateProject.png
--------------------------------------------------------------------------------
/assets/screenshots/AdminDashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/AdminDashboard.png
--------------------------------------------------------------------------------
/logs/.gitkeep:
--------------------------------------------------------------------------------
1 | # This file ensures the logs directory is tracked in git
2 | # Log files will be created here by the application
3 |
--------------------------------------------------------------------------------
/assets/screenshots/TimeEntryTemplates.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/assets/screenshots/TimeEntryTemplates.png
--------------------------------------------------------------------------------
/app/integrations/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Integration connectors package.
3 | """
4 |
5 | from .base import BaseConnector
6 |
7 | __all__ = ["BaseConnector"]
8 |
--------------------------------------------------------------------------------
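
app/integrations/base.py itself is not included in this dump, so the shape of BaseConnector can only be inferred from the import above. The following is a minimal sketch of what such a base class commonly looks like; the attribute and method names are illustrative assumptions, not the project's actual API:

from abc import ABC, abstractmethod


class BaseConnector(ABC):
    """Common interface for integration connectors (hypothetical sketch)."""

    # Short identifier a registry could key on (assumed attribute; see registry.py).
    name: str = "base"

    @abstractmethod
    def test_connection(self) -> bool:
        """Return True if the external service is reachable with the stored credentials."""

    @abstractmethod
    def sync(self) -> None:
        """Exchange data between TimeTracker and the external service."""
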
/tests/__pycache__/conftest.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/conftest.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/app/static/uploads/logos/.gitkeep:
--------------------------------------------------------------------------------
1 | # This file ensures the logos directory is tracked by git
2 | # Logo files uploaded through the admin interface will be stored here
3 |
--------------------------------------------------------------------------------
/tests/__pycache__/test_api_v1.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_api_v1.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_basic.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_basic.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_email.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_email.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_routes.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_routes.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_utils.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_utils.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_analytics.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_analytics.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_expenses.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_expenses.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_invoices.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_invoices.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_overtime.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_overtime.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_security.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_security.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_telemetry.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_telemetry.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_timezone.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_timezone.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/temp_migration.sql:
--------------------------------------------------------------------------------
 1 | -- Temporary helper: stamp alembic_version at the advanced expense management revision (does not create the schema itself)
2 | UPDATE alembic_version SET version_num = '037_add_advanced_expense_management';
3 |
4 |
--------------------------------------------------------------------------------
/tests/__pycache__/smoke_test_email.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/smoke_test_email.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_admin_users.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_admin_users.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_enhanced_ui.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_enhanced_ui.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_new_features.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_new_features.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_oidc_logout.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_oidc_logout.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_pdf_layout.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_pdf_layout.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_permissions.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_permissions.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_weekly_goals.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_weekly_goals.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_delete_actions.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_delete_actions.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_overtime_smoke.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_overtime_smoke.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_payment_model.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_payment_model.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_payment_routes.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_payment_routes.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_payment_smoke.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_payment_smoke.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_profile_avatar.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_profile_avatar.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_project_costs.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_project_costs.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_time_rounding.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_time_rounding.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_ui_quick_wins.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_ui_quick_wins.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/run_tests.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | cd /app
 3 | echo "====== Running TimeTracker Tests ======"
 4 | python -m pytest tests/ -v --tb=short
 5 | rc=$?
 6 | echo "====== Tests Complete. Exit Code: $rc ======"
 7 | exit $rc
--------------------------------------------------------------------------------
/tests/__pycache__/test_api_comprehensive.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_api_comprehensive.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_calendar_routes.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_calendar_routes.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_client_note_model.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_client_note_model.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_extra_good_model.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_extra_good_model.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_favorite_projects.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_favorite_projects.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_invoice_expenses.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_invoice_expenses.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_models_extended.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_models_extended.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_project_archiving.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_project_archiving.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_task_edit_project.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_task_edit_project.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_tasks_filters_ui.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_tasks_filters_ui.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_tasks_templates.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_tasks_templates.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_version_reading.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_version_reading.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/assets/c__Users_dries_AppData_Roaming_Cursor_User_workspaceStorage_fbcaabc1224787c3bc6f9b84afe0b9ee_images_522379712-b3773e3a-495d-4de0-a740-05a615728363-c67cfd5c-1375-4a72-8291-839696c9e051.png:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/__pycache__/test_admin_email_routes.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_admin_email_routes.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_admin_settings_logo.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_admin_settings_logo.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_client_notes_routes.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_client_notes_routes.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_installation_config.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_installation_config.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_keyboard_shortcuts.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_keyboard_shortcuts.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_permissions_routes.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_permissions_routes.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_uploads_persistence.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_uploads_persistence.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_calendar_event_model.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_calendar_event_model.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_comprehensive_tracking.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_comprehensive_tracking.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_invoice_currency_fix.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_invoice_currency_fix.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_invoice_currency_smoke.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_invoice_currency_smoke.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_models_comprehensive.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_models_comprehensive.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_time_entry_duplication.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_time_entry_duplication.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_time_entry_templates.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_time_entry_templates.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_project_archiving_models.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_project_archiving_models.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/tests/__pycache__/test_project_inactive_status.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_project_inactive_status.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Enforce LF endings for executable scripts to avoid /usr/bin/env CRLF issues
2 | *.sh text eol=lf
3 | *.py text eol=lf
4 |
5 | # Optional: keep everything else automatic
6 | * text=auto
7 |
8 |
--------------------------------------------------------------------------------
/tests/__pycache__/test_keyboard_shortcuts_input_fix.cpython-312-pytest-7.4.3.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DRYTRIX/TimeTracker/HEAD/tests/__pycache__/test_keyboard_shortcuts_input_fix.cpython-312-pytest-7.4.3.pyc
--------------------------------------------------------------------------------
/docs/QUICK_WINS_IMPLEMENTATION.md:
--------------------------------------------------------------------------------
 1 | - Kanban: show the project code on task cards and remove the inline status dropdown; a task's status is now determined by the column it sits in. Projects now support an optional short `code` used for compact displays.
2 |
3 |
--------------------------------------------------------------------------------
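
The optional short project code mentioned above lines up with the 022_add_project_code_field.py revision listed under migrations/versions. The Project model is not part of this dump; on the model side the field would look roughly like the sketch below, where the column length and attribute placement are assumptions:

from app import db  # assumed Flask-SQLAlchemy instance


class Project(db.Model):  # abbreviated; other columns omitted
    __tablename__ = "projects"

    id = db.Column(db.Integer, primary_key=True)
    # Optional short code rendered on compact displays such as Kanban task cards.
    code = db.Column(db.String(20), nullable=True)
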
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | extend-ignore = E203, W503
4 | exclude =
5 | .git,
6 | __pycache__,
7 | venv,
8 | .venv,
9 | build,
10 | dist,
11 | htmlcov,
12 | app/static/vendor
13 |
14 |
15 |
--------------------------------------------------------------------------------
/docker/start-minimal.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | cd /app
4 | export FLASK_APP=app
5 |
6 | echo "=== Starting TimeTracker (Minimal Mode) ==="
7 | echo "Starting application..."
8 | exec gunicorn --bind 0.0.0.0:8080 --worker-class eventlet --workers 1 --timeout 120 "app:create_app()"
9 |
--------------------------------------------------------------------------------
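
The gunicorn target "app:create_app()" uses Flask's application-factory pattern: gunicorn imports the app package and calls create_app() to build the WSGI application. The factory itself lives in app/__init__.py, which is not included in this dump; in outline it does something like the sketch below, where the config object name and the exact set of extensions are assumptions:

from flask import Flask


def create_app(config_object="app.config.Config"):  # config class name is an assumption
    app = Flask(__name__)
    app.config.from_object(config_object)

    # Typical responsibilities of the factory in a project laid out like this one:
    # initialise extensions (database, login, Babel, Socket.IO for the eventlet worker)
    # and register the blueprints found under app/routes/.
    return app
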
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | end_of_line = lf
5 | insert_final_newline = true
6 | charset = utf-8
7 |
8 | [*.py]
9 | indent_style = space
10 | indent_size = 4
11 | max_line_length = 120
12 |
13 | [*.{json,yml,yaml}]
14 | indent_style = space
15 | indent_size = 2
16 |
17 |
18 |
--------------------------------------------------------------------------------
/grafana/provisioning/datasources/prometheus.yml:
--------------------------------------------------------------------------------
1 | # Grafana datasource configuration for Prometheus
2 | # This file automatically provisions Prometheus as a datasource in Grafana
3 |
4 | apiVersion: 1
5 |
6 | datasources:
7 | - name: Prometheus
8 | type: prometheus
9 | access: proxy
10 | url: http://prometheus:9090
11 | isDefault: true
12 | editable: true
13 | jsonData:
14 | timeInterval: 30s
15 |
16 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = app
3 | omit =
4 | */tests/*
5 | */test_*.py
6 | */__pycache__/*
7 | */venv/*
8 | */env/*
9 | # Exclude infrastructure/CLI utilities from unit test coverage
10 | app/utils/backup.py
11 | app/utils/cli.py
12 | app/utils/pdf_generator.py
13 | app/utils/pdf_generator_fallback.py
14 |
15 | [report]
16 | precision = 2
17 | show_missing = True
18 | skip_covered = False
19 |
20 | [html]
21 | directory = htmlcov
22 |
23 |
--------------------------------------------------------------------------------
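
This configuration is picked up automatically by coverage.py (and by pytest-cov if requirements-test.txt installs it; that file's contents are not shown here). A rough Python equivalent of `coverage run -m pytest` followed by `coverage html`, just to show which sections of the file come into play:

import coverage
import pytest

cov = coverage.Coverage()   # reads [run]/[report]/[html] from .coveragerc in the working directory
cov.start()
exit_code = pytest.main(["tests/", "-q"])
cov.stop()
cov.save()
cov.html_report()           # written to the htmlcov/ directory configured above
print("pytest exit code:", exit_code)
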
/docker/Dockerfile.mkcert:
--------------------------------------------------------------------------------
1 | FROM alpine:latest
2 |
3 | # Install mkcert
4 | RUN apk add --no-cache \
5 | ca-certificates \
6 | curl \
7 | nss-tools \
8 | && curl -JLO "https://dl.filippo.io/mkcert/latest?for=linux/amd64" \
9 | && chmod +x mkcert-v*-linux-amd64 \
10 | && mv mkcert-v*-linux-amd64 /usr/local/bin/mkcert
11 |
12 | # Create certificate generation script
13 | COPY docker/generate-mkcert-certs.sh /generate-mkcert-certs.sh
14 | RUN chmod +x /generate-mkcert-certs.sh
15 |
16 | CMD ["/generate-mkcert-certs.sh"]
17 |
18 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """
2 | Setup configuration for TimeTracker application.
3 | This allows the app to be installed as a package for testing.
4 | """
5 |
6 | from setuptools import setup, find_packages
7 |
8 | setup(
9 | name='timetracker',
10 | version='4.6.0',
11 | packages=find_packages(),
12 | include_package_data=True,
13 | install_requires=[
14 | # Core requirements are in requirements.txt
15 | # This file is mainly for making the app importable during testing
16 | ],
17 | python_requires='>=3.11',
18 | )
19 |
20 |
--------------------------------------------------------------------------------
/tests/test_service_worker.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 |
4 | def test_service_worker_serves_assets(client):
5 | resp = client.get("/service-worker.js")
6 | assert resp.status_code == 200
7 | text = resp.get_data(as_text=True)
8 | # Ensure JS content type and presence of cache list with known asset
9 | assert "application/javascript" in (resp.headers.get("Content-Type") or "")
10 | assert "dist/output.css" in text
11 | assert "enhanced-ui.js" in text
12 | # Basic sanity: ASSETS array present
13 | assert "const ASSETS=" in text
14 |
--------------------------------------------------------------------------------
/migrations/versions/002_add_user_full_name.py:
--------------------------------------------------------------------------------
1 | """Add full_name to users
2 |
3 | Revision ID: 002
4 | Revises: 001
5 | Create Date: 2025-01-15 11:00:00.000000
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '002'
13 | down_revision = '001'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | op.add_column('users', sa.Column('full_name', sa.String(length=200), nullable=True))
20 |
21 |
22 | def downgrade():
23 | op.drop_column('users', 'full_name')
24 |
25 |
26 |
--------------------------------------------------------------------------------
/migrations/versions/003_add_user_theme_preference.py:
--------------------------------------------------------------------------------
1 | from alembic import op
2 | import sqlalchemy as sa
3 |
4 | # revision identifiers, used by Alembic.
5 | revision = '003'
6 | down_revision = '002'
7 | branch_labels = None
8 | depends_on = None
9 |
10 |
11 | def upgrade():
12 | with op.batch_alter_table('users') as batch_op:
13 | batch_op.add_column(sa.Column('theme_preference', sa.String(length=10), nullable=True))
14 |
15 |
16 | def downgrade():
17 | with op.batch_alter_table('users') as batch_op:
18 | batch_op.drop_column('theme_preference')
19 |
20 |
21 |
--------------------------------------------------------------------------------
/docker/start-simple.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | cd /app
4 | export FLASK_APP=app
5 |
6 | echo "=== Starting TimeTracker (Simple Mode) ==="
7 |
8 | echo "Waiting for database to be ready..."
9 | # Simple wait loop
10 | sleep 5
11 |
12 | echo "Running database initialization..."
13 | python /app/docker/init-database.py
14 |
15 | echo "Running SQL database initialization (for invoice tables)..."
16 | python /app/docker/init-database-sql.py
17 |
18 | echo "Starting application..."
19 | exec gunicorn --bind 0.0.0.0:8080 --worker-class eventlet --workers 1 --timeout 120 "app:create_app()"
20 |
--------------------------------------------------------------------------------
/scripts/validate-setup.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | REM TimeTracker CI/CD Setup Validation Script for Windows
3 | REM Runs the Python validation script
4 |
5 | echo ========================================
6 | echo TimeTracker CI/CD Setup Validation
7 | echo ========================================
8 | echo.
9 |
10 | REM Check if Python is available
11 | python --version >nul 2>&1
12 | if errorlevel 1 (
13 | echo ERROR: Python not found!
14 | echo Please install Python 3.11 or higher
15 | exit /b 1
16 | )
17 |
18 | REM Run the validation script
19 | python scripts\validate-setup.py
20 | exit /b %ERRORLEVEL%
21 |
22 |
--------------------------------------------------------------------------------
/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
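
Alembic renders this template whenever a new revision is generated, filling in the revision identifiers and the upgrade()/downgrade() bodies; the concrete files under migrations/versions, such as 002_add_user_full_name.py shown earlier, are the result. A sketch of applying them programmatically, assuming the project wires Alembic through Flask-Migrate (an assumption, since neither env.py nor an alembic.ini appears in this dump):

from flask_migrate import upgrade  # assumes Flask-Migrate is the Alembic wrapper in use

from app import create_app

app = create_app()
with app.app_context():
    upgrade()  # apply every pending revision in migrations/versions up to head
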
/app/static/images/avatar-default.svg:
--------------------------------------------------------------------------------
1 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/run_model_tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Simple test runner to check model tests."""
3 | import subprocess
4 | import sys
5 |
6 | result = subprocess.run(
7 | [sys.executable, "-m", "pytest", "-m", "unit and models", "-v", "--tb=short"],
8 | capture_output=True,
9 | text=True
10 | )
11 |
12 | with open("test_results_model.txt", "w", encoding="utf-8") as f:
13 | f.write(result.stdout)
14 | f.write("\n")
15 | f.write(result.stderr)
16 | f.write(f"\n\nExit code: {result.returncode}\n")
17 |
18 | print("Test results written to test_results_model.txt")
19 | print(f"Exit code: {result.returncode}")
20 |
21 |
--------------------------------------------------------------------------------
/check_routes.py:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env python3
 2 | """Check if export routes are registered"""
 3 | from app import create_app
 4 |
 5 | app = create_app()
 6 |
 7 | print("\n=== Export Routes ===")
 8 | with app.app_context():
 9 |     export_rules = [rule for rule in app.url_map.iter_rules() if 'export' in str(rule)]
10 |
11 | for rule in export_rules:
12 |     print(f"✓ {rule}")
13 |
14 | if export_rules:
15 |     print("\n✅ Routes are registered!")
16 | else:
17 |     print("\n❌ No export routes found - check that the reports blueprint is registered.")
18 |
19 | print("\nTo access the new feature:")
20 | print("1. Restart your Flask app: python app.py")
21 | print("2. Go to: http://localhost:5000/reports")
22 | print("3. Click on: 'Export CSV' button")
23 | print("4. Or visit directly: http://localhost:5000/reports/export/form")
24 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/scripts/start-local-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "Starting TimeTracker Local Test Environment with SQLite..."
4 | echo
5 |
6 | # Check if docker-compose is available
7 | if ! command -v docker-compose &> /dev/null; then
8 | echo "Error: docker-compose is not installed or not in PATH"
9 | echo "Please install Docker Compose"
10 | exit 1
11 | fi
12 |
13 | # Check if Docker is running
14 | if ! docker info &> /dev/null; then
15 | echo "Error: Docker is not running"
16 | echo "Please start Docker"
17 | exit 1
18 | fi
19 |
20 | echo "Building and starting containers..."
21 | docker-compose -f docker-compose.local-test.yml up --build
22 |
23 | echo
24 | echo "Local test environment stopped."
25 |
--------------------------------------------------------------------------------
/app/routes/api/v1/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API v1 Routes
3 |
4 | This module contains the v1 API endpoints.
5 | v1 is the current stable API version.
6 |
7 | API Versioning Policy:
8 | - v1: Current stable API (backward compatible)
9 | - Breaking changes require new version (v2, v3, etc.)
10 | - Each version maintains backward compatibility
11 | - Deprecated endpoints are marked but not removed
12 | """
13 |
14 | from flask import Blueprint
15 |
16 | # Create v1 blueprint
17 | api_v1_bp = Blueprint("api_v1", __name__, url_prefix="/api/v1")
18 |
19 | # Import all v1 endpoints
20 | # Note: The actual endpoints are in api_v1.py for now
21 | # This structure allows for future reorganization
22 |
23 | __all__ = ["api_v1_bp"]
24 |
--------------------------------------------------------------------------------
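
As the docstring notes, the concrete endpoints live in api_v1.py, which is not shown here. For illustration only, this is how an endpoint attaches to api_v1_bp and how the blueprint ends up in the application; the /ping route is a made-up example, not one of the project's real endpoints:

from flask import jsonify

from app.routes.api.v1 import api_v1_bp


@api_v1_bp.route("/ping")
def ping():
    # Exposed at /api/v1/ping thanks to the blueprint's url_prefix.
    return jsonify({"status": "ok", "version": "v1"})


# In the application factory, the blueprint is registered once, e.g.:
#     app.register_blueprint(api_v1_bp)
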
/migrations/versions/053_add_quote_payment_terms.py:
--------------------------------------------------------------------------------
1 | """Add payment terms to quotes
2 |
3 | Revision ID: 053
4 | Revises: 052
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '053'
13 | down_revision = '052'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | """Add payment_terms field to quotes table"""
20 | op.add_column('quotes',
21 | sa.Column('payment_terms', sa.String(length=100), nullable=True)
22 | )
23 |
24 |
25 | def downgrade():
26 | """Remove payment_terms field from quotes table"""
27 | op.drop_column('quotes', 'payment_terms')
28 |
29 |
--------------------------------------------------------------------------------
/docker/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 | nodaemon=true
3 | user=root
4 |
5 | [program:postgresql]
6 | command=postgres -D /var/lib/postgresql/data -c config_file=/etc/postgresql/main/postgresql.conf
7 | user=postgres
8 | autostart=true
9 | autorestart=true
10 | priority=100
11 | startsecs=10
12 | startretries=3
13 | stdout_logfile=/dev/stdout
14 | stdout_logfile_maxbytes=0
15 | stderr_logfile=/dev/stderr
16 | stderr_logfile_maxbytes=0
17 |
18 | [program:flask-app]
19 | command=/app/start.sh
20 | user=timetracker
21 | autostart=true
22 | autorestart=true
23 | priority=200
24 | startsecs=30
25 | startretries=3
26 | stdout_logfile=/dev/stdout
27 | stdout_logfile_maxbytes=0
28 | stderr_logfile=/dev/stderr
29 | stderr_logfile_maxbytes=0
30 | redirect_stderr=true
31 |
--------------------------------------------------------------------------------
/docker/init-db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | # Initialize PostgreSQL database
5 | if [ ! -f /var/lib/postgresql/data/PG_VERSION ]; then
6 | echo "Initializing PostgreSQL database..."
7 | su - postgres -c "initdb -D /var/lib/postgresql/data"
8 | su - postgres -c "pg_ctl -D /var/lib/postgresql/data -l logfile start"
9 |
10 | # Create database and user
11 | su - postgres -c "createdb timetracker"
12 | su - postgres -c "createuser -s timetracker"
13 |
14 | # Run initialization SQL
15 | su - postgres -c "psql -d timetracker -f /app/docker/init.sql"
16 |
17 | su - postgres -c "pg_ctl -D /var/lib/postgresql/data stop"
18 | echo "PostgreSQL database initialized successfully"
19 | else
20 | echo "PostgreSQL database already exists"
21 | fi
22 |
--------------------------------------------------------------------------------
/migrations/versions/031_add_standard_hours_per_day.py:
--------------------------------------------------------------------------------
1 | """Add standard_hours_per_day to users
2 |
3 | Revision ID: 031
4 | Revises: 030
5 | Create Date: 2025-10-27 10:00:00.000000
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '031'
13 | down_revision = '030'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | """Add standard_hours_per_day column to users table"""
20 | op.add_column('users',
21 | sa.Column('standard_hours_per_day', sa.Float(), nullable=False, server_default='8.0')
22 | )
23 |
24 |
25 | def downgrade():
26 | """Remove standard_hours_per_day column from users table"""
27 | op.drop_column('users', 'standard_hours_per_day')
28 |
29 |
--------------------------------------------------------------------------------
/run_tests_script.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Simple script to run tests and display results"""
3 | import sys
4 | import os
5 |
6 | # Add current directory to path
7 | sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
8 |
9 | import pytest
10 |
11 | if __name__ == "__main__":
12 | print("=" * 70)
13 | print("Running TimeTracker Tests")
14 | print("=" * 70)
15 | print()
16 |
17 | # Run pytest with arguments
18 | exit_code = pytest.main([
19 | "tests/",
20 | "-v",
21 | "--tb=short",
22 | "-ra",
23 | "--color=no"
24 | ])
25 |
26 | print()
27 | print("=" * 70)
28 | print(f"Tests completed with exit code: {exit_code}")
29 | print("=" * 70)
30 |
31 | sys.exit(exit_code)
32 |
33 |
--------------------------------------------------------------------------------
/tailwind.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('tailwindcss').Config} */
2 | module.exports = {
3 | darkMode: 'class',
4 | content: [
5 | './app/templates/**/*.html',
6 | './app/static/src/**/*.js',
7 | ],
8 | theme: {
9 | extend: {
10 | colors: {
11 | 'primary': '#4A90E2',
12 | 'secondary': '#50E3C2',
13 | 'background-light': '#F7F9FB',
14 | 'background-dark': '#1A202C',
15 | 'card-light': '#FFFFFF',
16 | 'card-dark': '#2D3748',
17 | 'text-light': '#2D3748',
18 | 'text-dark': '#E2E8F0',
19 | 'text-muted-light': '#A0AEC0',
20 | 'text-muted-dark': '#718096',
21 | 'border-light': '#E2E8F0',
22 | 'border-dark': '#4A5568',
23 | },
24 | },
25 | },
26 | plugins: [],
27 | }
28 |
--------------------------------------------------------------------------------
/scripts/start-local-test.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | echo Starting TimeTracker Local Test Environment with SQLite...
3 | echo.
4 |
5 | REM Check if docker-compose is available
6 | docker-compose --version >nul 2>&1
7 | if %errorlevel% neq 0 (
8 | echo Error: docker-compose is not installed or not in PATH
9 | echo Please install Docker Desktop or Docker Compose
10 | pause
11 | exit /b 1
12 | )
13 |
14 | REM Check if Docker is running
15 | docker info >nul 2>&1
16 | if %errorlevel% neq 0 (
17 | echo Error: Docker is not running
18 | echo Please start Docker Desktop
19 | pause
20 | exit /b 1
21 | )
22 |
23 | echo Building and starting containers...
24 | docker-compose -f docker-compose.local-test.yml up --build
25 |
26 | echo.
27 | echo Local test environment stopped.
28 | pause
29 |
--------------------------------------------------------------------------------
/app/templates/components/cards.html:
--------------------------------------------------------------------------------
1 | {% macro stat_card(title, value, change, change_color) %}
2 |
3 | {{ title }}
4 | {{ value }} {{ change }}
5 |
6 | {% endmacro %}
7 |
8 | {% macro info_card(title, value, subtext) %}
9 |
10 | {{ title }}
11 | {{ value }}
12 | {{ subtext }}
13 |
14 | {% endmacro %}
15 |
--------------------------------------------------------------------------------
/scripts/validate-setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # TimeTracker CI/CD Setup Validation Script for Linux/Mac
3 | # Runs the Python validation script
4 |
5 | set -e
6 |
7 | echo "========================================"
8 | echo "TimeTracker CI/CD Setup Validation"
9 | echo "========================================"
10 | echo ""
11 |
12 | # Check if Python is available
13 | if ! command -v python3 &> /dev/null && ! command -v python &> /dev/null; then
14 | echo "ERROR: Python not found!"
15 | echo "Please install Python 3.11 or higher"
16 | exit 1
17 | fi
18 |
19 | # Use python3 if available, otherwise python
20 | if command -v python3 &> /dev/null; then
21 | PYTHON=python3
22 | else
23 | PYTHON=python
24 | fi
25 |
26 | # Run the validation script
27 | $PYTHON scripts/validate-setup.py
28 | exit $?
29 |
30 |
--------------------------------------------------------------------------------
/app/utils/decorators.py:
--------------------------------------------------------------------------------
1 | """Common decorators for route handlers"""
2 |
3 | from functools import wraps
4 | from flask import flash, redirect, url_for
5 | from flask_login import current_user
6 | from flask_babel import gettext as _
7 |
8 |
9 | def admin_required(f):
10 | """Decorator to require admin access
11 |
12 | DEPRECATED: Use @admin_or_permission_required() with specific permissions instead.
13 | This decorator is kept for backward compatibility.
14 | """
15 |
16 | @wraps(f)
17 | def decorated_function(*args, **kwargs):
18 | if not current_user.is_authenticated or not current_user.is_admin:
19 | flash(_("Administrator access required"), "error")
20 | return redirect(url_for("main.dashboard"))
21 | return f(*args, **kwargs)
22 |
23 | return decorated_function
24 |
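A brief usage sketch for the decorator above (the blueprint, route, and template names are hypothetical):

```python
# Hypothetical usage of admin_required; non-admins get flashed a message and redirected.
from flask import Blueprint, render_template
from flask_login import login_required

from app.utils.decorators import admin_required

example_bp = Blueprint("admin_example", __name__)


@example_bp.route("/admin/reports")
@login_required          # runs first at request time: requires a logged-in user
@admin_required          # then enforces current_user.is_admin
def admin_reports():
    return render_template("admin/reports.html")
```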
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "timetracker-frontend",
3 | "version": "1.0.0",
4 | "description": "Frontend assets for TimeTracker",
5 | "main": "index.js",
6 | "scripts": {
7 | "install:all": "npm install && npm install -D tailwindcss postcss autoprefixer",
8 | "install:cmdk": "npm install cmdk",
9 | "build:css": "tailwindcss -i ./app/static/src/input.css -o ./app/static/dist/output.css --watch",
10 | "build:docker": "npx tailwindcss -i ./app/static/src/input.css -o ./app/static/dist/output.css"
11 | },
12 | "keywords": [],
13 | "author": "",
14 | "license": "ISC",
15 | "devDependencies": {
16 | "autoprefixer": "^10.4.16",
17 | "postcss": "^8.4.31",
18 | "tailwindcss": "^3.3.5"
19 | },
20 | "dependencies": {
21 | "cmdk": "^1.1.1",
22 | "framer-motion": "^12.23.24"
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/docker/init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | echo "=== Starting TimeTracker Initialization ==="
5 |
6 | # Create and set up PostgreSQL data directory
7 | echo "Setting up PostgreSQL data directory..."
8 | mkdir -p /var/lib/postgresql/data
9 | chown postgres:postgres /var/lib/postgresql/data
10 | chmod 700 /var/lib/postgresql/data
11 |
12 | # Initialize PostgreSQL database if needed
13 | if [ ! -f /var/lib/postgresql/data/PG_VERSION ]; then
14 | echo "Initializing PostgreSQL database..."
15 | su - postgres -c "initdb -D /var/lib/postgresql/data"
16 | echo "PostgreSQL database initialized successfully"
17 | else
18 | echo "PostgreSQL database already exists"
19 | fi
20 |
21 | # Start supervisor to manage services
22 | echo "Starting supervisor..."
23 | exec /usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf
24 |
--------------------------------------------------------------------------------
/scripts/version-manager.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | REM Version Manager for TimeTracker - Windows Batch Wrapper
3 |
4 | if "%1"=="" (
5 | echo Usage: version-manager.bat [action] [options]
6 | echo.
7 | echo Actions:
8 | echo tag [version] [message] - Create a version tag
9 | echo build [number] - Create a build tag
10 | echo list - List all tags
11 | echo info [tag] - Show tag information
12 | echo status - Show current status
13 | echo suggest - Suggest next version
14 | echo.
15 | echo Examples:
16 | echo version-manager.bat tag v1.2.3 "Release 1.2.3"
17 | echo version-manager.bat build 123
18 | echo version-manager.bat status
19 | echo.
20 | exit /b 1
21 | )
22 |
23 | python scripts/version-manager.py %*
24 |
--------------------------------------------------------------------------------
/docs/reports/README.md:
--------------------------------------------------------------------------------
1 | # Reports & Analysis
2 |
3 | Reports, summaries, and analysis documents for TimeTracker.
4 |
5 | ## 📊 Available Reports
6 |
7 | - **[All Bugfixes Summary](ALL_BUGFIXES_SUMMARY.md)** - Complete list of bugfixes
8 | - **[i18n Audit Report](i18n_audit_report.md)** - Internationalization audit results
9 | - **[Translation Analysis Report](TRANSLATION_ANALYSIS_REPORT.md)** - Translation system analysis
10 |
11 | ## 📝 Report Types
12 |
13 | This directory contains:
14 | - **Bugfix summaries** - Historical bugfix documentation
15 | - **Audit reports** - System audits and analysis
16 | - **Analysis reports** - Feature and system analysis
17 |
18 | ## 📚 Related Documentation
19 |
20 | - **[Main Documentation Index](../README.md)** - Complete documentation overview
21 | - **[Implementation Notes](../implementation-notes/)** - Development notes and summaries
22 |
--------------------------------------------------------------------------------
/scripts/version-manager.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Version Manager for TimeTracker - Unix Shell Wrapper
3 |
4 | if [ $# -eq 0 ]; then
5 | echo "Usage: ./version-manager.sh [action] [options]"
6 | echo ""
7 | echo "Actions:"
8 | echo " tag [version] [message] - Create a version tag"
9 | echo " build [number] - Create a build tag"
10 | echo " list - List all tags"
11 | echo " info [tag] - Show tag information"
12 | echo " status - Show current status"
13 | echo " suggest - Suggest next version"
14 | echo ""
15 | echo "Examples:"
16 | echo " ./version-manager.sh tag v1.2.3 'Release 1.2.3'"
17 | echo " ./version-manager.sh build 123"
18 | echo " ./version-manager.sh status"
19 | echo ""
20 | exit 1
21 | fi
22 |
23 | python3 scripts/version-manager.py "$@"
24 |
--------------------------------------------------------------------------------
/docker/entrypoint-local-test-simple.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # TimeTracker Local Test Entrypoint - Simple Version
3 | # Runs everything as root to avoid permission issues
4 |
5 | echo "=== TimeTracker Local Test Container Starting (Simple Mode) ==="
6 | echo "Timestamp: $(date)"
7 | echo "Container ID: $(hostname)"
8 | echo "Python version: $(python --version 2>/dev/null || echo 'Python not available')"
9 | echo "Current directory: $(pwd)"
10 | echo "User: $(whoami)"
11 | echo
12 |
13 | # Function to log messages with timestamp
14 | log() {
15 | echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
16 | }
17 |
18 | # Ensure data directory exists and has proper permissions
19 | log "Setting up data directory..."
20 | mkdir -p /data /data/uploads /app/logs
21 | chmod 755 /data /data/uploads /app/logs
22 |
23 | log "Running as root user (simplified mode)..."
24 | # Run the application directly as root
25 | exec "$@"
26 |
--------------------------------------------------------------------------------
/env.local-test.example:
--------------------------------------------------------------------------------
1 | # Local Testing Environment Variables
2 | # Copy this file to .env.local-test and modify as needed
3 |
4 | # Timezone (default: Europe/Brussels)
5 | TZ=Europe/Brussels
6 |
7 | # Currency (default: EUR)
8 | CURRENCY=EUR
9 |
10 | # Timer settings
11 | ROUNDING_MINUTES=1
12 | SINGLE_ACTIVE_TIMER=true
13 | IDLE_TIMEOUT_MINUTES=30
14 |
15 | # User management
16 | ALLOW_SELF_REGISTER=true
17 | ADMIN_USERNAMES=admin,testuser
18 |
19 | # Security (CHANGE THESE FOR PRODUCTION!)
20 | SECRET_KEY=local-test-secret-key-change-this
21 |
22 | # Database (SQLite for local testing)
23 | DATABASE_URL=sqlite:////data/timetracker.db
24 |
25 | # Logging
26 | LOG_FILE=/app/logs/timetracker.log
27 |
28 | # Cookie settings (disabled for local testing)
29 | SESSION_COOKIE_SECURE=false
30 | REMEMBER_COOKIE_SECURE=false
31 |
32 | # Flask environment
33 | FLASK_ENV=development
34 | FLASK_DEBUG=true
35 |
36 |
--------------------------------------------------------------------------------
/app/models/invoice_template.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from app import db
3 |
4 |
5 | class InvoiceTemplate(db.Model):
6 | """Reusable invoice templates/themes with customizable HTML and CSS."""
7 |
8 | __tablename__ = "invoice_templates"
9 |
10 | id = db.Column(db.Integer, primary_key=True)
11 | name = db.Column(db.String(100), nullable=False, unique=True, index=True)
12 | description = db.Column(db.String(255), nullable=True)
13 | html = db.Column(db.Text, nullable=True)
14 | css = db.Column(db.Text, nullable=True)
15 | is_default = db.Column(db.Boolean, default=False, nullable=False)
16 |
17 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
18 | updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
19 |
20 | def __repr__(self):
21 | return f"<InvoiceTemplate {self.name}>"
22 |
--------------------------------------------------------------------------------
/docs/guides/README.md:
--------------------------------------------------------------------------------
1 | # User Guides
2 |
3 | Step-by-step guides for using TimeTracker.
4 |
5 | ## 📖 Available Guides
6 |
7 | - **[Deployment Guide](DEPLOYMENT_GUIDE.md)** - How to deploy TimeTracker
8 | - **[Quick Start Guide](QUICK_START_GUIDE.md)** - Get started quickly
9 | - **[Quick Start Local Development](QUICK_START_LOCAL_DEVELOPMENT.md)** - Local development setup
10 | - **[Improvements Quick Reference](IMPROVEMENTS_QUICK_REFERENCE.md)** - Quick reference for improvements
11 |
12 | ## 🚀 Getting Started
13 |
14 | New to TimeTracker? Start with the [Getting Started Guide](../GETTING_STARTED.md) for a comprehensive tutorial.
15 |
16 | ## 📚 Related Documentation
17 |
18 | - **[Main Documentation Index](../README.md)** - Complete documentation overview
19 | - **[Feature Documentation](../features/)** - Detailed feature guides
20 | - **[Troubleshooting](../#-troubleshooting)** - Common issues and solutions
21 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
12 | polar: # Replace with a single Polar username
13 | buy_me_a_coffee: drytrix
14 | thanks_dev: # Replace with a single thanks.dev username
15 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
16 |
--------------------------------------------------------------------------------
/migrations/add_analytics_column.sql:
--------------------------------------------------------------------------------
1 | -- Add allow_analytics column to settings table
2 | -- This script adds the missing column that the application expects
3 |
4 | -- Check if column already exists
5 | DO $$
6 | BEGIN
7 | IF NOT EXISTS (
8 | SELECT 1
9 | FROM information_schema.columns
10 | WHERE table_name = 'settings'
11 | AND column_name = 'allow_analytics'
12 | ) THEN
13 | -- Add the new column
14 | ALTER TABLE settings ADD COLUMN allow_analytics BOOLEAN DEFAULT TRUE;
15 | RAISE NOTICE 'Added allow_analytics column to settings table';
16 | ELSE
17 | RAISE NOTICE 'allow_analytics column already exists in settings table';
18 | END IF;
19 | END $$;
20 |
21 | -- Verify the column was added
22 | SELECT column_name, data_type, is_nullable, column_default
23 | FROM information_schema.columns
24 | WHERE table_name = 'settings'
25 | AND column_name = 'allow_analytics';
26 |
--------------------------------------------------------------------------------
/.env.local-test:
--------------------------------------------------------------------------------
1 | # Local Testing Environment Variables
2 | # Copy this file to .env.local-test and modify as needed
3 |
4 | # Timezone (default: Europe/Brussels)
5 | TZ=Europe/Brussels
6 |
7 | # Currency (default: EUR)
8 | CURRENCY=EUR
9 |
10 | # Timer settings
11 | ROUNDING_MINUTES=1
12 | SINGLE_ACTIVE_TIMER=true
13 | IDLE_TIMEOUT_MINUTES=30
14 |
15 | # User management
16 | ALLOW_SELF_REGISTER=true
17 | ADMIN_USERNAMES=admin
18 | # Security (CHANGE THESE FOR PRODUCTION!)
19 | SECRET_KEY=local-test-secret-key-change-this
20 |
21 | # Database (SQLite for local testing)
22 | DATABASE_URL=sqlite:////data/timetracker.db
23 |
24 | # Logging
25 | LOG_FILE=/app/logs/timetracker.log
26 |
27 | # Cookie settings (disabled for local testing)
28 | SESSION_COOKIE_SECURE=false
29 | REMEMBER_COOKIE_SECURE=false
30 |
31 | # Flask environment
32 | FLASK_ENV=development
33 | FLASK_DEBUG=true
34 |
35 | # License server (disabled for local testing)
36 | LICENSE_SERVER_ENABLED=false
--------------------------------------------------------------------------------
/app/repositories/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Repository layer for data access abstraction.
3 | This layer provides a clean interface for database operations,
4 | making it easier to test and maintain.
5 | """
6 |
7 | from .time_entry_repository import TimeEntryRepository
8 | from .project_repository import ProjectRepository
9 | from .invoice_repository import InvoiceRepository
10 | from .user_repository import UserRepository
11 | from .client_repository import ClientRepository
12 | from .task_repository import TaskRepository
13 | from .expense_repository import ExpenseRepository
14 | from .payment_repository import PaymentRepository
15 | from .comment_repository import CommentRepository
16 |
17 | __all__ = [
18 | "TimeEntryRepository",
19 | "ProjectRepository",
20 | "InvoiceRepository",
21 | "UserRepository",
22 | "ClientRepository",
23 | "TaskRepository",
24 | "ExpenseRepository",
25 | "PaymentRepository",
26 | "CommentRepository",
27 | ]
28 |
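A minimal sketch of using the repository layer inside an application context (`create_app()` and the repository methods shown exist elsewhere in this codebase; the script itself is illustrative):

```python
# Illustrative: query through repositories instead of touching models directly.
from app import create_app
from app.repositories import ClientRepository, UserRepository

app = create_app()
with app.app_context():
    active_clients = ClientRepository().get_active_clients()  # see client_repository.py
    admins = UserRepository().get_admins()                     # see user_repository.py
    print(f"{len(active_clients)} active clients, {len(admins)} admins")
```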
--------------------------------------------------------------------------------
/docker-compose.https-auto.yml:
--------------------------------------------------------------------------------
1 | services:
2 | # Certificate generator - runs once to create certificates
3 | certgen:
4 | image: alpine:latest
5 | container_name: timetracker-certgen
6 | volumes:
7 | - ./nginx/ssl:/certs
8 | - ./scripts:/scripts:ro
9 | command: sh /scripts/generate-certs.sh
10 | restart: "no"
11 |
12 | nginx:
13 | image: nginx:alpine
14 | container_name: timetracker-nginx
15 | ports:
16 | - "80:80"
17 | - "443:443"
18 | volumes:
19 | - ./nginx/conf.d:/etc/nginx/conf.d:ro
20 | - ./nginx/ssl:/etc/nginx/ssl:ro
21 | depends_on:
22 | certgen:
23 | condition: service_completed_successfully
24 | app:
25 | condition: service_started
26 | restart: unless-stopped
27 |
28 | app:
29 | ports: [] # nginx handles all ports
30 | environment:
31 | - WTF_CSRF_SSL_STRICT=true
32 | - SESSION_COOKIE_SECURE=true
33 | - CSRF_COOKIE_SECURE=true
34 | restart: unless-stopped
35 |
36 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/app/repositories/client_repository.py:
--------------------------------------------------------------------------------
1 | """
2 | Repository for client data access operations.
3 | """
4 |
5 | from typing import List, Optional
6 | from sqlalchemy.orm import joinedload
7 | from app import db
8 | from app.models import Client
9 | from app.repositories.base_repository import BaseRepository
10 |
11 |
12 | class ClientRepository(BaseRepository[Client]):
13 | """Repository for client operations"""
14 |
15 | def __init__(self):
16 | super().__init__(Client)
17 |
18 | def get_with_projects(self, client_id: int) -> Optional[Client]:
19 | """Get client with projects loaded"""
20 | return self.model.query.options(joinedload(Client.projects)).get(client_id)
21 |
22 | def get_active_clients(self) -> List[Client]:
23 | """Get all active clients"""
24 | return self.model.query.filter_by(status="active").order_by(Client.name).all()
25 |
26 | def get_by_name(self, name: str) -> Optional[Client]:
27 | """Get client by name"""
28 | return self.model.query.filter_by(name=name).first()
29 |
--------------------------------------------------------------------------------
/tests/test_system_ui_flags.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from flask import url_for
3 |
4 | from app.models import Settings, User
5 | from app import db
6 |
7 |
8 | class TestSystemUiFlags:
9 | def test_calendar_hidden_when_system_disabled(self, client, user):
10 | """If calendar is disabled system-wide, it should not appear in nav or user settings."""
11 | # Disable calendar system-wide
12 | settings = Settings.get_settings()
13 | settings.ui_allow_calendar = False
14 | db.session.commit()
15 |
16 | # Log in
17 | with client.session_transaction() as sess:
18 | sess["_user_id"] = str(user.id)
19 |
20 | # Settings page should not contain the calendar checkbox
21 | resp = client.get("/settings")
22 | data = resp.data.decode("utf-8")
23 | assert "ui_show_calendar" not in data
24 |
25 | # Sidebar nav should not show Calendar section label
26 | resp = client.get(url_for("main.dashboard"))
27 | nav = resp.data.decode("utf-8")
28 | assert "Calendar" not in nav
29 |
--------------------------------------------------------------------------------
/app/templates/admin/system_info.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% from "components/cards.html" import info_card %}
3 | {% from "components/ui.html" import page_header, breadcrumb_nav, button, filter_badge %}
4 |
5 | {% block content %}
6 | {% set breadcrumbs = [
7 | {'text': 'Admin', 'url': url_for('admin.admin_dashboard')},
8 | {'text': 'System Information'}
9 | ] %}
10 |
11 | {{ page_header(
12 | icon_class='fas fa-info-circle',
13 | title_text='System Information',
14 | subtitle_text='Key metrics and statistics about the application',
15 | breadcrumbs=breadcrumbs,
16 | actions_html=None
17 | ) }}
18 |
19 |
20 | {{ info_card("Total Users", total_users, "All time") }}
21 | {{ info_card("Total Projects", total_projects, "All time") }}
22 | {{ info_card("Total Time Entries", total_entries, "All time") }}
23 | {{ info_card("Active Timers", active_timers, "Currently running") }}
24 | {{ info_card("Database Size (MB)", db_size_mb, "Current size") }}
25 |
26 | {% endblock %}
27 |
--------------------------------------------------------------------------------
/prometheus/prometheus.yml:
--------------------------------------------------------------------------------
1 | # Prometheus configuration for TimeTracker
2 | # This file configures Prometheus to scrape metrics from the TimeTracker application
3 |
4 | global:
5 | scrape_interval: 15s # Scrape targets every 15 seconds
6 | evaluation_interval: 15s # Evaluate rules every 15 seconds
7 | external_labels:
8 | monitor: 'timetracker'
9 |
10 | # Scrape configurations
11 | scrape_configs:
12 | # TimeTracker application metrics
13 | - job_name: 'timetracker'
14 | static_configs:
15 | - targets: ['timetracker:8000'] # Scrape from timetracker service
16 | metrics_path: '/metrics'
17 | scrape_interval: 30s # Scrape every 30 seconds
18 | scrape_timeout: 10s
19 |
20 | # Prometheus self-monitoring
21 | - job_name: 'prometheus'
22 | static_configs:
23 | - targets: ['localhost:9090']
24 |
25 | # Example alerting rules (optional)
26 | # rule_files:
27 | # - 'alerts.yml'
28 |
29 | # Alertmanager configuration (optional)
30 | # alerting:
31 | # alertmanagers:
32 | # - static_configs:
33 | # - targets: ['alertmanager:9093']
34 |
35 |
--------------------------------------------------------------------------------
/promtail/promtail-config.yml:
--------------------------------------------------------------------------------
1 | # Promtail configuration for shipping logs to Loki
2 | # This file configures Promtail to read TimeTracker logs and send them to Loki
3 |
4 | server:
5 | http_listen_port: 9080
6 | grpc_listen_port: 0
7 |
8 | positions:
9 | filename: /tmp/positions.yaml
10 |
11 | clients:
12 | - url: http://loki:3100/loki/api/v1/push
13 |
14 | scrape_configs:
15 | # Scrape JSON logs from TimeTracker
16 | - job_name: timetracker
17 | static_configs:
18 | - targets:
19 | - localhost
20 | labels:
21 | job: timetracker
22 | __path__: /var/log/timetracker/app.jsonl
23 |
24 | # Parse JSON logs
25 | pipeline_stages:
26 | - json:
27 | expressions:
28 | timestamp: asctime
29 | level: levelname
30 | logger: name
31 | message: message
32 | request_id: request_id
33 |
34 | - labels:
35 | level:
36 | logger:
37 |
38 | - timestamp:
39 | source: timestamp
40 | format: RFC3339
41 |
42 |
--------------------------------------------------------------------------------
/docker/fix-invoices-now.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Immediate fix for missing invoice tables
4 | Run this script to create the missing tables right now
5 | """
6 |
7 | import os
8 | import sys
9 | import subprocess
10 |
11 | def main():
12 | print("=== Fixing Missing Invoice Tables ===")
13 |
14 | # Run the fix script
15 | try:
16 | result = subprocess.run([
17 | sys.executable,
18 | '/app/docker/fix-invoice-tables.py'
19 | ], capture_output=True, text=True, check=True)
20 |
21 | print("✓ Fix script output:")
22 | print(result.stdout)
23 |
24 | if result.stderr:
25 | print("Warnings/Errors:")
26 | print(result.stderr)
27 |
28 | except subprocess.CalledProcessError as e:
29 | print(f"✗ Fix script failed: {e}")
30 | print("STDOUT:", e.stdout)
31 | print("STDERR:", e.stderr)
32 | sys.exit(1)
33 |
34 | print("=== Invoice Tables Fixed ===")
35 | print("You can now access the invoice functionality!")
36 |
37 | if __name__ == '__main__':
38 | main()
39 |
--------------------------------------------------------------------------------
/docs/api/README.md:
--------------------------------------------------------------------------------
1 | # API Documentation
2 |
3 | Complete API reference for TimeTracker REST API.
4 |
5 | ## 📖 Overview
6 |
7 | TimeTracker provides a comprehensive REST API for programmatic access to all features. The API supports token-based authentication and follows RESTful principles.
8 |
9 | ## 📚 Documentation
10 |
11 | - **[REST API](REST_API.md)** - Complete API reference with all endpoints
12 | - **[API Token Scopes](API_TOKEN_SCOPES.md)** - Understanding token permissions and scopes
13 | - **[API Versioning](API_VERSIONING.md)** - API versioning strategy and best practices
14 | - **[API Enhancements](API_ENHANCEMENTS.md)** - Recent API improvements and additions
15 |
16 | ## 🔑 Quick Start
17 |
18 | 1. Generate an API token in your user settings
19 | 2. Include the token in the `Authorization` header: `Bearer YOUR_TOKEN` (see the sketch after this list)
20 | 3. Make requests to the API endpoints
21 | 4. Review the [API Token Scopes](API_TOKEN_SCOPES.md) to ensure your token has the required permissions
22 |
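A minimal sketch of step 2 using only the Python standard library (the host, port, and `/api/v1/projects` path are placeholders; see the REST API reference for real endpoints):

```python
# Illustrative authenticated request; replace the URL and token with real values.
import json
import urllib.request

TOKEN = "YOUR_TOKEN"
req = urllib.request.Request(
    "http://localhost:8080/api/v1/projects",        # placeholder endpoint
    headers={"Authorization": f"Bearer {TOKEN}"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))
```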
23 | ## 📋 API Endpoints
24 |
25 | See the [REST API](REST_API.md) documentation for a complete list of available endpoints organized by resource type.
26 |
--------------------------------------------------------------------------------
/app/repositories/user_repository.py:
--------------------------------------------------------------------------------
1 | """
2 | Repository for user data access operations.
3 | """
4 |
5 | from typing import List, Optional
6 | from app import db
7 | from app.models import User
8 | from app.repositories.base_repository import BaseRepository
9 | from app.constants import UserRole
10 |
11 |
12 | class UserRepository(BaseRepository[User]):
13 | """Repository for user operations"""
14 |
15 | def __init__(self):
16 | super().__init__(User)
17 |
18 | def get_by_username(self, username: str) -> Optional[User]:
19 | """Get user by username"""
20 | return self.model.query.filter_by(username=username).first()
21 |
22 | def get_by_role(self, role: str) -> List[User]:
23 | """Get users by role"""
24 | return self.model.query.filter_by(role=role).all()
25 |
26 | def get_active_users(self) -> List[User]:
27 | """Get all active users"""
28 | return self.model.query.filter_by(is_active=True).all()
29 |
30 | def get_admins(self) -> List[User]:
31 | """Get all admin users"""
32 | return self.model.query.filter_by(role=UserRole.ADMIN.value, is_active=True).all()
33 |
--------------------------------------------------------------------------------
/scripts/start-local-test.ps1:
--------------------------------------------------------------------------------
1 | # PowerShell script to start TimeTracker Local Test Environment with SQLite
2 |
3 | Write-Host "Starting TimeTracker Local Test Environment with SQLite..." -ForegroundColor Green
4 | Write-Host ""
5 |
6 | # Check if docker-compose is available
7 | try {
8 | $null = docker-compose --version
9 | } catch {
10 | Write-Host "Error: docker-compose is not installed or not in PATH" -ForegroundColor Red
11 | Write-Host "Please install Docker Desktop or Docker Compose" -ForegroundColor Yellow
12 | Read-Host "Press Enter to exit"
13 | exit 1
14 | }
15 |
16 | # Check if Docker is running
17 | try {
18 | $null = docker info
19 | } catch {
20 | Write-Host "Error: Docker is not running" -ForegroundColor Red
21 | Write-Host "Please start Docker Desktop" -ForegroundColor Yellow
22 | Read-Host "Press Enter to exit"
23 | exit 1
24 | }
25 |
26 | Write-Host "Building and starting containers..." -ForegroundColor Cyan
27 | docker-compose -f docker-compose.local-test.yml up --build
28 |
29 | Write-Host ""
30 | Write-Host "Local test environment stopped." -ForegroundColor Green
31 | Read-Host "Press Enter to exit"
32 |
--------------------------------------------------------------------------------
/app/static/images/timetracker-logo.svg:
--------------------------------------------------------------------------------
1 |
24 |
--------------------------------------------------------------------------------
/docs/development/README.md:
--------------------------------------------------------------------------------
1 | # Developer Documentation
2 |
3 | Complete documentation for developers contributing to TimeTracker.
4 |
5 | ## 📖 Getting Started
6 |
7 | - **[Contributing Guidelines](CONTRIBUTING.md)** - How to contribute to TimeTracker
8 | - **[Code of Conduct](CODE_OF_CONDUCT.md)** - Community standards
9 | - **[Project Structure](PROJECT_STRUCTURE.md)** - Codebase organization
10 | - **[Local Testing with SQLite](LOCAL_TESTING_WITH_SQLITE.md)** - Quick local testing setup
11 | - **[Local Development with Analytics](LOCAL_DEVELOPMENT_WITH_ANALYTICS.md)** - Development setup with analytics
12 |
13 | ## 🏗️ Development Resources
14 |
15 | ### Testing
16 | - See [testing/](../testing/) for testing documentation
17 |
18 | ### CI/CD
19 | - See [cicd/](../cicd/) for CI/CD setup and workflows
20 |
21 | ### Architecture
22 | - See [implementation-notes/](../implementation-notes/) for architecture decisions and notes
23 |
24 | ## 📚 Related Documentation
25 |
26 | - **[Main Documentation Index](../README.md)** - Complete documentation overview
27 | - **[API Documentation](../api/)** - REST API reference
28 | - **[Admin Documentation](../admin/)** - Administrator guides
29 |
--------------------------------------------------------------------------------
/docker/test-startup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "=== Testing Startup Script ==="
3 |
4 | echo "Current working directory: $(pwd)"
5 | echo "Current user: $(whoami)"
6 | echo "Current user ID: $(id)"
7 |
8 | echo "Checking if startup script exists..."
9 | if [ -f "/app/docker/start.sh" ]; then
10 | echo "✓ Startup script exists at /app/docker/start.sh"
11 | echo "File permissions: $(ls -la /app/docker/start.sh)"
12 | echo "File owner: $(stat -c '%U:%G' /app/docker/start.sh)"
13 |
14 | echo "Testing if script is executable..."
15 | if [ -x "/app/docker/start.sh" ]; then
16 | echo "✓ Startup script is executable"
17 | echo "Script first few lines:"
18 | head -5 /app/docker/start.sh
19 | else
20 | echo "✗ Startup script is NOT executable"
21 | fi
22 | else
23 | echo "✗ Startup script does NOT exist at /app/docker/start.sh"
24 | echo "Contents of /app/docker/:"
25 | ls -la /app/docker/ || echo "Directory /app/docker/ does not exist"
26 | fi
27 |
28 | echo "Checking /app directory structure..."
29 | echo "Contents of /app:"
30 | ls -la /app/ || echo "Directory /app/ does not exist"
31 |
32 | echo "=== Test Complete ==="
33 |
--------------------------------------------------------------------------------
/app/models/task_activity.py:
--------------------------------------------------------------------------------
1 | from app import db
2 | from app.utils.timezone import now_in_app_timezone
3 |
4 |
5 | class TaskActivity(db.Model):
6 | """Lightweight audit log for significant task events."""
7 |
8 | __tablename__ = "task_activities"
9 |
10 | id = db.Column(db.Integer, primary_key=True)
11 | task_id = db.Column(db.Integer, db.ForeignKey("tasks.id"), nullable=False, index=True)
12 | user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True, index=True)
13 | event = db.Column(db.String(50), nullable=False, index=True)
14 | details = db.Column(db.Text, nullable=True)
15 | created_at = db.Column(db.DateTime, default=now_in_app_timezone, nullable=False, index=True)
16 |
17 | task = db.relationship("Task", backref=db.backref("activities", lazy="dynamic", cascade="all, delete-orphan"))
18 | user = db.relationship("User")
19 |
20 | def __init__(self, task_id, event, user_id=None, details=None):
21 | self.task_id = task_id
22 | self.user_id = user_id
23 | self.event = event
24 | self.details = details
25 |
26 | def __repr__(self):
27 | return f"<TaskActivity {self.event} task={self.task_id}>"
28 |
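An illustrative snippet for recording an activity with the constructor above (the ids, event name, and details text are placeholders, and an application context is assumed):

```python
# Hypothetical: log a task event to the audit trail.
from app import db
from app.models.task_activity import TaskActivity

activity = TaskActivity(task_id=1, event="status_changed",
                        user_id=1, details="todo -> in_progress")
db.session.add(activity)
db.session.commit()   # created_at is filled in via now_in_app_timezone
```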
--------------------------------------------------------------------------------
/docker/migrate-add-tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Database migration script to add Task Management feature
4 | """
5 |
6 | import sys
7 | import os
8 |
9 | # Add the parent directory to the path so we can import the app
10 | sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
11 |
12 | from app import create_app, db
13 | from app.models import Task
14 |
15 | def migrate_database():
16 | """Run the database migration"""
17 | app = create_app()
18 |
19 | with app.app_context():
20 | print("Starting Task Management migration...")
21 |
22 | try:
23 | # Use the app's built-in migration function
24 | from app import migrate_task_management_tables
25 | migrate_task_management_tables()
26 |
27 | print("\nMigration completed successfully!")
28 | print("Task Management feature is now available.")
29 | return True
30 |
31 | except Exception as e:
32 | print(f"✗ Migration failed: {e}")
33 | return False
34 |
35 | if __name__ == '__main__':
36 | success = migrate_database()
37 | sys.exit(0 if success else 1)
38 |
--------------------------------------------------------------------------------
/app/static/src/input.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
5 | @layer components {
6 | .form-input {
7 | @apply mt-1 block w-full rounded-md border-gray-300 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm dark:bg-gray-800 dark:border-gray-600 px-4 py-3;
8 | }
9 | }
10 |
11 | .cmdk-root {
12 | --cmdk-font-family: 'Inter', sans-serif;
13 | --cmdk-background: #fff;
14 | --cmdk-border-radius: 8px;
15 | --cmdk-box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
16 | --cmdk-color-text: #333;
17 | --cmdk-color-placeholder: #999;
18 | --cmdk-color-input: #333;
19 | --cmdk-color-separator: #ddd;
20 | --cmdk-color-item-hover: #f5f5f5;
21 | --cmdk-color-item-active: #eee;
22 | --cmdk-height: 400px;
23 | --cmdk-padding: 12px;
24 | }
25 |
26 | [cmdk-theme='dark'] .cmdk-root {
27 | --cmdk-background: #1A202C; /* background-dark */
28 | --cmdk-color-text: #E2E8F0; /* text-dark */
29 | --cmdk-color-placeholder: #718096; /* text-muted-dark */
30 | --cmdk-color-input: #E2E8F0;
31 | --cmdk-color-separator: #4A5568; /* border-dark */
32 | --cmdk-color-item-hover: #2D3748; /* card-dark */
33 | --cmdk-color-item-active: #4A5568;
34 | }
35 |
--------------------------------------------------------------------------------
/app/models/user_favorite_project.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from app import db
3 |
4 |
5 | class UserFavoriteProject(db.Model):
6 | """Association table for user favorite projects"""
7 |
8 | __tablename__ = "user_favorite_projects"
9 |
10 | id = db.Column(db.Integer, primary_key=True)
11 | user_id = db.Column(db.Integer, db.ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
12 | project_id = db.Column(db.Integer, db.ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True)
13 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
14 |
15 | # Unique constraint to prevent duplicate favorites
16 | __table_args__ = (db.UniqueConstraint("user_id", "project_id", name="uq_user_project_favorite"),)
17 |
18 | def __repr__(self):
19 | return f"<UserFavoriteProject user={self.user_id} project={self.project_id}>"
20 |
21 | def to_dict(self):
22 | """Convert to dictionary for API responses"""
23 | return {
24 | "id": self.id,
25 | "user_id": self.user_id,
26 | "project_id": self.project_id,
27 | "created_at": self.created_at.isoformat() if self.created_at else None,
28 | }
29 |
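A short sketch of how the unique constraint above behaves when the same favorite is added twice (illustrative; assumes an application context and existing user/project rows with these ids):

```python
# Illustrative: uq_user_project_favorite rejects duplicate (user_id, project_id) pairs.
from sqlalchemy.exc import IntegrityError

from app import db
from app.models.user_favorite_project import UserFavoriteProject

db.session.add(UserFavoriteProject(user_id=1, project_id=1))
db.session.commit()

db.session.add(UserFavoriteProject(user_id=1, project_id=1))   # same pair again
try:
    db.session.commit()
except IntegrityError:
    db.session.rollback()   # duplicate favorite is refused by the constraint
```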
--------------------------------------------------------------------------------
/scripts/extract_translations.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 |
4 |
5 | def run(cmd: list[str]) -> int:
6 | print("$", " ".join(cmd))
7 | # Use python -m babel instead of pybabel directly
8 | if cmd[0] == 'pybabel':
9 | cmd = ['python', '-m', 'babel.messages.frontend'] + cmd[1:]
10 | return subprocess.call(cmd)
11 |
12 |
13 | def main():
14 | # Requires Flask-Babel/Babel installed
15 | os.makedirs('translations', exist_ok=True)
16 | # Extract messages
17 | run(['pybabel', 'extract', '-F', 'babel.cfg', '-o', 'messages.pot', '.'])
18 |
19 | # Initialize languages if not already
20 | languages = ['en', 'nl', 'de', 'fr', 'it', 'fi', 'es', 'ar', 'he', 'nb']
21 | for lang in languages:
22 | po_dir = os.path.join('translations', lang, 'LC_MESSAGES')
23 | po_path = os.path.join(po_dir, 'messages.po')
24 | if not os.path.exists(po_path):
25 | run(['pybabel', 'init', '-i', 'messages.pot', '-d', 'translations', '-l', lang])
26 | # Update catalogs
27 | run(['pybabel', 'update', '-i', 'messages.pot', '-d', 'translations'])
28 | # Compile
29 | run(['pybabel', 'compile', '-d', 'translations'])
30 |
31 |
32 | if __name__ == '__main__':
33 | main()
34 |
35 |
36 |
--------------------------------------------------------------------------------
/loki/loki-config.yml:
--------------------------------------------------------------------------------
1 | # Loki configuration for log aggregation
2 | # This file configures Loki to receive and store logs
3 | # Compatible with Loki v2.9+ and v3.x
4 |
5 | auth_enabled: false
6 |
7 | server:
8 | http_listen_port: 3100
9 | grpc_listen_port: 9096
10 |
11 | common:
12 | path_prefix: /loki
13 | storage:
14 | filesystem:
15 | chunks_directory: /loki/chunks
16 | rules_directory: /loki/rules
17 | replication_factor: 1
18 | ring:
19 | instance_addr: 127.0.0.1
20 | kvstore:
21 | store: inmemory
22 |
23 | schema_config:
24 | configs:
25 | - from: 2020-10-24
26 | store: tsdb
27 | object_store: filesystem
28 | schema: v13
29 | index:
30 | prefix: index_
31 | period: 24h
32 |
33 | limits_config:
34 | reject_old_samples: true
35 | reject_old_samples_max_age: 168h
36 | ingestion_rate_mb: 16
37 | ingestion_burst_size_mb: 32
38 | max_cache_freshness_per_query: 10m
39 | split_queries_by_interval: 15m
40 | retention_period: 720h # 30 days
41 |
42 | compactor:
43 | working_directory: /loki/compactor
44 | compaction_interval: 10m
45 | retention_enabled: true
46 | retention_delete_delay: 2h
47 | retention_delete_worker_count: 150
48 | delete_request_store: filesystem
49 |
50 |
--------------------------------------------------------------------------------
/migrations/versions/020_add_user_avatar.py:
--------------------------------------------------------------------------------
1 | """add user avatar filename column
2 |
3 | Revision ID: 020
4 | Revises: 019
5 | Create Date: 2025-10-21 00:00:00
6 | """
7 |
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '020'
14 | down_revision = '019'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def _has_column(inspector, table_name: str, column_name: str) -> bool:
20 | return column_name in [col['name'] for col in inspector.get_columns(table_name)]
21 |
22 |
23 | def upgrade() -> None:
24 | bind = op.get_bind()
25 | inspector = sa.inspect(bind)
26 |
27 | if 'users' not in inspector.get_table_names():
28 | return
29 |
30 | if not _has_column(inspector, 'users', 'avatar_filename'):
31 | op.add_column('users', sa.Column('avatar_filename', sa.String(length=255), nullable=True))
32 |
33 |
34 | def downgrade() -> None:
35 | bind = op.get_bind()
36 | inspector = sa.inspect(bind)
37 |
38 | if 'users' not in inspector.get_table_names():
39 | return
40 |
41 | if _has_column(inspector, 'users', 'avatar_filename'):
42 | try:
43 | op.drop_column('users', 'avatar_filename')
44 | except Exception:
45 | pass
46 |
47 |
48 |
--------------------------------------------------------------------------------
/requirements-test.txt:
--------------------------------------------------------------------------------
1 | # Testing dependencies for TimeTracker
2 | # This file should be used in addition to requirements.txt for testing environments
3 |
4 | # Core testing frameworks
5 | pytest==7.4.3
6 | pytest-flask==1.3.0
7 | pytest-cov==4.1.0
8 | pytest-xdist==3.5.0 # Parallel test execution
9 | pytest-timeout==2.2.0 # Timeout for long-running tests
10 | pytest-mock==3.12.0 # Mocking support
11 | pytest-env==1.1.3 # Environment variable management for tests
12 |
13 | # Code quality and linting
14 | black==24.8.0
15 | flake8==6.1.0
16 | isort==5.13.2
17 | pylint==3.0.3
18 | mypy==1.8.0
19 |
20 | # Security testing
21 | bandit==1.7.6 # Security linting
22 | safety==3.0.1 # Dependency vulnerability scanning
23 |
24 | # Test data generation
25 | factory-boy==3.3.0 # Test fixtures
26 | faker==22.0.0 # Fake data generation
27 |
28 | # API testing
29 | requests-mock==1.11.0 # Mock HTTP requests
30 | responses==0.24.1 # Mock HTTP responses
31 |
32 | # Performance testing
33 | pytest-benchmark==4.0.0 # Performance benchmarking
34 |
35 | # Database testing
36 | sqlalchemy-utils==0.41.1 # Database utilities for testing
37 |
38 | # HTML/Coverage report
39 | coverage[toml]==7.4.0
40 | pytest-html==4.1.1 # HTML test reports
41 |
42 | # Additional utilities
43 | freezegun==1.4.0 # Time mocking
44 |
45 |
--------------------------------------------------------------------------------
/docker-compose.https-mkcert.yml:
--------------------------------------------------------------------------------
1 | services:
2 | # mkcert certificate manager - auto-generates trusted certificates
3 | mkcert:
4 | build:
5 | context: .
6 | dockerfile: docker/Dockerfile.mkcert
7 | container_name: timetracker-mkcert
8 | volumes:
9 | - ./nginx/ssl:/certs
10 | - mkcert-ca:/root/.local/share/mkcert
11 | environment:
12 | - HOST_IP=${HOST_IP:-192.168.1.100}
13 | - CERT_DOMAINS=localhost 127.0.0.1 ::1 ${HOST_IP:-192.168.1.100} *.local timetracker.local
14 | command: /generate-mkcert-certs.sh
15 | restart: "no"
16 |
17 | nginx:
18 | image: nginx:alpine
19 | container_name: timetracker-nginx
20 | ports:
21 | - "80:80"
22 | - "443:443"
23 | volumes:
24 | - ./nginx/conf.d:/etc/nginx/conf.d:ro
25 | - ./nginx/ssl:/etc/nginx/ssl:ro
26 | depends_on:
27 | mkcert:
28 | condition: service_completed_successfully
29 | app:
30 | condition: service_started
31 | restart: unless-stopped
32 |
33 | app:
34 | ports: [] # nginx handles all ports
35 | environment:
36 | - WTF_CSRF_SSL_STRICT=true
37 | - SESSION_COOKIE_SECURE=true
38 | - CSRF_COOKIE_SECURE=true
39 | restart: unless-stopped
40 |
41 | volumes:
42 | mkcert-ca:
43 | driver: local
44 |
45 |
--------------------------------------------------------------------------------
/docs/guides/QUICK_START_LOCAL_DEVELOPMENT.md:
--------------------------------------------------------------------------------
1 | # Quick Start: Local Development with Docker Compose
2 |
3 | ## TL;DR - Fastest Local Start
4 |
5 | ```powershell
6 | git clone https://github.com/drytrix/TimeTracker.git
7 | cd TimeTracker
8 |
9 | cp env.example .env
10 | # Edit .env and set a strong SECRET_KEY
11 |
12 | docker-compose -f docker-compose.example.yml up -d
13 |
14 | # Open http://localhost:8080
15 | ```
16 |
17 | See the full Docker Compose setup guide: `docs/DOCKER_COMPOSE_SETUP.md`.
18 |
19 | ## Local Development (Python) Alternative
20 |
21 | If you prefer to run locally with Python:
22 |
23 | ```powershell
24 | python -m venv venv
25 | venv\Scripts\activate
26 | pip install -r requirements.txt
27 | python app.py
28 | ```
29 |
30 | ## Analytics & Telemetry (Optional)
31 |
32 | To test PostHog or Sentry in development, set the respective variables in `.env` and restart the app. For advanced local analytics configuration, see `docs/analytics.md` and `assets/README.md`.
33 |
34 | ## Troubleshooting
35 |
36 | - CSRF token errors: For HTTP (localhost), set `WTF_CSRF_SSL_STRICT=false` and ensure `SESSION_COOKIE_SECURE=false`/`CSRF_COOKIE_SECURE=false`.
37 | - Database not ready: The app waits for Postgres healthcheck; check `docker-compose logs db`.
38 | - Timezone issues: Set `TZ` to your locale.
39 |
40 |
--------------------------------------------------------------------------------
/tests/test_ui_quick_wins.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 |
4 | @pytest.mark.smoke
5 | @pytest.mark.routes
6 | def test_base_layout_has_skip_link(authenticated_client):
7 | response = authenticated_client.get("/dashboard")
8 | assert response.status_code == 200
9 | html = response.get_data(as_text=True)
10 | assert "Skip to content" in html
11 | assert 'href="#mainContentAnchor"' in html
12 | assert 'id="mainContentAnchor"' in html
13 |
14 |
15 | @pytest.mark.smoke
16 | @pytest.mark.routes
17 | def test_login_has_primary_button_and_user_icon(client):
18 | response = client.get("/login")
19 | assert response.status_code == 200
20 | html = response.get_data(as_text=True)
21 | assert 'class="btn btn-primary' in html or 'class="btn btn-primary"' in html
22 | assert "fa-user" in html
23 | assert 'id="username"' in html
24 |
25 |
26 | @pytest.mark.smoke
27 | @pytest.mark.routes
28 | def test_tasks_table_has_sticky_and_zebra(authenticated_client):
29 | response = authenticated_client.get("/tasks")
30 | assert response.status_code == 200
31 | html = response.get_data(as_text=True)
32 | assert 'class="table table-zebra' in html or 'class="table table-zebra"' in html
33 | # numeric alignment utility present on Due/Progress columns
34 | assert "table-number" in html
35 |
--------------------------------------------------------------------------------
/migrations/versions/049_add_client_password_setup_token.py:
--------------------------------------------------------------------------------
1 | """Add password setup token fields to clients table
2 |
3 | Revision ID: 049
4 | Revises: 048
5 | Create Date: 2025-01-23
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '049'
14 | down_revision = '048'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | """Add password_setup_token and password_setup_token_expires columns to clients table"""
21 |
22 | # Add password_setup_token column
23 | op.add_column('clients',
24 | sa.Column('password_setup_token', sa.String(length=100), nullable=True)
25 | )
26 | op.create_index('ix_clients_password_setup_token', 'clients', ['password_setup_token'])
27 |
28 | # Add password_setup_token_expires column
29 | op.add_column('clients',
30 | sa.Column('password_setup_token_expires', sa.DateTime(), nullable=True)
31 | )
32 |
33 |
34 | def downgrade():
35 | """Remove password setup token columns from clients table"""
36 |
37 | # Drop columns
38 | op.drop_index('ix_clients_password_setup_token', 'clients')
39 | op.drop_column('clients', 'password_setup_token_expires')
40 | op.drop_column('clients', 'password_setup_token')
41 |
42 |
--------------------------------------------------------------------------------
/migrations/versions/036_add_pdf_design_json.py:
--------------------------------------------------------------------------------
1 | """add invoice_pdf_design_json to settings
2 |
3 | Revision ID: 036_add_pdf_design_json
4 | Revises: 035_enhance_payments
5 | Create Date: 2025-10-29 12:00:00
6 | """
7 |
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '036_add_pdf_design_json'
14 | down_revision = '035_enhance_payments'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | bind = op.get_bind()
21 | inspector = sa.inspect(bind)
22 | if 'settings' not in inspector.get_table_names():
23 | return
24 | columns = {c['name'] for c in inspector.get_columns('settings')}
25 | if 'invoice_pdf_design_json' not in columns:
26 | op.add_column('settings', sa.Column('invoice_pdf_design_json', sa.Text(), nullable=True))
27 |
28 |
29 | def downgrade() -> None:
30 | bind = op.get_bind()
31 | inspector = sa.inspect(bind)
32 | if 'settings' not in inspector.get_table_names():
33 | return
34 | columns = {c['name'] for c in inspector.get_columns('settings')}
35 | if 'invoice_pdf_design_json' in columns:
36 | try:
37 | op.drop_column('settings', 'invoice_pdf_design_json')
38 | except Exception:
39 | pass
40 |
41 |
42 |
--------------------------------------------------------------------------------
/.github/workflows/static.yml:
--------------------------------------------------------------------------------
1 | name: Deploy to GitHub Pages
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
8 | permissions:
9 | contents: read
10 | pages: write
11 | id-token: write
12 |
13 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
14 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
15 | concurrency:
16 | group: "pages"
17 | cancel-in-progress: false
18 |
19 | jobs:
20 | # Build job
21 | build:
22 | runs-on: ubuntu-latest
23 | steps:
24 | - name: Checkout
25 | uses: actions/checkout@v4
26 |
27 | - name: Setup Pages
28 | uses: actions/configure-pages@v4
29 |
30 | - name: Upload artifact
31 | uses: actions/upload-pages-artifact@v3
32 | with:
33 | # Upload entire repository
34 | path: '.'
35 |
36 | # Deployment job
37 | deploy:
38 | environment:
39 | name: github-pages
40 | url: ${{ steps.deployment.outputs.page_url }}
41 | runs-on: ubuntu-latest
42 | needs: build
43 | steps:
44 | - name: Deploy to GitHub Pages
45 | id: deployment
46 | uses: actions/deploy-pages@v4
47 |
--------------------------------------------------------------------------------
/migrations/versions/011_add_user_preferred_language.py:
--------------------------------------------------------------------------------
1 | """add user preferred_language column
2 |
3 | Revision ID: 011
4 | Revises: 010
5 | Create Date: 2025-09-11 00:00:00
6 | """
7 |
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '011'
14 | down_revision = '010'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | bind = op.get_bind()
21 | inspector = sa.inspect(bind)
22 | if 'users' not in inspector.get_table_names():
23 | return
24 | # Check existing columns defensively
25 | columns = {c['name'] for c in inspector.get_columns('users')}
26 | if 'preferred_language' not in columns:
27 | op.add_column('users', sa.Column('preferred_language', sa.String(length=8), nullable=True))
28 |
29 |
30 | def downgrade() -> None:
31 | bind = op.get_bind()
32 | inspector = sa.inspect(bind)
33 | if 'users' not in inspector.get_table_names():
34 | return
35 | columns = {c['name'] for c in inspector.get_columns('users')}
36 | if 'preferred_language' in columns:
37 | try:
38 | op.drop_column('users', 'preferred_language')
39 | except Exception:
40 | # Some backends might fail if column involved in indexes; ignore for safety
41 | pass
42 |
43 |
44 |
--------------------------------------------------------------------------------
/migrations/versions/088_add_salesman_splitting_to_reports.py:
--------------------------------------------------------------------------------
1 | """Add salesman splitting to report email schedules
2 |
3 | Revision ID: 088_salesman_splitting_reports
4 | Revises: 087_salesman_email_mapping
5 | Create Date: 2025-01-29
6 |
7 | This migration adds:
8 | - split_by_salesman field to report_email_schedules table
9 | - salesman_field_name field to specify which custom field to use
10 | """
11 | from alembic import op
12 | import sqlalchemy as sa
13 |
14 | # revision identifiers, used by Alembic.
15 | revision = '088_salesman_splitting_reports'
16 | down_revision = '087_salesman_email_mapping'
17 | branch_labels = None
18 | depends_on = None
19 |
20 |
21 | def upgrade():
22 | """Add salesman splitting fields to report_email_schedules"""
23 | # Add split_by_salesman field
24 | op.add_column('report_email_schedules',
25 | sa.Column('split_by_salesman', sa.Boolean(), nullable=False, server_default='false'))
26 |
27 | # Add salesman_field_name field (defaults to 'salesman')
28 | op.add_column('report_email_schedules',
29 | sa.Column('salesman_field_name', sa.String(length=50), nullable=True))
30 |
31 |
32 | def downgrade():
33 | """Remove salesman splitting fields"""
34 | op.drop_column('report_email_schedules', 'salesman_field_name')
35 | op.drop_column('report_email_schedules', 'split_by_salesman')
36 |
37 |
--------------------------------------------------------------------------------
/app/routes/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Settings Routes
3 | Handles user and system settings
4 | """
5 |
6 | from flask import Blueprint, render_template, request, redirect, url_for, flash, jsonify
7 | from flask_login import login_required, current_user
8 | from flask_babel import gettext as _
9 | from app import db, track_page_view
10 | from app.utils.db import safe_commit
11 |
12 | settings_bp = Blueprint("settings", __name__)
13 |
14 |
15 | @settings_bp.route("/settings")
16 | @login_required
17 | def index():
18 | """Main settings page"""
19 | track_page_view("settings_index")
20 | return render_template("settings/index.html")
21 |
22 |
23 | @settings_bp.route("/settings/keyboard-shortcuts")
24 | @login_required
25 | def keyboard_shortcuts():
26 | """Keyboard shortcuts settings"""
27 | track_page_view("settings_keyboard_shortcuts")
28 | return render_template("settings/keyboard_shortcuts.html")
29 |
30 |
31 | @settings_bp.route("/settings/profile")
32 | @login_required
33 | def profile():
34 | """User profile settings"""
35 | track_page_view("settings_profile")
36 | return redirect(url_for("profile.index"))
37 |
38 |
39 | @settings_bp.route("/settings/preferences")
40 | @login_required
41 | def preferences():
42 | """User preferences"""
43 | track_page_view("settings_preferences")
44 | return render_template("settings/preferences.html")
45 |
--------------------------------------------------------------------------------
/app/templates/errors/500.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block title %}{{ error_info.title if error_info else _('Server Error') }} - {{ app_name }}{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 |
10 |
11 |
500
12 |
{{ error_info.title if error_info else _('Server Error') }}
13 |
14 | {{ error_info.message if error_info else _('Something went wrong on our end. Please try again later.') }}
15 |
16 |
24 |
25 |
26 |
27 |
28 | {% endblock %}
29 |
--------------------------------------------------------------------------------
/app/templates/errors/404.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block title %}{{ error_info.title if error_info else _('Page Not Found') }} - {{ app_name }}{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 |
10 |
11 |
404
12 |
{{ error_info.title if error_info else _('Page Not Found') }}
13 |
14 | {{ error_info.message if error_info else _("The page you're looking for doesn't exist or has been moved.") }}
15 |
16 |
24 |
25 |
26 |
27 |
28 | {% endblock %}
29 |
--------------------------------------------------------------------------------
/tests/test_time_rounding_param.py:
--------------------------------------------------------------------------------
1 | """Additional parameterized tests for time rounding utilities."""
2 |
3 | import pytest
4 | from app.utils.time_rounding import round_time_duration
5 |
6 |
7 | @pytest.mark.unit
8 | @pytest.mark.parametrize(
9 | "seconds, interval, method, expected",
10 | [
11 | pytest.param(3720, 5, "nearest", 3600, id="62m->nearest-5m=60m"),
12 | pytest.param(3780, 5, "nearest", 3900, id="63m->nearest-5m=65m"),
13 | pytest.param(120, 5, "nearest", 0, id="2m->nearest-5m=0"),
14 | pytest.param(180, 5, "nearest", 300, id="3m->nearest-5m=5m"),
15 | pytest.param(3720, 15, "up", 4500, id="62m->up-15m=75m"),
16 | pytest.param(3600, 15, "up", 3600, id="60m->up-15m=60m"),
17 | pytest.param(3660, 15, "up", 4500, id="61m->up-15m=75m"),
18 | pytest.param(3720, 15, "down", 3600, id="62m->down-15m=60m"),
19 | pytest.param(4440, 15, "down", 3600, id="74m->down-15m=60m"),
20 | pytest.param(4500, 15, "down", 4500, id="75m->down-15m=75m"),
21 | pytest.param(3720, 60, "nearest", 3600, id="62m->nearest-60m=60m"),
22 | pytest.param(5400, 60, "nearest", 7200, id="90m->nearest-60m=120m"),
23 | pytest.param(5340, 60, "nearest", 3600, id="89m->nearest-60m=60m"),
24 | ],
25 | )
26 | def test_round_time_duration_parametrized(seconds, interval, method, expected):
27 | assert round_time_duration(seconds, interval, method) == expected
28 |
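For reference, the rounding semantics these cases encode can be expressed in a few lines. The sketch below is an illustration only, assuming `round_time_duration(seconds, interval_minutes, method)` rounds a duration in seconds to the given minute interval with "nearest" ties rounding up; the actual implementation in `app.utils.time_rounding` may differ.

```python
# Illustrative sketch of the rounding behaviour exercised above (not the project's code).
import math


def round_time_duration_sketch(seconds: int, interval_minutes: int, method: str = "nearest") -> int:
    """Round a duration in seconds to a multiple of interval_minutes."""
    step = interval_minutes * 60
    if step <= 0:
        return seconds
    if method == "up":
        return math.ceil(seconds / step) * step
    if method == "down":
        return math.floor(seconds / step) * step
    # "nearest": ties round up (e.g. 90 min -> 120 min at a 60-minute interval)
    return math.floor(seconds / step + 0.5) * step
```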
--------------------------------------------------------------------------------
/app/models/tax_rule.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from app import db
3 |
4 |
5 | class TaxRule(db.Model):
6 | """Flexible tax rules per country/region/client with effective date ranges."""
7 |
8 | __tablename__ = "tax_rules"
9 |
10 | id = db.Column(db.Integer, primary_key=True)
11 | name = db.Column(db.String(100), nullable=False)
12 | country = db.Column(db.String(2), nullable=True) # ISO-3166-1 alpha-2
13 | region = db.Column(db.String(50), nullable=True)
14 | client_id = db.Column(db.Integer, db.ForeignKey("clients.id"), nullable=True, index=True)
15 | project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), nullable=True, index=True)
16 | tax_code = db.Column(db.String(50), nullable=True) # e.g., VAT, GST
17 | rate_percent = db.Column(db.Numeric(7, 4), nullable=False, default=0)
18 | compound = db.Column(db.Boolean, default=False, nullable=False)
19 | inclusive = db.Column(db.Boolean, default=False, nullable=False) # If true, prices include tax
20 | start_date = db.Column(db.Date, nullable=True)
21 | end_date = db.Column(db.Date, nullable=True)
22 | active = db.Column(db.Boolean, default=True, nullable=False)
23 |
24 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
25 | updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
26 |
27 | def __repr__(self):
 28 |         return f"<TaxRule {self.name}>"
29 |
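As a usage illustration only (not code from this repository), a rule like this could be checked for the rate in effect on a given date roughly as follows. The field names come from the model above; the helper itself is hypothetical.

```python
# Hypothetical helper showing how a TaxRule's effective date range might be applied.
from datetime import date

from app.models.tax_rule import TaxRule


def effective_rate(rule: TaxRule, on: date) -> float:
    """Return the rule's rate (as a percentage) if it applies on the given date, else 0."""
    if not rule.active:
        return 0.0
    if rule.start_date and on < rule.start_date:
        return 0.0
    if rule.end_date and on > rule.end_date:
        return 0.0
    return float(rule.rate_percent)
```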
--------------------------------------------------------------------------------
/app/routes/api/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API Routes Package
3 |
4 | This package contains versioned API routes.
5 | Current structure:
6 | - v1: Current stable API (migrated from api_v1.py)
7 | - Future: v2, v3, etc. for breaking changes
8 |
9 | Note: The legacy api_bp is imported from the api.py module file
10 | to maintain backward compatibility.
11 | """
12 |
13 | import os
14 | import importlib.util
15 |
16 | # Import versioned blueprints
17 | from app.routes.api.v1 import api_v1_bp
18 |
19 | # Import legacy api_bp from the api.py module file
20 | # We need to load it directly since Python prioritizes packages over modules
21 | api_module_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "api.py")
22 |
23 | try:
24 | spec = importlib.util.spec_from_file_location("app.routes.api_legacy", api_module_path)
25 | if spec and spec.loader:
26 | api_legacy_module = importlib.util.module_from_spec(spec)
27 | spec.loader.exec_module(api_legacy_module)
28 | api_bp = api_legacy_module.api_bp
29 | else:
30 | raise ImportError("Could not load api.py module")
31 | except Exception as e:
32 | # Last resort: create a dummy blueprint to prevent import errors
33 | from flask import Blueprint
34 |
35 | api_bp = Blueprint("api", __name__)
36 | import logging
37 |
38 | logger = logging.getLogger(__name__)
39 | logger.warning(f"Could not import api_bp from api.py: {e}. Using dummy blueprint.")
40 |
41 | __all__ = ["api_v1_bp", "api_bp"]
42 |
--------------------------------------------------------------------------------
/docker/entrypoint-local-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # TimeTracker Local Test Entrypoint
3 | # Simplified entrypoint for local testing with SQLite
4 |
5 | echo "=== TimeTracker Local Test Container Starting ==="
6 | echo "Timestamp: $(date)"
7 | echo "Container ID: $(hostname)"
8 | echo "Python version: $(python --version 2>/dev/null || echo 'Python not available')"
9 | echo "Current directory: $(pwd)"
10 | echo "User: $(whoami)"
11 | echo
12 |
13 | # Function to log messages with timestamp
14 | log() {
15 | echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
16 | }
17 |
18 | # Ensure data directory exists and has proper permissions
19 | log "Setting up data directory..."
20 | mkdir -p /data /data/uploads /app/logs
21 | chmod 755 /data /data/uploads /app/logs
22 |
23 | # If no command was passed from CMD, default to python /app/start.py
24 | if [ $# -eq 0 ]; then
25 | set -- python /app/start.py
26 | fi
27 |
28 | # Set proper ownership for the timetracker user (if it exists)
29 | if id "timetracker" &>/dev/null; then
30 | log "Setting ownership to timetracker user..."
31 | chown -R timetracker:timetracker /data /app/logs || true
32 | log "Switching to timetracker user with gosu..."
33 | cd /app
34 | # Delegate to the standard entrypoint that handles migrations for both Postgres and SQLite
35 | exec gosu timetracker:timetracker /app/docker/entrypoint_fixed.sh "$@"
36 | else
37 | log "timetracker user not found, running as root..."
38 | exec /app/docker/entrypoint_fixed.sh "$@"
39 | fi
40 |
--------------------------------------------------------------------------------
/migrations/versions/048_add_client_portal_credentials.py:
--------------------------------------------------------------------------------
1 | """Add client portal credentials to clients table
2 |
3 | Revision ID: 048
4 | Revises: 047
5 | Create Date: 2025-01-23
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '048'
14 | down_revision = '047'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | """Add portal_enabled, portal_username, and portal_password_hash columns to clients table"""
21 |
22 | # Add portal_enabled column
23 | op.add_column('clients',
24 | sa.Column('portal_enabled', sa.Boolean(), nullable=False, server_default='0')
25 | )
26 |
27 | # Add portal_username column
28 | op.add_column('clients',
29 | sa.Column('portal_username', sa.String(length=80), nullable=True)
30 | )
31 | op.create_index('ix_clients_portal_username', 'clients', ['portal_username'], unique=True)
32 |
33 | # Add portal_password_hash column
34 | op.add_column('clients',
35 | sa.Column('portal_password_hash', sa.String(length=255), nullable=True)
36 | )
37 |
38 |
39 | def downgrade():
40 | """Remove client portal columns from clients table"""
41 |
42 | # Drop columns
43 | op.drop_index('ix_clients_portal_username', 'clients')
44 | op.drop_column('clients', 'portal_password_hash')
45 | op.drop_column('clients', 'portal_username')
46 | op.drop_column('clients', 'portal_enabled')
47 |
48 |
--------------------------------------------------------------------------------
/migrations/versions/047_add_client_portal_fields.py:
--------------------------------------------------------------------------------
1 | """Add client portal fields to users table
2 |
3 | Revision ID: 047
4 | Revises: 046
5 | Create Date: 2025-01-23
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '047'
14 | down_revision = '046'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | """Add client_portal_enabled and client_id columns to users table"""
21 |
22 | # Add client_portal_enabled column
23 | op.add_column('users',
24 | sa.Column('client_portal_enabled', sa.Boolean(), nullable=False, server_default='0')
25 | )
26 |
27 | # Add client_id column with foreign key
28 | op.add_column('users',
29 | sa.Column('client_id', sa.Integer(), nullable=True)
30 | )
31 | op.create_index('ix_users_client_id', 'users', ['client_id'])
32 | op.create_foreign_key(
33 | 'fk_users_client_id',
34 | 'users', 'clients',
35 | ['client_id'], ['id'],
36 | ondelete='SET NULL'
37 | )
38 |
39 |
40 | def downgrade():
41 | """Remove client_portal_enabled and client_id columns from users table"""
42 |
43 | # Drop foreign key and index
44 | op.drop_constraint('fk_users_client_id', 'users', type_='foreignkey')
45 | op.drop_index('ix_users_client_id', 'users')
46 |
47 | # Drop columns
48 | op.drop_column('users', 'client_id')
49 | op.drop_column('users', 'client_portal_enabled')
50 |
51 |
--------------------------------------------------------------------------------
/tests/test_api_audit_activities_v1.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from app import create_app, db
4 | from app.models import User, ApiToken
5 |
6 |
7 | @pytest.fixture
8 | def app():
9 | app = create_app(
10 | {
11 | "TESTING": True,
12 | "SQLALCHEMY_DATABASE_URI": "sqlite:///test_api_audit_activities.sqlite",
13 | "WTF_CSRF_ENABLED": False,
14 | }
15 | )
16 | with app.app_context():
17 | db.create_all()
18 | yield app
19 | db.session.remove()
20 | db.drop_all()
21 |
22 |
23 | @pytest.fixture
24 | def client(app):
25 | return app.test_client()
26 |
27 |
28 | @pytest.fixture
29 | def admin_user(app):
30 | u = User(username="admin", email="admin@example.com", role="admin")
31 | u.is_active = True
32 | db.session.add(u)
33 | db.session.commit()
34 | return u
35 |
36 |
37 | @pytest.fixture
38 | def admin_token(app, admin_user):
39 | token, plain = ApiToken.create_token(user_id=admin_user.id, name="Admin Token", scopes="admin:all,read:reports")
40 | db.session.add(token)
41 | db.session.commit()
42 | return plain
43 |
44 |
45 | def _auth(t):
46 | return {"Authorization": f"Bearer {t}", "Content-Type": "application/json"}
47 |
48 |
49 | def test_audit_and_activities_list(client, admin_token):
50 | r = client.get("/api/v1/audit-logs", headers=_auth(admin_token))
51 | assert r.status_code == 200
52 | r = client.get("/api/v1/activities", headers=_auth(admin_token))
53 | assert r.status_code == 200
54 |
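Outside the test suite, the same endpoints can be reached with any HTTP client using the bearer-token header shown in `_auth()`. A minimal sketch with `requests` (already pinned in `requirements.txt`), assuming a running instance on port 8080 (the gunicorn bind used in `docker/start.py`) and a valid API token:

```python
# Minimal sketch: calling the v1 audit-log and activity endpoints with a bearer token.
# The base URL and token value are placeholders, not values from this repository.
import requests

BASE_URL = "http://localhost:8080"
TOKEN = "your-api-token"

headers = {"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"}

for path in ("/api/v1/audit-logs", "/api/v1/activities"):
    resp = requests.get(f"{BASE_URL}{path}", headers=headers, timeout=10)
    resp.raise_for_status()
    print(path, resp.status_code)
```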
--------------------------------------------------------------------------------
/migrations/versions/054_add_quote_comments.py:
--------------------------------------------------------------------------------
1 | """Add quote support to comments table
2 |
3 | Revision ID: 054
4 | Revises: 053
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '054'
13 | down_revision = '053'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | """Add quote_id and is_internal fields to comments table"""
20 | # Add quote_id column
21 | op.add_column('comments',
22 | sa.Column('quote_id', sa.Integer(), nullable=True)
23 | )
24 |
25 | # Add is_internal column (True = internal team comment, False = client-visible)
26 | op.add_column('comments',
27 | sa.Column('is_internal', sa.Boolean(), nullable=False, server_default='true')
28 | )
29 |
30 | # Create index on quote_id
31 | op.create_index('ix_comments_quote_id', 'comments', ['quote_id'], unique=False)
32 |
33 | # Add foreign key constraint
34 | op.create_foreign_key('fk_comments_quote_id', 'comments', 'quotes', ['quote_id'], ['id'], ondelete='CASCADE')
35 |
36 |
37 | def downgrade():
38 | """Remove quote support from comments table"""
39 | # Drop foreign key
40 | op.drop_constraint('fk_comments_quote_id', 'comments', type_='foreignkey')
41 |
42 | # Drop index
43 | op.drop_index('ix_comments_quote_id', 'comments')
44 |
45 | # Drop columns
46 | op.drop_column('comments', 'is_internal')
47 | op.drop_column('comments', 'quote_id')
48 |
49 |
--------------------------------------------------------------------------------
/migrations/versions/064_add_kiosk_mode_settings.py:
--------------------------------------------------------------------------------
1 | """Add kiosk mode settings
2 |
3 | Revision ID: 064
4 | Revises: 063
5 | Create Date: 2025-01-27
6 |
7 | This migration adds kiosk mode settings to the settings table.
8 | """
9 | from alembic import op
10 | import sqlalchemy as sa
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '064'
14 | down_revision = '063'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | """Add kiosk mode settings"""
21 | # Add kiosk mode settings columns
22 | op.add_column('settings', sa.Column('kiosk_mode_enabled', sa.Boolean(), nullable=False, server_default='0'))
23 | op.add_column('settings', sa.Column('kiosk_auto_logout_minutes', sa.Integer(), nullable=False, server_default='15'))
24 | op.add_column('settings', sa.Column('kiosk_allow_camera_scanning', sa.Boolean(), nullable=False, server_default='1'))
25 | op.add_column('settings', sa.Column('kiosk_require_reason_for_adjustments', sa.Boolean(), nullable=False, server_default='0'))
26 | op.add_column('settings', sa.Column('kiosk_default_movement_type', sa.String(20), nullable=False, server_default='adjustment'))
27 |
28 |
29 | def downgrade():
30 | """Remove kiosk mode settings"""
31 | op.drop_column('settings', 'kiosk_default_movement_type')
32 | op.drop_column('settings', 'kiosk_require_reason_for_adjustments')
33 | op.drop_column('settings', 'kiosk_allow_camera_scanning')
34 | op.drop_column('settings', 'kiosk_auto_logout_minutes')
35 | op.drop_column('settings', 'kiosk_mode_enabled')
36 |
37 |
--------------------------------------------------------------------------------
/migrations/versions/022_add_project_code_field.py:
--------------------------------------------------------------------------------
1 | """Add short project code field for compact identifiers
2 |
3 | Revision ID: 023
4 | Revises: 022
5 | Create Date: 2025-10-23 00:00:00
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '023'
14 | down_revision = '022'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | bind = op.get_bind()
21 | dialect_name = bind.dialect.name if bind else 'generic'
22 |
23 | # Add code column if not present
24 | with op.batch_alter_table('projects') as batch_op:
25 | batch_op.add_column(sa.Column('code', sa.String(length=20), nullable=True))
26 | try:
27 | batch_op.create_unique_constraint('uq_projects_code', ['code'])
28 | except Exception:
 29 |             # Some dialects may not support unique constraints with NULLs the same way; ignore if it already exists
30 | pass
31 | try:
32 | batch_op.create_index('ix_projects_code', ['code'])
33 | except Exception:
34 | pass
35 |
36 |
37 | def downgrade():
38 | with op.batch_alter_table('projects') as batch_op:
39 | try:
40 | batch_op.drop_index('ix_projects_code')
41 | except Exception:
42 | pass
43 | try:
44 | batch_op.drop_constraint('uq_projects_code', type_='unique')
45 | except Exception:
46 | pass
47 | try:
48 | batch_op.drop_column('code')
49 | except Exception:
50 | pass
51 |
52 |
53 |
--------------------------------------------------------------------------------
/migrations/versions/052_add_quote_discount_fields.py:
--------------------------------------------------------------------------------
1 | """Add discount fields to quotes
2 |
3 | Revision ID: 052
4 | Revises: 051
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.dialects import postgresql
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '052'
14 | down_revision = '051'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | """Add discount fields to quotes table"""
21 | # Add discount fields
22 | op.add_column('quotes',
23 | sa.Column('discount_type', sa.String(length=20), nullable=True)
24 | )
25 | op.add_column('quotes',
26 | sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=True, server_default='0')
27 | )
28 | op.add_column('quotes',
29 | sa.Column('discount_reason', sa.String(length=500), nullable=True)
30 | )
31 | op.add_column('quotes',
32 | sa.Column('coupon_code', sa.String(length=50), nullable=True)
33 | )
34 |
35 | # Create index on coupon_code for faster lookups
36 | op.create_index('ix_quotes_coupon_code', 'quotes', ['coupon_code'], unique=False)
37 |
38 |
39 | def downgrade():
40 | """Remove discount fields from quotes table"""
41 | # Drop index
42 | op.drop_index('ix_quotes_coupon_code', 'quotes')
43 |
44 | # Drop columns
45 | op.drop_column('quotes', 'coupon_code')
46 | op.drop_column('quotes', 'discount_reason')
47 | op.drop_column('quotes', 'discount_amount')
48 | op.drop_column('quotes', 'discount_type')
49 |
50 |
--------------------------------------------------------------------------------
/app/services/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Service layer for business logic.
3 | This layer contains business logic that was previously in routes and models.
4 | """
5 |
6 | from .time_tracking_service import TimeTrackingService
7 | from .project_service import ProjectService
8 | from .invoice_service import InvoiceService
9 | from .notification_service import NotificationService
10 | from .task_service import TaskService
11 | from .expense_service import ExpenseService
12 | from .client_service import ClientService
13 | from .reporting_service import ReportingService
14 | from .analytics_service import AnalyticsService
15 | from .payment_service import PaymentService
16 | from .quote_service import QuoteService
17 | from .comment_service import CommentService
18 | from .user_service import UserService
19 | from .export_service import ExportService
20 | from .import_service import ImportService
21 | from .email_service import EmailService
22 | from .permission_service import PermissionService
23 | from .backup_service import BackupService
24 | from .health_service import HealthService
25 |
26 | __all__ = [
27 | "TimeTrackingService",
28 | "ProjectService",
29 | "InvoiceService",
30 | "NotificationService",
31 | "TaskService",
32 | "ExpenseService",
33 | "ClientService",
34 | "ReportingService",
35 | "AnalyticsService",
36 | "PaymentService",
37 | "QuoteService",
38 | "CommentService",
39 | "UserService",
40 | "ExportService",
41 | "ImportService",
42 | "EmailService",
43 | "PermissionService",
44 | "BackupService",
45 | "HealthService",
46 | ]
47 |
--------------------------------------------------------------------------------
/migrations/versions/012_add_pdf_template_fields.py:
--------------------------------------------------------------------------------
1 | """add pdf template fields to settings
2 |
3 | Revision ID: 012
4 | Revises: 011
5 | Create Date: 2025-09-12 00:00:00
6 | """
7 |
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '012'
14 | down_revision = '011'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | bind = op.get_bind()
21 | inspector = sa.inspect(bind)
22 | if 'settings' not in inspector.get_table_names():
23 | return
24 | columns = {c['name'] for c in inspector.get_columns('settings')}
25 | if 'invoice_pdf_template_html' not in columns:
26 | op.add_column('settings', sa.Column('invoice_pdf_template_html', sa.Text(), nullable=True))
27 | if 'invoice_pdf_template_css' not in columns:
28 | op.add_column('settings', sa.Column('invoice_pdf_template_css', sa.Text(), nullable=True))
29 |
30 |
31 | def downgrade() -> None:
32 | bind = op.get_bind()
33 | inspector = sa.inspect(bind)
34 | if 'settings' not in inspector.get_table_names():
35 | return
36 | columns = {c['name'] for c in inspector.get_columns('settings')}
37 | if 'invoice_pdf_template_css' in columns:
38 | try:
39 | op.drop_column('settings', 'invoice_pdf_template_css')
40 | except Exception:
41 | pass
42 | if 'invoice_pdf_template_html' in columns:
43 | try:
44 | op.drop_column('settings', 'invoice_pdf_template_html')
45 | except Exception:
46 | pass
47 |
48 |
49 |
--------------------------------------------------------------------------------
/scripts/version-manager.ps1:
--------------------------------------------------------------------------------
1 | # Version Manager for TimeTracker - PowerShell Wrapper
2 |
3 | param(
4 | [Parameter(Position=0)]
5 | [string]$Action,
6 |
7 | [Parameter(Position=1)]
8 | [string]$Version,
9 |
10 | [Parameter(Position=2)]
11 | [string]$Message,
12 |
13 | [int]$BuildNumber,
14 | [switch]$NoPush,
15 | [string]$Tag
16 | )
17 |
18 | if (-not $Action) {
19 | Write-Host "Usage: .\version-manager.ps1 [action] [options]"
20 | Write-Host ""
21 | Write-Host "Actions:"
22 | Write-Host " tag [version] [message] - Create a version tag"
23 | Write-Host " build [number] - Create a build tag"
24 | Write-Host " list - List all tags"
25 | Write-Host " info [tag] - Show tag information"
26 | Write-Host " status - Show current status"
27 | Write-Host " suggest - Suggest next version"
28 | Write-Host ""
29 | Write-Host "Examples:"
30 | Write-Host " .\version-manager.ps1 tag v1.2.3 'Release 1.2.3'"
31 | Write-Host " .\version-manager.ps1 build 123"
32 | Write-Host " .\version-manager.ps1 status"
33 | Write-Host ""
34 | exit 1
35 | }
36 |
 37 | # Build arguments for the Python script (use a dedicated variable instead of the automatic $args)
 38 | $pyArgs = @($Action)
 39 | 
 40 | if ($Version) { $pyArgs += "--version", $Version }
 41 | if ($Message) { $pyArgs += "--message", $Message }
 42 | if ($BuildNumber) { $pyArgs += "--build-number", $BuildNumber }
 43 | if ($NoPush) { $pyArgs += "--no-push" }
 44 | if ($Tag) { $pyArgs += "--tag", $Tag }
 45 | 
 46 | # Run the Python script
 47 | python scripts/version-manager.py @pyArgs
48 |
--------------------------------------------------------------------------------
/app/models/saved_filter.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from app import db
3 |
4 |
5 | class SavedFilter(db.Model):
6 | """User-defined saved filters for reuse across views.
7 |
8 | Stores JSON payload with supported keys like project_id, user_id, date ranges,
9 | tags, billable, status, etc.
10 | """
11 |
12 | __tablename__ = "saved_filters"
13 |
14 | id = db.Column(db.Integer, primary_key=True)
15 | user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False, index=True)
16 | name = db.Column(db.String(200), nullable=False)
17 | scope = db.Column(db.String(50), nullable=False, default="global") # e.g., 'time', 'projects', 'tasks', 'reports'
18 | payload = db.Column(db.JSON, nullable=False, default={})
19 |
20 | is_shared = db.Column(db.Boolean, nullable=False, default=False)
21 |
22 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
23 | updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
24 |
25 | __table_args__ = (db.UniqueConstraint("user_id", "name", "scope", name="ux_saved_filter_user_name_scope"),)
26 |
27 | def to_dict(self):
28 | return {
29 | "id": self.id,
30 | "user_id": self.user_id,
31 | "name": self.name,
32 | "scope": self.scope,
33 | "payload": self.payload,
34 | "is_shared": self.is_shared,
35 | "created_at": self.created_at.isoformat() if self.created_at else None,
36 | "updated_at": self.updated_at.isoformat() if self.updated_at else None,
37 | }
38 |
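As an illustration of the payload format described in the docstring (hypothetical values, not code from this repository), a saved filter might be created like this:

```python
# Hypothetical usage of SavedFilter; key names in `payload` follow the docstring above.
# (Run inside an application context.)
from app import db
from app.models.saved_filter import SavedFilter

flt = SavedFilter(
    user_id=1,  # placeholder user id
    name="Billable time this month",
    scope="time",
    payload={"billable": True, "start_date": "2025-01-01", "end_date": "2025-01-31"},
    is_shared=False,
)
db.session.add(flt)
db.session.commit()
print(flt.to_dict())
```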
--------------------------------------------------------------------------------
/docs/admin/README.md:
--------------------------------------------------------------------------------
1 | # Administrator Documentation
2 |
3 | Complete guides for TimeTracker administrators.
4 |
5 | ## 📖 Quick Links
6 |
7 | ### Configuration
8 | - **[Docker Compose Setup](configuration/DOCKER_COMPOSE_SETUP.md)** - Docker deployment guide
9 | - **[Docker Public Setup](configuration/DOCKER_PUBLIC_SETUP.md)** - Production deployment
10 | - **[Docker Startup Troubleshooting](configuration/DOCKER_STARTUP_TROUBLESHOOTING.md)** - Fix startup issues
11 | - **[Email Configuration](configuration/EMAIL_CONFIGURATION.md)** - Email setup
12 | - **[OIDC Setup](configuration/OIDC_SETUP.md)** - OIDC/SSO authentication setup
13 |
14 | ### Deployment
15 | - **[Version Management](deployment/VERSION_MANAGEMENT.md)** - Managing versions
16 | - **[Release Process](deployment/RELEASE_PROCESS.md)** - Release workflow
17 | - **[Official Builds](deployment/OFFICIAL_BUILDS.md)** - Official build information
18 |
19 | ### Security
20 | - See [security/](security/) for security documentation
21 |
22 | ### Monitoring
23 | - See [monitoring/](monitoring/) for monitoring and analytics setup
24 |
25 | ## 🔧 Common Tasks
26 |
27 | 1. **Initial Setup**: Start with [Docker Compose Setup](configuration/DOCKER_COMPOSE_SETUP.md)
28 | 2. **Configure Email**: See [Email Configuration](configuration/EMAIL_CONFIGURATION.md)
29 | 3. **Set Up OIDC/SSO**: Follow [OIDC Setup](configuration/OIDC_SETUP.md)
30 | 4. **Monitor System**: Check [monitoring/](monitoring/) documentation
31 |
32 | ## 📚 Related Documentation
33 |
34 | - **[Main Documentation Index](../README.md)** - Complete documentation overview
35 | - **[User Guides](../guides/)** - User-facing guides
36 |
--------------------------------------------------------------------------------
/tests/test_api_invoice_templates_api_v1.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from app import create_app, db
4 | from app.models import User, ApiToken
5 |
6 |
7 | @pytest.fixture
8 | def app():
9 | app = create_app(
10 | {
11 | "TESTING": True,
12 | "SQLALCHEMY_DATABASE_URI": "sqlite:///test_api_invoice_templates.sqlite",
13 | "WTF_CSRF_ENABLED": False,
14 | }
15 | )
16 | with app.app_context():
17 | db.create_all()
18 | yield app
19 | db.session.remove()
20 | db.drop_all()
21 |
22 |
23 | @pytest.fixture
24 | def client(app):
25 | return app.test_client()
26 |
27 |
28 | @pytest.fixture
29 | def admin_user(app):
30 | u = User(username="admin", email="admin@example.com", role="admin")
31 | u.is_active = True
32 | db.session.add(u)
33 | db.session.commit()
34 | return u
35 |
36 |
37 | @pytest.fixture
38 | def admin_token(app, admin_user):
39 | token, plain = ApiToken.create_token(user_id=admin_user.id, name="Admin Token", scopes="admin:all")
40 | db.session.add(token)
41 | db.session.commit()
42 | return plain
43 |
44 |
45 | def _auth(t):
46 | return {"Authorization": f"Bearer {t}", "Content-Type": "application/json"}
47 |
48 |
49 | def test_invoice_pdf_templates_list_and_get(client, admin_token):
50 | r = client.get("/api/v1/invoice-pdf-templates", headers=_auth(admin_token))
51 | assert r.status_code == 200
52 | # A4 default template is always available via get_template()
53 | r = client.get("/api/v1/invoice-pdf-templates/A4", headers=_auth(admin_token))
54 | assert r.status_code == 200
55 |
--------------------------------------------------------------------------------
/docs/QUICK_WINS_UI.md:
--------------------------------------------------------------------------------
1 | # UI Quick Wins (October 2025)
2 |
3 | This document summarizes the lightweight improvements applied to the new UI.
4 |
5 | ## What changed
6 |
7 | - Added minimal design tokens, button classes, focus ring utility, table helpers, and chips in `app/static/form-bridge.css`.
8 | - Added an accessible skip link and a main content anchor in `app/templates/base.html`.
9 | - Enhanced `app/templates/tasks/list.html` with sticky header treatment (CSS-only), zebra rows, and numeric alignment for date/progress columns.
10 | - Polished `app/templates/auth/login.html` with primary button styling and an inline user icon for the username field.
11 | - Added smoke tests in `tests/test_ui_quick_wins.py` to ensure presence of these enhancements.
12 |
13 | ## How to use
14 |
15 | - Buttons: use `btn btn-primary`, `btn btn-secondary`, or `btn btn-ghost`. Sizes: add `btn-sm` or `btn-lg`.
16 | - Focus: add `focus-ring` to any interactive element that needs a consistent visible focus.
17 | - Tables: add `table table-zebra` to tables; use `table-compact` for denser rows and `table-number` on numeric cells/headers.
 18 | - Chips: use `chip` plus variant like `chip-neutral`, `chip-success`, `chip-warning`, `chip-danger`. A smoke-test sketch covering these helpers follows this list.
19 |
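As a concrete illustration, a smoke test along these lines could look like the sketch below. It assumes the tasks list page applies the `table table-zebra` helper and that an `authenticated_client` fixture exists (both patterns appear elsewhere in this repository); the actual assertions in `tests/test_ui_quick_wins.py` may differ.

```python
# A sketch only: the real checks live in tests/test_ui_quick_wins.py and may differ.
import pytest


@pytest.mark.smoke
def test_quick_win_helpers_present(authenticated_client):
    # Assumes the tasks list page uses the zebra-table helper described above.
    resp = authenticated_client.get("/tasks")
    assert resp.status_code == 200
    html = resp.get_data(as_text=True)
    assert "table-zebra" in html
```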
20 | ## Notes
21 |
 22 | - The sticky header effect relies on `position: sticky` applied to the table header cells via the `.table` class. Ensure the table is inside a scrolling container (already true for the list view wrapper).
23 | - Token values are minimal fallbacks; prefer Tailwind theme tokens when available. These helpers are safe to remove once the templates are fully converted to Tailwind component primitives.
24 |
25 |
26 |
--------------------------------------------------------------------------------
/docker/start.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Python startup script for TimeTracker
4 | This avoids any shell script issues and runs everything in Python
5 | """
6 |
7 | import os
8 | import sys
9 | import time
10 | import subprocess
11 |
12 | def main():
13 | print("=== Starting TimeTracker (Python Mode) ===")
14 |
15 | # Set environment
16 | os.environ['FLASK_APP'] = 'app'
17 | os.chdir('/app')
18 |
19 | print("Waiting for database to be ready...")
20 | time.sleep(5) # Simple wait
21 |
22 | print("Running SQL database initialization (for basic tables)...")
23 | try:
24 | subprocess.run([sys.executable, '/app/docker/init-database-sql.py'], check=True)
25 | print("SQL database initialization completed")
26 | except subprocess.CalledProcessError as e:
27 | print(f"SQL database initialization failed: {e}")
28 | sys.exit(1)
29 |
30 | print("Running main database initialization...")
31 | try:
32 | subprocess.run([sys.executable, '/app/docker/init-database.py'], check=True)
33 | print("Database initialization completed")
34 | except subprocess.CalledProcessError as e:
35 | print(f"Database initialization failed: {e}")
36 | sys.exit(1)
37 |
38 | print("Starting application...")
39 | # Start gunicorn
40 | os.execv('/usr/local/bin/gunicorn', [
41 | 'gunicorn',
42 | '--bind', '0.0.0.0:8080',
43 | '--worker-class', 'eventlet',
44 | '--workers', '1',
45 | '--timeout', '120',
46 | 'app:create_app()'
47 | ])
48 |
49 | if __name__ == '__main__':
50 | main()
51 |
--------------------------------------------------------------------------------
/migrations/versions/068_add_user_password_hash.py:
--------------------------------------------------------------------------------
1 | """Add password_hash to users table
2 |
3 | Revision ID: 068_add_user_password_hash
4 | Revises: 067_integration_credentials
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '068_add_user_password_hash'
14 | down_revision = '067_integration_credentials'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def _has_column(inspector, table_name: str, column_name: str) -> bool:
20 | """Check if a column exists in a table"""
21 | try:
22 | return column_name in [col['name'] for col in inspector.get_columns(table_name)]
23 | except Exception:
24 | return False
25 |
26 |
27 | def upgrade():
28 | """Add password_hash column to users table"""
29 | bind = op.get_bind()
30 | inspector = sa.inspect(bind)
31 |
32 | # Ensure users table exists
33 | if 'users' not in inspector.get_table_names():
34 | return
35 |
36 | # Add password_hash column if missing
37 | if not _has_column(inspector, 'users', 'password_hash'):
38 | op.add_column('users', sa.Column('password_hash', sa.String(length=255), nullable=True))
39 |
40 |
41 | def downgrade():
42 | """Remove password_hash column from users table"""
43 | bind = op.get_bind()
44 | inspector = sa.inspect(bind)
45 |
46 | if 'users' not in inspector.get_table_names():
47 | return
48 |
49 | # Drop password_hash column if exists
50 | if _has_column(inspector, 'users', 'password_hash'):
51 | op.drop_column('users', 'password_hash')
52 |
53 |
--------------------------------------------------------------------------------
/tests/test_time_entry_freeze.py:
--------------------------------------------------------------------------------
1 | """Tests demonstrating time-control with freezegun and model time calculations."""
2 |
3 | import datetime as dt
4 |
5 | import pytest
6 |
7 | from app import db
8 | from app.models import TimeEntry
9 | from factories import UserFactory, ProjectFactory
10 |
11 |
12 | @pytest.mark.unit
13 | def test_active_timer_duration_without_real_time(app, time_freezer):
14 | """Create a running timer at T0 and stop it at T0+90 minutes using time freezer."""
15 | freezer = time_freezer("2024-01-01 09:00:00")
16 | with app.app_context():
17 | user = UserFactory()
18 | project = ProjectFactory()
19 | entry = TimeEntry(
20 | user_id=user.id,
21 | project_id=project.id,
22 | start_time=dt.datetime(2024, 1, 1, 9, 0, 0),
23 | notes="Work session",
24 | source="auto",
25 | billable=True,
26 | )
27 | db.session.add(entry)
28 | db.session.commit()
29 |
30 | # Advance frozen time and compute duration deterministically without tz side-effects
31 | freezer.stop()
32 | freezer = time_freezer("2024-01-01 10:30:00")
33 | entry = db.session.get(TimeEntry, entry.id)
34 | entry.end_time = entry.start_time + dt.timedelta(minutes=90)
35 | entry.calculate_duration()
36 | db.session.commit()
37 |
38 | # Duration should be exactly 90 minutes = 5400 seconds (ROUNDING_MINUTES=1 in TestingConfig)
39 | db.session.refresh(entry)
40 | assert entry.duration_seconds == 5400
41 | assert entry.end_time.hour == 10
42 | assert entry.end_time.minute == 30
43 |
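For context, the `time_freezer` fixture used above is presumably defined in the test suite's `conftest.py`. A hypothetical sketch of such a fixture, built on freezegun's `freeze_time` (the docstring above names freezegun), could look roughly like this; the project's actual fixture may differ.

```python
# Hypothetical sketch of a `time_freezer` fixture (not the project's conftest code).
import pytest
from freezegun import freeze_time


@pytest.fixture
def time_freezer():
    freezers = []

    def _freeze(timestamp: str):
        freezer = freeze_time(timestamp)
        freezer.start()
        freezers.append(freezer)
        return freezer

    yield _freeze

    # Clean up any freezers the test did not stop itself.
    for freezer in freezers:
        try:
            freezer.stop()
        except Exception:
            pass
```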
--------------------------------------------------------------------------------
/run_tests_individually.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Run each test file individually and report results"""
3 | import sys
4 | import os
5 | import subprocess
6 | from pathlib import Path
7 |
8 | # Add current directory to path
9 | sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
10 |
11 | # Get all test files
12 | test_dir = Path("tests")
13 | test_files = sorted(test_dir.glob("test_*.py"))
14 |
15 | print("=" * 70)
16 | print("Running TimeTracker Tests Individually")
17 | print("=" * 70)
18 | print()
19 |
20 | results = []
21 |
22 | for test_file in test_files:
23 | test_name = test_file.name
24 | print(f"\n{'='*70}")
25 | print(f"Testing: {test_name}")
26 | print(f"{'='*70}")
27 |
28 | # Run pytest for this specific file
29 | cmd = [
30 | sys.executable,
31 | "-m", "pytest",
32 | str(test_file),
33 | "-v",
34 | "--tb=line",
35 | "-x" # Stop on first failure
36 | ]
37 |
38 | result = subprocess.run(cmd, capture_output=False, text=True)
39 |
40 | status = "✓ PASSED" if result.returncode == 0 else "✗ FAILED"
41 | results.append((test_name, status, result.returncode))
42 | print(f"\nResult: {status} (exit code: {result.returncode})")
43 |
44 | print("\n\n" + "=" * 70)
45 | print("SUMMARY OF ALL TESTS")
46 | print("=" * 70)
47 | for test_name, status, code in results:
48 | print(f"{status:12} - {test_name}")
49 |
50 | passed = sum(1 for _, s, _ in results if "PASSED" in s)
51 | failed = sum(1 for _, s, _ in results if "FAILED" in s)
52 | print(f"\nTotal: {len(results)} test files | Passed: {passed} | Failed: {failed}")
53 | print("=" * 70)
54 |
55 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Core Flask dependencies
2 | Flask==3.0.0
3 | Flask-SQLAlchemy==3.1.1
4 | Flask-Migrate==4.0.5
5 | Flask-Login==0.6.3
6 | Flask-SocketIO==5.3.6
7 |
8 | # OAuth / OIDC
9 | Authlib==1.3.1
10 | PyJWT==2.8.0
11 |
12 | # Database
13 | SQLAlchemy==2.0.23
14 | alembic==1.13.1
15 | psycopg2-binary==2.9.9
16 |
17 | # Web server
18 | gunicorn==23.0.0
19 | eventlet==0.40.3
20 |
21 | # Security and forms
22 | Flask-WTF==1.2.1
23 | Flask-Limiter==3.8.0
24 |
25 | # Utilities
26 | python-dotenv==1.0.0
27 | pytz==2023.3
28 | python-dateutil==2.8.2
29 | Werkzeug==3.0.6
30 | requests==2.32.4
31 |
32 | # Email
33 | Flask-Mail==0.9.1
34 |
35 | # Excel export
36 | openpyxl==3.1.2
37 |
38 | # PDF Generation
39 | WeasyPrint==60.2
40 | pydyf==0.10.0
41 | Pillow==10.4.0
42 | reportlab==4.0.7
43 |
44 | # Background tasks
45 | APScheduler==3.10.4
46 |
47 | # Internationalization
48 | Flask-Babel==4.0.0
49 | Babel==2.14.0
50 |
51 | # Development and testing
52 | pytest==7.4.3
53 | pytest-flask==1.3.0
54 | pytest-cov==4.1.0
55 | coverage[toml]==7.4.0
56 | black==24.8.0
57 | flake8==6.1.0
58 |
59 | # Security
60 | cryptography==45.0.6
61 | markdown==3.6
62 | bleach==6.1.0
63 |
64 | # Analytics and Monitoring
65 | python-json-logger==2.0.7
66 | sentry-sdk==1.40.0
67 | prometheus-client==0.19.0
68 | posthog==3.1.0
69 |
70 | # API Documentation
71 | flask-swagger-ui==5.21.0
72 | apispec==6.3.0
73 | marshmallow==3.20.1
74 |
75 | # OCR for receipt scanning
76 | pytesseract==0.3.10
77 |
78 | # Payment Gateway Integration
79 | stripe==7.0.0
80 |
81 | # Calendar Integration
82 | google-api-python-client==2.100.0
83 | google-auth-httplib2==0.1.1
84 | google-auth-oauthlib==1.1.0
85 |
86 | # Redis for caching
87 | redis==5.0.1
88 | hiredis==2.2.3
--------------------------------------------------------------------------------
/app/schemas/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Schema/DTO layer for API serialization and validation.
3 | Uses Marshmallow for consistent API responses and input validation.
4 | """
5 |
6 | from .time_entry_schema import TimeEntrySchema, TimeEntryCreateSchema, TimeEntryUpdateSchema
7 | from .project_schema import ProjectSchema, ProjectCreateSchema, ProjectUpdateSchema
8 | from .invoice_schema import InvoiceSchema, InvoiceCreateSchema, InvoiceUpdateSchema
9 | from .task_schema import TaskSchema, TaskCreateSchema, TaskUpdateSchema
10 | from .expense_schema import ExpenseSchema, ExpenseCreateSchema, ExpenseUpdateSchema
11 | from .client_schema import ClientSchema, ClientCreateSchema, ClientUpdateSchema
12 | from .payment_schema import PaymentSchema, PaymentCreateSchema, PaymentUpdateSchema
13 | from .comment_schema import CommentSchema, CommentCreateSchema, CommentUpdateSchema
14 | from .user_schema import UserSchema, UserCreateSchema, UserUpdateSchema
15 |
16 | __all__ = [
17 | "TimeEntrySchema",
18 | "TimeEntryCreateSchema",
19 | "TimeEntryUpdateSchema",
20 | "ProjectSchema",
21 | "ProjectCreateSchema",
22 | "ProjectUpdateSchema",
23 | "InvoiceSchema",
24 | "InvoiceCreateSchema",
25 | "InvoiceUpdateSchema",
26 | "TaskSchema",
27 | "TaskCreateSchema",
28 | "TaskUpdateSchema",
29 | "ExpenseSchema",
30 | "ExpenseCreateSchema",
31 | "ExpenseUpdateSchema",
32 | "ClientSchema",
33 | "ClientCreateSchema",
34 | "ClientUpdateSchema",
35 | "PaymentSchema",
36 | "PaymentCreateSchema",
37 | "PaymentUpdateSchema",
38 | "CommentSchema",
39 | "CommentCreateSchema",
40 | "CommentUpdateSchema",
41 | "UserSchema",
42 | "UserCreateSchema",
43 | "UserUpdateSchema",
44 | ]
45 |
--------------------------------------------------------------------------------
/assets/README.md:
--------------------------------------------------------------------------------
1 | # Assets Directory
2 |
3 | This directory contains static assets for the TimeTracker GitHub Pages website.
4 |
5 | ## Files to Add
6 |
7 | ### Required Assets
8 | - `favicon.ico` - Website favicon (16x16 or 32x32 pixels)
9 | - `og-image.png` - Open Graph image for social media sharing (1200x630 pixels recommended)
10 |
11 | ### Optional Assets
12 | - `logo.png` - Project logo (various sizes: 32x32, 64x64, 128x128, 256x256)
13 | - `screenshots/` - Directory for application screenshots
14 | - `icons/` - Additional icon files
15 |
16 | ## Image Guidelines
17 |
18 | ### Favicon
19 | - Format: ICO or PNG
20 | - Size: 16x16, 32x32, or 48x48 pixels
21 | - Should be simple and recognizable
22 |
23 | ### Open Graph Image
24 | - Format: PNG or JPG
25 | - Size: 1200x630 pixels (1.91:1 aspect ratio)
26 | - Should include project name and key visual elements
27 | - Text should be readable at small sizes
28 |
29 | ### Screenshots
30 | - Format: PNG or JPG
31 | - Size: Minimum 800x600 pixels
32 | - Should showcase key features of the application
33 | - Include descriptive filenames
34 |
35 | ## Current Status
36 | - ✅ `README.md` - This file
37 | - ✅ `screenshots/` - Added 3 application screenshots
38 | - ❌ `favicon.ico` - Need to create
39 | - ❌ `og-image.png` - Need to create
40 |
41 | ## Creating Assets
42 |
43 | ### Favicon
44 | You can create a simple favicon using online tools like:
45 | - [Favicon.io](https://favicon.io/)
46 | - [RealFaviconGenerator](https://realfavicongenerator.net/)
47 |
48 | ### Open Graph Image
49 | Create using design tools like:
50 | - Canva
51 | - Figma
52 | - GIMP
53 | - Photoshop
54 |
55 | Or use online tools like:
56 | - [Canva](https://canva.com/)
57 | - [Bannerbear](https://bannerbear.com/)
58 |
--------------------------------------------------------------------------------
/docs/features/KEYBOARD_SHORTCUTS_README.md:
--------------------------------------------------------------------------------
1 | # Keyboard Shortcuts - Quick Reference
2 |
3 | ## 🎯 Quick Access
4 |
5 | - **View All Shortcuts**: Press `Shift+?`
6 | - **Command Palette**: Press `Ctrl+K` or `Cmd+K`
7 | - **Settings**: Navigate to Settings → Keyboard Shortcuts
8 |
9 | ## 📋 Most Used Shortcuts
10 |
11 | ### Navigation (Vim-style)
12 | | Keys | Action |
13 | |------|--------|
14 | | `g` `d` | Dashboard |
15 | | `g` `p` | Projects |
16 | | `g` `t` | Tasks |
17 | | `g` `r` | Reports |
18 |
19 | ### Quick Actions
20 | | Keys | Action |
21 | |------|--------|
22 | | `Ctrl+K` | Command Palette |
23 | | `Ctrl+/` | Search |
24 | | `Shift+?` | Show All Shortcuts |
25 |
26 | ### Timer Control
27 | | Keys | Action |
28 | |------|--------|
29 | | `t` `s` | Start Timer |
30 | | `t` `p` | Stop Timer |
31 | | `t` `l` | Log Time |
32 |
33 | ## 📚 Full Documentation
34 |
35 | - **User Guide**: [KEYBOARD_SHORTCUTS_ENHANCED.md](./KEYBOARD_SHORTCUTS_ENHANCED.md)
36 | - **Implementation Guide**: [../KEYBOARD_SHORTCUTS_IMPLEMENTATION.md](../KEYBOARD_SHORTCUTS_IMPLEMENTATION.md)
37 | - **Summary**: [../../KEYBOARD_SHORTCUTS_SUMMARY.md](../../KEYBOARD_SHORTCUTS_SUMMARY.md)
38 |
39 | ## 🎨 Features
40 |
41 | ✅ 50+ keyboard shortcuts
42 | ✅ Context-aware (table, form, modal)
43 | ✅ Visual cheat sheet
44 | ✅ Usage statistics
45 | ✅ Full customization
46 | ✅ Accessible (WCAG 2.1 AA)
47 | ✅ Dark mode support
48 | ✅ Print-friendly
49 |
50 | ## 🚀 Getting Started
51 |
52 | 1. Press `Shift+?` to see all available shortcuts
53 | 2. Try navigation: `g` then `d` for Dashboard
54 | 3. Open command palette: `Ctrl+K`
55 | 4. Customize in Settings → Keyboard Shortcuts
56 |
57 | ---
58 |
59 | **Version**: 2.0 | **Status**: ✅ Production Ready
60 |
61 |
--------------------------------------------------------------------------------
/tests/test_tasks_filters_ui.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 |
4 | @pytest.mark.smoke
5 | def test_task_view_renders_markdown(app, client, task, authenticated_client):
6 | # Arrange: give the task a markdown description
7 | from app import db
8 |
9 | task.description = "# Heading\n\n**Bold** and _italic_."
10 | db.session.commit()
11 |
12 | # Act
13 | resp = authenticated_client.get(f"/tasks/{task.id}")
14 |
15 | # Assert: the rendered HTML should include tags produced by markdown filter
16 | assert resp.status_code == 200
17 | html = resp.get_data(as_text=True)
18 | assert "" in html or "" in html
19 | assert "" in html or "" in html
20 |
21 |
22 | @pytest.mark.smoke
23 | def test_project_view_renders_markdown(app, client, project, admin_authenticated_client):
24 | from app import db
25 |
26 | project.description = "Intro with a list:\n\n- item one\n- item two"
27 | db.session.commit()
28 |
29 | resp = admin_authenticated_client.get(f"/projects/{project.id}")
30 | assert resp.status_code == 200
31 | html = resp.get_data(as_text=True)
32 | # Look for list markup from markdown
33 | assert "" in html and "- " in html
34 |
35 |
36 | import pytest
37 |
38 |
39 | @pytest.mark.unit
40 | @pytest.mark.routes
41 | @pytest.mark.smoke
42 | def test_tasks_filters_collapsible_ui(authenticated_client):
43 | resp = authenticated_client.get("/tasks")
44 | assert resp.status_code == 200
45 | html = resp.get_data(as_text=True)
46 | assert 'id="toggleFilters"' in html
47 | assert 'id="filterBody"' in html
48 | assert 'id="filterToggleIcon"' in html
49 | # Ensure localStorage key is referenced (persisted visibility)
50 | assert "taskListFiltersVisible" in html
51 |
--------------------------------------------------------------------------------
/app/schemas/comment_schema.py:
--------------------------------------------------------------------------------
1 | """
2 | Schemas for comment serialization and validation.
3 | """
4 |
5 | from marshmallow import Schema, fields, validate
6 |
7 |
8 | class CommentSchema(Schema):
9 | """Schema for comment serialization"""
10 |
11 | id = fields.Int(dump_only=True)
12 | content = fields.Str(required=True, validate=validate.Length(min=1, max=5000))
13 | project_id = fields.Int(allow_none=True)
14 | task_id = fields.Int(allow_none=True)
15 | quote_id = fields.Int(allow_none=True)
16 | user_id = fields.Int(required=True)
17 | is_internal = fields.Bool(missing=True)
18 | parent_id = fields.Int(allow_none=True)
19 | created_at = fields.DateTime(dump_only=True)
20 | updated_at = fields.DateTime(dump_only=True)
21 |
22 | # Nested fields
23 | author = fields.Nested("UserSchema", dump_only=True, allow_none=True)
24 | project = fields.Nested("ProjectSchema", dump_only=True, allow_none=True)
25 | task = fields.Nested("TaskSchema", dump_only=True, allow_none=True)
26 | replies = fields.Nested("CommentSchema", many=True, dump_only=True, allow_none=True)
27 |
28 |
29 | class CommentCreateSchema(Schema):
30 | """Schema for creating a comment"""
31 |
32 | content = fields.Str(required=True, validate=validate.Length(min=1, max=5000))
33 | project_id = fields.Int(allow_none=True)
34 | task_id = fields.Int(allow_none=True)
35 | quote_id = fields.Int(allow_none=True)
36 | parent_id = fields.Int(allow_none=True)
37 | is_internal = fields.Bool(missing=True)
38 |
39 |
40 | class CommentUpdateSchema(Schema):
41 | """Schema for updating a comment"""
42 |
43 | content = fields.Str(allow_none=True, validate=validate.Length(min=1, max=5000))
44 | is_internal = fields.Bool(allow_none=True)
45 |
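As an illustration of how these schemas are typically used (a sketch with placeholder values, not code from this repository), Marshmallow-based input validation and serialization look roughly like this:

```python
# Hypothetical usage of the comment schemas; the payload values are placeholders.
from app.schemas.comment_schema import CommentSchema, CommentCreateSchema

create_schema = CommentCreateSchema()
data = create_schema.load({"content": "Looks good to me", "task_id": 42, "is_internal": False})
# `data` is now a validated dict ready to hand to the service/model layer.

comment_schema = CommentSchema()
# Serializing a Comment model instance (or compatible object) back to JSON-safe data:
# payload = comment_schema.dump(comment)
```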
--------------------------------------------------------------------------------
/migrations/versions/074_add_password_change_required.py:
--------------------------------------------------------------------------------
1 | """Add password_change_required to users table
2 |
3 | Revision ID: 074_password_change_required
4 | Revises: 073_ai_features_gps_tracking
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '074_password_change_required'
14 | down_revision = '073_ai_features_gps_tracking'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def _has_column(inspector, table_name: str, column_name: str) -> bool:
20 | """Check if a column exists in a table"""
21 | try:
22 | return column_name in [col['name'] for col in inspector.get_columns(table_name)]
23 | except Exception:
24 | return False
25 |
26 |
27 | def upgrade():
28 | """Add password_change_required column to users table"""
29 | bind = op.get_bind()
30 | inspector = sa.inspect(bind)
31 |
32 | # Ensure users table exists
33 | if 'users' not in inspector.get_table_names():
34 | return
35 |
36 | # Add password_change_required column if missing
37 | if not _has_column(inspector, 'users', 'password_change_required'):
38 | op.add_column('users', sa.Column('password_change_required', sa.Boolean(), nullable=False, server_default='false'))
39 |
40 |
41 | def downgrade():
42 | """Remove password_change_required column from users table"""
43 | bind = op.get_bind()
44 | inspector = sa.inspect(bind)
45 |
46 | if 'users' not in inspector.get_table_names():
47 | return
48 |
49 | # Drop password_change_required column if exists
50 | if _has_column(inspector, 'users', 'password_change_required'):
51 | op.drop_column('users', 'password_change_required')
52 |
53 |
--------------------------------------------------------------------------------
/app/templates/reports/summary.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% from "components/cards.html" import info_card %}
3 |
4 | {% block content %}
5 |
12 |
13 |
14 | {{ info_card("Today's Hours", "%.2f"|format(today_hours), "Logged today") }}
15 | {{ info_card("Week's Hours", "%.2f"|format(week_hours), "Logged this week") }}
16 | {{ info_card("Month's Hours", "%.2f"|format(month_hours), "Logged this month") }}
17 |
18 |
19 |
20 | Top Projects (Last 30 Days)
21 |
22 |
23 |
24 | | Project |
25 | Total Hours |
26 |
27 |
28 |
29 | {% for stat in project_stats %}
30 |
31 | | {{ stat.project.name }} |
32 | {{ "%.2f"|format(stat.hours) }} |
33 |
34 | {% else %}
35 |
36 | | No project data for the last 30 days. |
37 |
38 | {% endfor %}
39 |
40 |
41 |
42 | {% endblock %}
43 |
--------------------------------------------------------------------------------
/app/templates/errors/400.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block title %}{{ _('400 Bad Request') }} - {{ app_name }}{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 |
10 |
15 |
16 | {{ _('Invalid Request') }}
17 |
18 | {{ _('The request you made is invalid or contains errors. This could be due to:') }}
19 |
20 |
21 | - {{ _('Missing or invalid form data') }}
22 | - {{ _('Malformed request parameters') }}
23 |
24 |
32 |
33 |
34 |
35 |
36 |
37 | {% endblock %}
38 |
--------------------------------------------------------------------------------
/migrations/versions/006_add_logo_and_task_timestamps.py:
--------------------------------------------------------------------------------
1 | """add company_logo_filename to settings and started/completed to tasks
2 |
3 | Revision ID: 006
4 | Revises: 005
5 | Create Date: 2025-09-08 16:41:00
6 | """
7 |
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '006'
14 | down_revision = '005'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | bind = op.get_bind()
21 | inspector = sa.inspect(bind)
22 |
23 | settings_cols = {c['name'] for c in inspector.get_columns('settings')}
24 | tasks_cols = {c['name'] for c in inspector.get_columns('tasks')}
25 |
26 | # Add company_logo_filename to settings if missing
27 | if 'company_logo_filename' not in settings_cols:
28 | with op.batch_alter_table('settings') as batch_op:
29 | batch_op.add_column(sa.Column('company_logo_filename', sa.String(length=255), nullable=True, server_default=''))
30 |
31 | # Add started_at and completed_at to tasks if missing
32 | add_task_cols = []
33 | if 'started_at' not in tasks_cols:
34 | add_task_cols.append(sa.Column('started_at', sa.DateTime(), nullable=True))
35 | if 'completed_at' not in tasks_cols:
36 | add_task_cols.append(sa.Column('completed_at', sa.DateTime(), nullable=True))
37 | if add_task_cols:
38 | with op.batch_alter_table('tasks') as batch_op:
39 | for col in add_task_cols:
40 | batch_op.add_column(col)
41 |
42 |
43 | def downgrade() -> None:
44 | with op.batch_alter_table('tasks') as batch_op:
45 | batch_op.drop_column('completed_at')
46 | batch_op.drop_column('started_at')
47 |
48 | with op.batch_alter_table('settings') as batch_op:
49 | batch_op.drop_column('company_logo_filename')
50 |
51 |
52 |
--------------------------------------------------------------------------------
/app/integrations/registry.py:
--------------------------------------------------------------------------------
1 | """
2 | Integration connector registry.
3 | Registers all available connectors with the IntegrationService.
4 | """
5 |
6 | from app.services.integration_service import IntegrationService
7 | from app.integrations.jira import JiraConnector
8 | from app.integrations.slack import SlackConnector
9 | from app.integrations.github import GitHubConnector
10 | from app.integrations.google_calendar import GoogleCalendarConnector
11 | from app.integrations.outlook_calendar import OutlookCalendarConnector
12 | from app.integrations.microsoft_teams import MicrosoftTeamsConnector
13 | from app.integrations.asana import AsanaConnector
14 | from app.integrations.trello import TrelloConnector
15 | from app.integrations.gitlab import GitLabConnector
16 | from app.integrations.quickbooks import QuickBooksConnector
17 | from app.integrations.xero import XeroConnector
18 |
19 |
20 | def register_connectors():
21 | """Register all available connectors."""
22 | IntegrationService.register_connector("jira", JiraConnector)
23 | IntegrationService.register_connector("slack", SlackConnector)
24 | IntegrationService.register_connector("github", GitHubConnector)
25 | IntegrationService.register_connector("google_calendar", GoogleCalendarConnector)
26 | IntegrationService.register_connector("outlook_calendar", OutlookCalendarConnector)
27 | IntegrationService.register_connector("microsoft_teams", MicrosoftTeamsConnector)
28 | IntegrationService.register_connector("asana", AsanaConnector)
29 | IntegrationService.register_connector("trello", TrelloConnector)
30 | IntegrationService.register_connector("gitlab", GitLabConnector)
31 | IntegrationService.register_connector("quickbooks", QuickBooksConnector)
32 | IntegrationService.register_connector("xero", XeroConnector)
33 |
34 |
35 | # Auto-register on import
36 | register_connectors()
37 |
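The only contract this module relies on is the call shape `IntegrationService.register_connector(key, ConnectorClass)` used above. A hedged sketch of how an additional provider could be wired in; `ExampleConnector`, its attributes, and its methods are illustrative assumptions, not code from this repository:

```python
# Illustrative only: ExampleConnector and its interface are assumptions, not repo code.
from app.services.integration_service import IntegrationService


class ExampleConnector:
    """Hypothetical connector following the same registration pattern as above."""

    provider_key = "example"

    def fetch_items(self):
        # A real connector would call the provider's API here.
        return []


# Same call shape as the registrations in register_connectors().
IntegrationService.register_connector("example", ExampleConnector)
```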
--------------------------------------------------------------------------------
/docs/features/RUN_BLACK_FORMATTING.md:
--------------------------------------------------------------------------------
1 | # Run Black Code Formatting
2 |
3 | ## Quick Fix
4 |
5 | Run ONE of these commands to fix all 44 files:
6 |
7 | ```bash
8 | # Option 1: Direct command
9 | black app/
10 |
11 | # Option 2: Via Python module
12 | python -m black app/
13 |
14 | # Option 3: Via Python launcher (Windows)
15 | py -m black app/
16 | ```
17 |
18 | ## Install Black (If Not Installed)
19 |
20 | ```bash
21 | # Using pip
22 | pip install black
23 |
    24 | # Or pin the version (matches the pre-commit hook below)
    25 | pip install black==24.1.1
26 | ```
27 |
28 | ## What Will Be Fixed
29 |
30 | Black will reformat these 44 files:
31 | - All files in `app/models/` (22 files)
32 | - All files in `app/routes/` (12 files)
33 | - All files in `app/utils/` (10 files)
34 | - `app/__init__.py`
35 | - `app/config.py`
36 |
37 | ## Verify Formatting
38 |
39 | ```bash
40 | # Check what would be changed (without changing)
41 | black --check app/
42 |
43 | # See diff of changes
44 | black --diff app/
45 |
46 | # Actually apply formatting
47 | black app/
48 | ```
49 |
50 | ## Expected Output
51 |
52 | ```
53 | reformatted app/models/__init__.py
54 | reformatted app/models/client.py
55 | ... (42 more files) ...
56 | All done! ✨ 🍰 ✨
57 | 44 files reformatted.
58 | ```
59 |
60 | ## Alternative: Format on Commit
61 |
62 | If you prefer, you can set up pre-commit hooks:
63 |
64 | ```bash
65 | # Install pre-commit
66 | pip install pre-commit
67 |
68 | # Create .pre-commit-config.yaml
69 | cat > .pre-commit-config.yaml << EOF
70 | repos:
71 | - repo: https://github.com/psf/black
72 | rev: 24.1.1
73 | hooks:
74 | - id: black
75 | language_version: python3.11
76 | EOF
77 |
78 | # Install hooks
79 | pre-commit install
80 |
81 | # Now Black will run automatically on git commit
82 | ```
83 |
84 | ## One-Line Fix
85 |
86 | ```bash
87 | pip install black && black app/
88 | ```
89 |
90 | That's it! 🎉
91 |
92 |
--------------------------------------------------------------------------------
/quick_test_summary.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Quick test summary - runs each test file and shows results"""
3 | import sys
4 | import os
5 | import subprocess
6 |
7 | sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
8 |
9 | test_files = [
10 | "test_basic.py",
11 | "test_analytics.py",
12 | "test_invoices.py",
13 | "test_models_comprehensive.py",
14 | "test_new_features.py",
15 | "test_routes.py",
16 | "test_security.py",
17 | "test_timezone.py"
18 | ]
19 |
20 | print("=" * 80)
21 | print("TIMETRACKER TEST SUMMARY")
22 | print("=" * 80)
23 |
24 | results = []
25 |
26 | for test_file in test_files:
27 | print(f"\nTesting: {test_file}...", end=" ", flush=True)
28 |
29 | cmd = [sys.executable, "-m", "pytest", f"tests/{test_file}", "-q", "--tb=no", "--no-header"]
30 | result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
31 |
32 | # Parse output for pass/fail counts
33 | output = result.stdout + result.stderr
34 |
35 | if result.returncode == 0:
36 | status = "✓ ALL PASSED"
37 | elif result.returncode == 1:
38 | status = "✗ SOME FAILED"
39 | else:
40 | status = "⚠ ERROR"
41 |
42 | # Try to extract summary line
43 | summary_line = ""
44 | for line in output.split('\n'):
45 | if 'passed' in line.lower() or 'failed' in line.lower() or 'error' in line.lower():
46 | summary_line = line.strip()
47 | if summary_line:
48 | break
49 |
50 | results.append((test_file, status, summary_line))
51 | print(f"{status}")
52 | if summary_line:
53 | print(f" └─ {summary_line}")
54 |
55 | print("\n" + "=" * 80)
56 | print("FINAL SUMMARY")
57 | print("=" * 80)
58 |
59 | for test_file, status, summary in results:
60 | print(f"{status:15} {test_file}")
61 |
62 | print("=" * 80)
63 |
64 |
--------------------------------------------------------------------------------
/tests/smoke_test_prepaid_hours.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from datetime import datetime, date, timedelta
3 | from decimal import Decimal
4 |
5 | from app import db
6 | from app.models import Client, Project, Invoice, TimeEntry
7 | from factories import TimeEntryFactory, ClientFactory, ProjectFactory, InvoiceFactory
8 |
9 |
10 | @pytest.mark.smoke
11 | def test_prepaid_hours_summary_display(app, client, user):
12 | """Smoke test to ensure prepaid hours summary renders on generate-from-time page."""
13 | with client.session_transaction() as sess:
14 | sess["_user_id"] = str(user.id)
15 | sess["_fresh"] = True
16 |
17 | prepaid_client = ClientFactory(
18 | name="Smoke Prepaid", email="smoke@example.com", prepaid_hours_monthly=Decimal("50"), prepaid_reset_day=1
19 | )
20 | db.session.commit()
21 |
22 | project = ProjectFactory(
23 | name="Smoke Project", client_id=prepaid_client.id, billable=True, hourly_rate=Decimal("85.00")
24 | )
25 | db.session.commit()
26 |
27 | invoice = InvoiceFactory(
28 | invoice_number="INV-SMOKE-001",
29 | project_id=project.id,
30 | client_name=prepaid_client.name,
31 | client_id=prepaid_client.id,
32 | due_date=date.today() + timedelta(days=14),
33 | created_by=user.id,
34 | status="draft",
35 | )
36 | db.session.commit()
37 |
38 | start = datetime.utcnow() - timedelta(hours=5)
39 | end = datetime.utcnow()
40 | TimeEntryFactory(user_id=user.id, project_id=project.id, start_time=start, end_time=end, billable=True)
41 |
42 | response = client.get(f"/invoices/{invoice.id}/generate-from-time")
43 | assert response.status_code == 200
44 | html = response.get_data(as_text=True)
45 | assert "Prepaid Hours Overview" in html
46 | assert "Monthly Prepaid Hours" not in html # ensure we are on the summary, not the form
47 |
--------------------------------------------------------------------------------
/migrations/versions/004_add_task_activities_table.py:
--------------------------------------------------------------------------------
1 | """add task_activities table
2 |
3 | Revision ID: 004
4 | Revises: 003
5 | Create Date: 2025-09-07 10:35:00
6 | """
7 |
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '004'
14 | down_revision = '003'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | op.create_table(
21 | 'task_activities',
22 | sa.Column('id', sa.Integer(), primary_key=True),
23 | sa.Column('task_id', sa.Integer(), sa.ForeignKey('tasks.id', ondelete='CASCADE'), nullable=False, index=True),
24 | sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id', ondelete='SET NULL'), nullable=True, index=True),
25 | sa.Column('event', sa.String(length=50), nullable=False, index=True),
26 | sa.Column('details', sa.Text(), nullable=True),
27 | sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
28 | )
29 |
30 | # Explicit indexes (in addition to inline index=True for portability)
31 | op.create_index('idx_task_activities_task_id', 'task_activities', ['task_id'])
32 | op.create_index('idx_task_activities_user_id', 'task_activities', ['user_id'])
33 | op.create_index('idx_task_activities_event', 'task_activities', ['event'])
34 | op.create_index('idx_task_activities_created_at', 'task_activities', ['created_at'])
35 |
36 |
37 | def downgrade() -> None:
38 | op.drop_index('idx_task_activities_created_at', table_name='task_activities')
39 | op.drop_index('idx_task_activities_event', table_name='task_activities')
40 | op.drop_index('idx_task_activities_user_id', table_name='task_activities')
41 | op.drop_index('idx_task_activities_task_id', table_name='task_activities')
42 | op.drop_table('task_activities')
43 |
44 |
45 |
--------------------------------------------------------------------------------
/docs/implementation-notes/IMPLEMENTATION_STATUS.md:
--------------------------------------------------------------------------------
1 | # Implementation Status - Complete
2 |
3 | **Date:** 2025-01-27
4 | **Status:** ✅ 100% COMPLETE
5 |
6 | ---
7 |
8 | ## 🎉 All Improvements Implemented!
9 |
10 | Every single improvement from the comprehensive analysis document has been successfully implemented.
11 |
12 | ---
13 |
14 | ## ✅ Complete Implementation List
15 |
16 | ### Architecture (100%)
17 | - ✅ Service Layer (9 services)
18 | - ✅ Repository Pattern (7 repositories)
19 | - ✅ Schema/DTO Layer (6 schemas)
20 | - ✅ Constants & Enums
21 | - ✅ Event Bus
22 | - ✅ Transaction Management
23 |
24 | ### Performance (100%)
25 | - ✅ Database Indexes (15+)
26 | - ✅ Query Optimization Utilities
27 | - ✅ N+1 Query Prevention
28 | - ✅ Caching Foundation
29 | - ✅ Performance Monitoring
30 |
31 | ### Quality (100%)
32 | - ✅ Input Validation
33 | - ✅ Error Handling
34 | - ✅ API Response Helpers
35 | - ✅ Security Improvements
36 | - ✅ CI/CD Pipeline
37 |
38 | ### Testing (100%)
39 | - ✅ Test Infrastructure
40 | - ✅ Example Unit Tests
41 | - ✅ Example Integration Tests
42 | - ✅ Testing Patterns
43 |
44 | ### Documentation (100%)
45 | - ✅ Comprehensive Analysis
46 | - ✅ Implementation Guides
47 | - ✅ Migration Guides
48 | - ✅ Quick Start Guides
49 | - ✅ API Documentation
50 | - ✅ Usage Examples
51 |
52 | ### Examples (100%)
53 | - ✅ Refactored Timer Routes
54 | - ✅ Refactored Invoice Routes
55 | - ✅ Refactored Project Routes
56 |
57 | ---
58 |
59 | ## 📊 Final Statistics
60 |
61 | - **Files Created:** 50+
62 | - **Lines of Code:** 4,500+
63 | - **Services:** 9
64 | - **Repositories:** 7
65 | - **Schemas:** 6
66 | - **Utilities:** 9
67 | - **Documentation:** 9 files
68 |
69 | ---
70 |
71 | ## 🚀 Ready for Production
72 |
73 | All code is:
74 | - ✅ Linter-clean
75 | - ✅ Well-documented
76 | - ✅ Test-ready
77 | - ✅ Production-ready
78 |
79 | ---
80 |
81 | **Everything is complete!** 🎉
82 |
83 |
--------------------------------------------------------------------------------
/app/models/currency.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from app import db
3 |
4 |
5 | class Currency(db.Model):
6 | """Supported currencies and display metadata."""
7 |
8 | __tablename__ = "currencies"
9 |
10 | code = db.Column(db.String(3), primary_key=True) # e.g., EUR, USD
11 | name = db.Column(db.String(64), nullable=False)
12 | symbol = db.Column(db.String(8), nullable=True) # e.g., €, $
13 | decimal_places = db.Column(db.Integer, default=2, nullable=False)
14 | is_active = db.Column(db.Boolean, default=True, nullable=False)
15 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
16 | updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
17 |
18 | def __repr__(self):
    19 |         return f"<Currency {self.code}>"
20 |
21 |
22 | class ExchangeRate(db.Model):
23 | """Daily exchange rates between currency pairs."""
24 |
25 | __tablename__ = "exchange_rates"
26 |
27 | id = db.Column(db.Integer, primary_key=True)
28 | base_code = db.Column(db.String(3), db.ForeignKey("currencies.code"), nullable=False, index=True)
29 | quote_code = db.Column(db.String(3), db.ForeignKey("currencies.code"), nullable=False, index=True)
30 | rate = db.Column(db.Numeric(18, 8), nullable=False)
31 | date = db.Column(db.Date, nullable=False, index=True)
32 | source = db.Column(db.String(50), nullable=True) # e.g., ECB, exchangerate.host
33 |
34 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
35 | updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
36 |
37 | __table_args__ = (db.UniqueConstraint("base_code", "quote_code", "date", name="uq_exchange_rate_day"),)
38 |
39 | def __repr__(self):
    40 |         return f"<ExchangeRate {self.base_code}/{self.quote_code} {self.rate} on {self.date}>"
41 |
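As a hedged usage sketch (not code from this repository), the unique `(base_code, quote_code, date)` constraint means the newest stored rate for a pair can be fetched by ordering on `date`; the helper name is illustrative:

```python
# Hedged sketch: assumes the Flask-SQLAlchemy query interface provided by db.Model.
from app.models.currency import ExchangeRate


def latest_rate(base: str, quote: str) -> ExchangeRate | None:
    """Return the newest stored rate for a currency pair, or None if none exists."""
    return (
        ExchangeRate.query.filter_by(base_code=base, quote_code=quote)
        .order_by(ExchangeRate.date.desc())
        .first()
    )
```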
--------------------------------------------------------------------------------
/app/templates/errors/403.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block title %}{{ _('403 Forbidden') }} - {{ app_name }}{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 |
10 |
15 |
16 | {{ _('Access Denied') }}
17 |
18 | {{ _("You don't have permission to access this resource. This could be due to:") }}
19 |
20 |
21 | - {{ _('Insufficient privileges') }}
22 | - {{ _('Not logged in') }}
23 | - {{ _('Resource access restrictions') }}
24 |
25 |
33 |
34 |
35 |
36 |
37 |
38 | {% endblock %}
39 |
--------------------------------------------------------------------------------
/tests/test_client_prepaid_model.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from datetime import datetime, date
3 | from decimal import Decimal
4 |
5 | from app import db
6 | from app.models import Client, ClientPrepaidConsumption, User, Project, TimeEntry
7 | from factories import TimeEntryFactory
8 |
9 |
10 | @pytest.mark.models
11 | def test_client_prepaid_properties_and_consumption(app):
12 | client = Client(name="Model Client", prepaid_hours_monthly=Decimal("40.0"), prepaid_reset_day=5)
13 | db.session.add(client)
14 | db.session.commit()
15 |
16 | assert client.prepaid_plan_enabled is True
17 | assert client.prepaid_hours_decimal == Decimal("40.00")
18 |
19 | reference = datetime(2025, 3, 7, 12, 0, 0)
20 | period_start = client.prepaid_month_start(reference)
21 | assert period_start == date(2025, 3, 5)
22 |
23 | user = User(username="modeluser", email="modeluser@example.com")
24 | db.session.add(user)
25 | db.session.commit()
26 |
27 | project = Project(name="Model Project", client_id=client.id, billable=True)
28 | db.session.add(project)
29 | db.session.commit()
30 |
31 | entry = TimeEntryFactory(
32 | user_id=user.id,
33 | project_id=project.id,
34 | start_time=datetime(2025, 3, 5, 9, 0, 0),
35 | end_time=datetime(2025, 3, 5, 21, 0, 0),
36 | billable=True,
37 | )
38 |
39 | # Create a consumption record for 12 hours
40 | consumption = ClientPrepaidConsumption(
41 | client_id=client.id, time_entry_id=entry.id, allocation_month=period_start, seconds_consumed=12 * 3600
42 | )
43 | db.session.add(consumption)
44 | db.session.commit()
45 |
46 | consumed = client.get_prepaid_consumed_hours(period_start)
47 | remaining = client.get_prepaid_remaining_hours(period_start)
48 |
49 | assert consumed.quantize(Decimal("0.01")) == Decimal("12.00")
50 | assert remaining.quantize(Decimal("0.01")) == Decimal("28.00")
51 |
--------------------------------------------------------------------------------
/docker/fix-schema.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Simple script to fix the missing task_id column
4 | """
5 |
6 | import os
7 | import sys
8 | import time
9 | from sqlalchemy import create_engine, text, inspect
10 |
11 | def fix_schema():
12 | """Fix the missing task_id column"""
13 | url = os.getenv("DATABASE_URL", "")
14 |
15 | if not url.startswith("postgresql"):
16 | print("No PostgreSQL database configured")
17 | return False
18 |
19 | try:
20 | engine = create_engine(url, pool_pre_ping=True)
21 | inspector = inspect(engine)
22 |
23 | # Check if time_entries table exists
24 | if 'time_entries' not in inspector.get_table_names():
25 | print("time_entries table not found")
26 | return False
27 |
28 | # Check if task_id column exists
29 | columns = inspector.get_columns("time_entries")
30 | column_names = [col['name'] for col in columns]
31 | print(f"Current columns in time_entries: {column_names}")
32 |
33 | if 'task_id' in column_names:
34 | print("task_id column already exists")
35 | return True
36 |
37 | # Add the missing column
38 | print("Adding task_id column...")
39 | with engine.connect() as conn:
40 | conn.execute(text("ALTER TABLE time_entries ADD COLUMN task_id INTEGER;"))
41 | conn.commit()
42 |
43 | print("✓ task_id column added successfully")
44 | return True
45 |
46 | except Exception as e:
47 | print(f"Error fixing schema: {e}")
48 | import traceback
49 | traceback.print_exc()
50 | return False
51 |
52 | if __name__ == "__main__":
53 | if fix_schema():
54 | print("Schema fix completed successfully")
55 | sys.exit(0)
56 | else:
57 | print("Schema fix failed")
58 | sys.exit(1)
59 |
--------------------------------------------------------------------------------
/docker/start.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | cd /app
4 | export FLASK_APP=app
5 |
6 | echo "=== Starting TimeTracker ==="
7 |
8 | echo "Waiting for database to be ready..."
9 | # Wait for Postgres to be ready
10 | python - <<"PY"
11 | import os
12 | import time
13 | import sys
14 | from sqlalchemy import create_engine, text
15 | from sqlalchemy.exc import OperationalError
16 |
17 | url = os.getenv("DATABASE_URL", "")
18 | if url.startswith("postgresql"):
19 | for attempt in range(30):
20 | try:
21 | engine = create_engine(url, pool_pre_ping=True)
22 | with engine.connect() as conn:
23 | conn.execute(text("SELECT 1"))
24 | print("Database connection established successfully")
25 | break
26 | except Exception as e:
27 | print(f"Waiting for database... (attempt {attempt+1}/30): {e}")
28 | time.sleep(2)
29 | else:
30 | print("Database not ready after waiting, exiting...")
31 | sys.exit(1)
32 | else:
33 | print("No PostgreSQL database configured, skipping connection check")
34 | PY
35 |
36 | echo "Checking if database is initialized..."
37 | # Always run the database initialization script to ensure proper schema
38 | echo "Running database initialization script..."
    39 | # Use "if !" so the failure message still prints under "set -e"
    40 | if ! python /app/docker/init-database.py; then
    41 |     echo "Database initialization failed. Exiting to prevent infinite loop."
    42 |     exit 1
    43 | fi
44 | echo "Database initialization completed successfully"
45 |
46 | # Also run the simple schema fix to ensure task_id column exists
47 | echo "Running schema fix script..."
    48 | # Same guard pattern so the error path is reachable under "set -e"
    49 | if ! python /app/docker/fix-schema.py; then
    50 |     echo "Schema fix failed. Exiting."
    51 |     exit 1
    52 | fi
53 | echo "Schema fix completed successfully"
54 |
55 | echo "Starting application..."
56 | exec gunicorn --bind 0.0.0.0:8080 --worker-class eventlet --workers 1 --timeout 120 "app:create_app()"
57 |
--------------------------------------------------------------------------------
/migrations/versions/058_add_quote_versions.py:
--------------------------------------------------------------------------------
1 | """Add quote versions table for revision history
2 |
3 | Revision ID: 058
4 | Revises: 057
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '058'
13 | down_revision = '057'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | """Create quote_versions table"""
20 | op.create_table('quote_versions',
21 | sa.Column('id', sa.Integer(), nullable=False),
22 | sa.Column('quote_id', sa.Integer(), nullable=False),
23 | sa.Column('version_number', sa.Integer(), nullable=False),
24 | sa.Column('quote_data', sa.Text(), nullable=False),
25 | sa.Column('changed_by', sa.Integer(), nullable=False),
26 | sa.Column('changed_at', sa.DateTime(), nullable=False),
27 | sa.Column('change_summary', sa.String(length=500), nullable=True),
28 | sa.Column('fields_changed', sa.String(length=500), nullable=True),
29 | sa.ForeignKeyConstraint(['quote_id'], ['quotes.id'], ondelete='CASCADE'),
30 | sa.ForeignKeyConstraint(['changed_by'], ['users.id'], ondelete='CASCADE'),
31 | sa.PrimaryKeyConstraint('id')
32 | )
33 | op.create_index('ix_quote_versions_quote_id', 'quote_versions', ['quote_id'], unique=False)
34 | op.create_index('ix_quote_versions_changed_by', 'quote_versions', ['changed_by'], unique=False)
35 | op.create_index('ix_quote_versions_version_number', 'quote_versions', ['quote_id', 'version_number'], unique=True)
36 |
37 |
38 | def downgrade():
39 | """Drop quote_versions table"""
40 | op.drop_index('ix_quote_versions_version_number', table_name='quote_versions')
41 | op.drop_index('ix_quote_versions_changed_by', table_name='quote_versions')
42 | op.drop_index('ix_quote_versions_quote_id', table_name='quote_versions')
43 | op.drop_table('quote_versions')
44 |
45 |
--------------------------------------------------------------------------------
/app/models/client_prepaid_consumption.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from decimal import Decimal
3 |
4 | from app import db
5 |
6 |
7 | class ClientPrepaidConsumption(db.Model):
8 | """Ledger entries tracking which time entries consumed prepaid hours."""
9 |
10 | __tablename__ = "client_prepaid_consumptions"
11 |
12 | id = db.Column(db.Integer, primary_key=True)
13 | client_id = db.Column(db.Integer, db.ForeignKey("clients.id"), nullable=False, index=True)
14 | time_entry_id = db.Column(db.Integer, db.ForeignKey("time_entries.id"), nullable=False, unique=True, index=True)
15 | invoice_id = db.Column(db.Integer, db.ForeignKey("invoices.id"), nullable=True, index=True)
16 | allocation_month = db.Column(db.Date, nullable=False, index=True)
17 | seconds_consumed = db.Column(db.Integer, nullable=False)
18 | created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
19 | updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
20 |
21 | # Relationships
22 | client = db.relationship(
23 | "Client", backref=db.backref("prepaid_consumptions", lazy="dynamic", cascade="all, delete-orphan")
24 | )
25 | time_entry = db.relationship("TimeEntry", backref=db.backref("prepaid_consumption", uselist=False))
26 | invoice = db.relationship("Invoice", backref=db.backref("prepaid_consumptions", lazy="dynamic"))
27 |
28 | def __repr__(self):
29 | month = self.allocation_month.isoformat() if self.allocation_month else "?"
    30 |         return f"<ClientPrepaidConsumption client={self.client_id} month={month} hours={self.hours_consumed}>"
31 |
32 | @property
33 | def hours_consumed(self) -> Decimal:
34 | """Return consumed prepaid hours as Decimal."""
35 | if not self.seconds_consumed:
36 | return Decimal("0")
37 | return (Decimal(self.seconds_consumed) / Decimal("3600")).quantize(Decimal("0.01"))
38 |
--------------------------------------------------------------------------------
/app/schemas/user_schema.py:
--------------------------------------------------------------------------------
1 | """
2 | Schemas for user serialization and validation.
3 | """
4 |
5 | from marshmallow import Schema, fields, validate
6 | from app.constants import UserRole
7 |
8 |
9 | class UserSchema(Schema):
10 | """Schema for user serialization"""
11 |
12 | id = fields.Int(dump_only=True)
13 | username = fields.Str(required=True, validate=validate.Length(max=100))
14 | email = fields.Email(allow_none=True)
15 | full_name = fields.Str(allow_none=True, validate=validate.Length(max=200))
16 | role = fields.Str(validate=validate.OneOf([r.value for r in UserRole]))
17 | is_active = fields.Bool(missing=True)
18 | preferred_language = fields.Str(allow_none=True)
19 | created_at = fields.DateTime(dump_only=True)
20 | updated_at = fields.DateTime(dump_only=True)
21 |
22 | # Nested fields (when relations are loaded)
23 | favorite_projects = fields.Nested("ProjectSchema", many=True, dump_only=True, allow_none=True)
24 |
25 |
26 | class UserCreateSchema(Schema):
27 | """Schema for creating a user"""
28 |
29 | username = fields.Str(required=True, validate=validate.Length(min=1, max=100))
30 | email = fields.Email(allow_none=True)
31 | full_name = fields.Str(allow_none=True, validate=validate.Length(max=200))
32 | role = fields.Str(missing=UserRole.USER.value, validate=validate.OneOf([r.value for r in UserRole]))
33 | is_active = fields.Bool(missing=True)
34 | preferred_language = fields.Str(allow_none=True)
35 |
36 |
37 | class UserUpdateSchema(Schema):
38 | """Schema for updating a user"""
39 |
40 | username = fields.Str(allow_none=True, validate=validate.Length(min=1, max=100))
41 | email = fields.Email(allow_none=True)
42 | full_name = fields.Str(allow_none=True, validate=validate.Length(max=200))
43 | role = fields.Str(allow_none=True, validate=validate.OneOf([r.value for r in UserRole]))
44 | is_active = fields.Bool(allow_none=True)
45 | preferred_language = fields.Str(allow_none=True)
46 |
--------------------------------------------------------------------------------
/docs/bugfixes/template_application_fix.md:
--------------------------------------------------------------------------------
1 | # Bug Fix: Template Application Error
2 |
3 | ## Issue
4 | When users tried to select and apply a template from the start timer interface, they received an error message stating "can't apply the template".
5 |
6 | ## Root Cause
7 | There were duplicate route definitions for the template API endpoints:
8 |
9 | 1. **In `app/routes/api.py` (lines 1440-1465)** - Registered first in the application
10 | - `/api/templates/` (GET)
    11 |    - `/api/templates/<id>/use` (POST)
12 | - **Problem**: Missing `TimeEntryTemplate` import, causing `NameError` when routes were accessed
13 |
14 | 2. **In `app/routes/time_entry_templates.py` (lines 301-326)** - Registered later
15 | - Same routes with proper implementation
16 | - Had correct imports and error handling
17 | - Never executed due to duplicate route conflict
18 |
19 | Since the `api_bp` blueprint was registered before `time_entry_templates_bp` in `app/__init__.py`, Flask used the broken routes from `api.py`, causing the error.
20 |
21 | ## Solution
22 | Removed the duplicate route definitions from `app/routes/api.py` (lines 1440-1465), allowing the proper implementation in `app/routes/time_entry_templates.py` to be used.
23 |
24 | ### Code Changes
25 | **File**: `app/routes/api.py`
26 | - **Removed**: Lines 1440-1465 containing duplicate `/api/templates/` routes
27 | - **Reason**: Eliminate route conflict and use proper implementation
28 |
29 | ## Testing
30 | All existing tests pass:
31 | - ✅ `test_get_templates_api` - Get all templates
32 | - ✅ `test_get_single_template_api` - Get specific template
33 | - ✅ `test_use_template_api` - Mark template as used
34 | - ✅ `test_start_timer_from_template` - Start timer from template
35 |
36 | ## Impact
37 | - **Users can now successfully apply templates when starting timers**
38 | - Template usage tracking works correctly
39 | - No other functionality affected
40 |
41 | ## Date Fixed
42 | October 31, 2025
43 |
44 |
--------------------------------------------------------------------------------
/tests/test_api_kanban_v1.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from app import create_app, db
4 | from app.models import User, ApiToken
5 |
6 |
7 | @pytest.fixture
8 | def app():
9 | app = create_app(
10 | {
11 | "TESTING": True,
12 | "SQLALCHEMY_DATABASE_URI": "sqlite:///test_api_kanban.sqlite",
13 | "WTF_CSRF_ENABLED": False,
14 | }
15 | )
16 | with app.app_context():
17 | db.create_all()
18 | yield app
19 | db.session.remove()
20 | db.drop_all()
21 |
22 |
23 | @pytest.fixture
24 | def client(app):
25 | return app.test_client()
26 |
27 |
28 | @pytest.fixture
29 | def user(app):
30 | u = User(username="kbuser", email="kb@example.com", role="admin")
31 | u.is_active = True
32 | db.session.add(u)
33 | db.session.commit()
34 | return u
35 |
36 |
37 | @pytest.fixture
38 | def api_token(app, user):
39 | token, plain = ApiToken.create_token(user_id=user.id, name="Kanban Token", scopes="read:tasks,write:tasks")
40 | db.session.add(token)
41 | db.session.commit()
42 | return plain
43 |
44 |
45 | def _auth(t):
46 | return {"Authorization": f"Bearer {t}", "Content-Type": "application/json"}
47 |
48 |
49 | def test_kanban_columns(client, api_token):
50 | # list (may be empty)
51 | r = client.get("/api/v1/kanban/columns", headers=_auth(api_token))
52 | assert r.status_code == 200
53 |
54 | # create
55 | payload = {"key": "custom", "label": "Custom", "is_system": False}
56 | r = client.post("/api/v1/kanban/columns", headers=_auth(api_token), json=payload)
57 | assert r.status_code == 201
58 | col_id = r.get_json()["column"]["id"]
59 |
60 | # reorder
61 | r = client.post("/api/v1/kanban/columns/reorder", headers=_auth(api_token), json={"column_ids": [col_id]})
62 | assert r.status_code == 200
63 |
64 | # delete
65 | r = client.delete(f"/api/v1/kanban/columns/{col_id}", headers=_auth(api_token))
66 | assert r.status_code == 200
67 |
--------------------------------------------------------------------------------
/migrations/add_project_costs.sql:
--------------------------------------------------------------------------------
1 | -- Migration: Add project_costs table for tracking expenses beyond hourly work
2 | -- Date: 2024-01-01
3 | -- Description: This migration adds support for tracking project costs/expenses
4 | -- such as travel, materials, services, equipment, etc.
5 |
6 | -- Create project_costs table
7 | CREATE TABLE IF NOT EXISTS project_costs (
8 | id SERIAL PRIMARY KEY,
9 | project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
10 | user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
11 | description VARCHAR(500) NOT NULL,
12 | category VARCHAR(50) NOT NULL,
13 | amount NUMERIC(10, 2) NOT NULL,
14 | currency_code VARCHAR(3) NOT NULL DEFAULT 'EUR',
15 | billable BOOLEAN NOT NULL DEFAULT TRUE,
16 | invoiced BOOLEAN NOT NULL DEFAULT FALSE,
17 | invoice_id INTEGER REFERENCES invoices(id) ON DELETE SET NULL,
18 | cost_date DATE NOT NULL,
19 | notes TEXT,
20 | receipt_path VARCHAR(500),
21 | created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
22 | updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
23 | );
24 |
25 | -- Create indexes for better query performance
26 | CREATE INDEX IF NOT EXISTS ix_project_costs_project_id ON project_costs(project_id);
27 | CREATE INDEX IF NOT EXISTS ix_project_costs_user_id ON project_costs(user_id);
28 | CREATE INDEX IF NOT EXISTS ix_project_costs_cost_date ON project_costs(cost_date);
29 | CREATE INDEX IF NOT EXISTS ix_project_costs_invoice_id ON project_costs(invoice_id);
30 |
31 | -- Add comment to table
32 | COMMENT ON TABLE project_costs IS 'Tracks project expenses beyond hourly work (travel, materials, services, etc.)';
33 | COMMENT ON COLUMN project_costs.category IS 'Category of cost: travel, materials, services, equipment, software, other';
34 | COMMENT ON COLUMN project_costs.billable IS 'Whether this cost should be billed to the client';
35 | COMMENT ON COLUMN project_costs.invoiced IS 'Whether this cost has been included in an invoice';
36 |
37 |
--------------------------------------------------------------------------------
/migrations/versions/055_add_quote_attachments.py:
--------------------------------------------------------------------------------
1 | """Add quote attachments table
2 |
3 | Revision ID: 055
4 | Revises: 054
5 | Create Date: 2025-01-27
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '055'
13 | down_revision = '054'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | """Create quote_attachments table"""
20 | op.create_table('quote_attachments',
21 | sa.Column('id', sa.Integer(), nullable=False),
22 | sa.Column('quote_id', sa.Integer(), nullable=False),
23 | sa.Column('filename', sa.String(length=255), nullable=False),
24 | sa.Column('original_filename', sa.String(length=255), nullable=False),
25 | sa.Column('file_path', sa.String(length=500), nullable=False),
26 | sa.Column('file_size', sa.Integer(), nullable=False),
27 | sa.Column('mime_type', sa.String(length=100), nullable=True),
28 | sa.Column('description', sa.Text(), nullable=True),
29 | sa.Column('is_visible_to_client', sa.Boolean(), nullable=False, server_default='false'),
30 | sa.Column('uploaded_by', sa.Integer(), nullable=False),
31 | sa.Column('uploaded_at', sa.DateTime(), nullable=False),
32 | sa.ForeignKeyConstraint(['quote_id'], ['quotes.id'], ondelete='CASCADE'),
33 | sa.ForeignKeyConstraint(['uploaded_by'], ['users.id'], ondelete='CASCADE'),
34 | sa.PrimaryKeyConstraint('id')
35 | )
36 | op.create_index('ix_quote_attachments_quote_id', 'quote_attachments', ['quote_id'], unique=False)
37 | op.create_index('ix_quote_attachments_uploaded_by', 'quote_attachments', ['uploaded_by'], unique=False)
38 |
39 |
40 | def downgrade():
41 | """Drop quote_attachments table"""
42 | op.drop_index('ix_quote_attachments_uploaded_by', table_name='quote_attachments')
43 | op.drop_index('ix_quote_attachments_quote_id', table_name='quote_attachments')
44 | op.drop_table('quote_attachments')
45 |
46 |
--------------------------------------------------------------------------------
/migrations/versions/082_add_global_integrations.py:
--------------------------------------------------------------------------------
1 | """Add global integrations support
2 |
3 | Revision ID: 082_add_global_integrations
4 | Revises: 081_add_int_oauth_creds
5 | Create Date: 2025-01-20 12:00:00.000000
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '082_add_global_integrations'
14 | down_revision = '081_add_int_oauth_creds'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('integrations', schema=None) as batch_op:
21 | # Add is_global flag
22 | batch_op.add_column(sa.Column('is_global', sa.Boolean(), nullable=False, server_default='0'))
23 |
24 | # Make user_id nullable for global integrations
25 | batch_op.alter_column('user_id',
26 | existing_type=sa.Integer(),
27 | nullable=True)
28 |
29 | # Add index for global integrations
30 | batch_op.create_index('ix_integrations_is_global', ['is_global'], unique=False)
31 |
    32 |     # Note: Uniqueness for global integrations (one per provider) is enforced at the
    33 |     # application level to keep this migration portable across SQLite and PostgreSQL
34 |
35 |
36 | def downgrade():
37 | with op.batch_alter_table('integrations', schema=None) as batch_op:
38 | # Remove index
39 | batch_op.drop_index('ix_integrations_is_global')
40 |
41 | # Make user_id required again (set to first user for existing records)
42 | # First, set user_id for any null values
43 | op.execute("UPDATE integrations SET user_id = (SELECT id FROM users LIMIT 1) WHERE user_id IS NULL")
44 |
45 | batch_op.alter_column('user_id',
46 | existing_type=sa.Integer(),
47 | nullable=False)
48 |
49 | # Remove is_global column
50 | batch_op.drop_column('is_global')
51 |
52 |
--------------------------------------------------------------------------------
/docs/AVATAR_PERSISTENCE_SUMMARY.md:
--------------------------------------------------------------------------------
1 | # Avatar Persistence Update - Summary
2 |
3 | ## Quick Summary
4 |
5 | ✅ **Profile pictures now persist between Docker updates!**
6 |
7 | User avatars are now stored in the persistent `/data` volume instead of the application directory, ensuring they survive container rebuilds and updates.
8 |
9 | ## What to Do
10 |
11 | ### For Existing Installations
12 |
13 | If you have users with existing profile pictures:
14 |
15 | ```bash
16 | # 1. Stop containers
17 | docker-compose down
18 |
19 | # 2. Run migration
20 | docker-compose run --rm app python /app/docker/migrate-avatar-storage.py
21 |
22 | # 3. Start containers
23 | docker-compose up -d
24 | ```
25 |
26 | ### For Fresh Installations
27 |
28 | Nothing! The new location will be used automatically.
29 |
30 | ## Changes Made
31 |
32 | | Component | Change |
33 | |-----------|--------|
34 | | **Storage Location** | `app/static/uploads/avatars/` → `/data/uploads/avatars/` |
35 | | **Persistence** | ❌ Lost on update → ✅ Persists across updates |
36 | | **Docker Volume** | Uses existing `app_data` volume |
37 | | **URL Structure** | `/uploads/avatars/{filename}` (unchanged) |
38 |
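A minimal sketch, assuming the path handling sits alongside the upload logic in `app/routes/auth.py` (the function name and fallback behavior are illustrative), of how the persistent location can be resolved:

```python
# Illustrative sketch only; the real path handling in app/routes/auth.py may differ.
import os


def avatar_upload_dir() -> str:
    """Prefer the persistent /data volume; fall back to the old in-app path for dev runs."""
    base = "/data/uploads/avatars"
    if not os.path.isdir("/data"):
        base = os.path.join("app", "static", "uploads", "avatars")
    os.makedirs(base, exist_ok=True)
    return base
```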
39 | ## Files Modified
40 |
41 | 1. ✅ `app/routes/auth.py` - Updated upload folder path
42 | 2. ✅ `app/models/user.py` - Updated avatar path method
43 | 3. ✅ `docker/migrate-avatar-storage.py` - New migration script
44 | 4. ✅ `docs/AVATAR_STORAGE_MIGRATION.md` - Full migration guide
45 |
46 | ## Verification
47 |
48 | Test that avatars work correctly:
49 |
50 | 1. ✅ Existing avatars display correctly
51 | 2. ✅ New avatar uploads work
52 | 3. ✅ Avatar removal works
53 | 4. ✅ Avatars persist after `docker-compose down && docker-compose up`
54 |
55 | ## See Also
56 |
57 | - 📖 [Full Migration Guide](./AVATAR_STORAGE_MIGRATION.md)
58 | - 📖 [Logo Upload System](./LOGO_UPLOAD_SYSTEM_README.md) (similar persistent storage)
59 |
60 | ---
61 |
62 | **Author:** AI Assistant
63 | **Date:** October 2025
64 | **Related Issue:** Profile pictures persistence between versions
65 |
66 |
--------------------------------------------------------------------------------
/migrations/versions/085_add_project_custom_fields.py:
--------------------------------------------------------------------------------
1 | """Add custom fields to projects
2 |
3 | Revision ID: 085_add_project_custom_fields
     4 | Revises: 084_custom_field_definitions
5 | Create Date: 2025-01-28
6 |
7 | This migration adds:
8 | - custom_fields JSON column to projects table for flexible custom data storage
9 | """
10 | from alembic import op
11 | import sqlalchemy as sa
12 | from sqlalchemy.dialects import postgresql
13 |
14 |
15 | # revision identifiers, used by Alembic.
16 | revision = '085_add_project_custom_fields'
17 | down_revision = '084_custom_field_definitions'
18 | branch_labels = None
19 | depends_on = None
20 |
21 |
22 | def _has_column(inspector, table_name: str, column_name: str) -> bool:
23 | """Check if a column exists in a table"""
24 | try:
25 | return column_name in [col['name'] for col in inspector.get_columns(table_name)]
26 | except Exception:
27 | return False
28 |
29 |
30 | def upgrade():
31 | """Add custom_fields to projects table"""
32 | bind = op.get_bind()
33 | inspector = sa.inspect(bind)
34 |
35 | # Add custom_fields column to projects table if it doesn't exist
36 | if 'projects' in inspector.get_table_names():
37 | if not _has_column(inspector, 'projects', 'custom_fields'):
38 | # Use JSONB for PostgreSQL, JSON for SQLite
39 | try:
40 | op.add_column('projects', sa.Column('custom_fields', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
41 | except Exception:
42 | # Fallback to JSON for SQLite
43 | op.add_column('projects', sa.Column('custom_fields', sa.JSON(), nullable=True))
44 |
45 |
46 | def downgrade():
47 | """Remove custom_fields from projects table"""
48 | bind = op.get_bind()
49 | inspector = sa.inspect(bind)
50 |
51 | # Remove custom_fields column from projects table
52 | if 'projects' in inspector.get_table_names():
53 | if _has_column(inspector, 'projects', 'custom_fields'):
54 | op.drop_column('projects', 'custom_fields')
55 |
56 |
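An alternative, hedged sketch (not what the migration above does) that selects the JSON type from the bind's dialect instead of relying on try/except:

```python
# Sketch only: dialect-based type selection as an alternative to the try/except above.
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def json_column_type(bind) -> sa.types.TypeEngine:
    """Return JSONB on PostgreSQL and the generic JSON type everywhere else."""
    if bind.dialect.name == "postgresql":
        return postgresql.JSONB(astext_type=sa.Text())
    return sa.JSON()
```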
--------------------------------------------------------------------------------
/app/config/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Configuration module for TimeTracker.
3 |
4 | This module contains:
5 | - Flask application configuration (Config, ProductionConfig, etc.)
6 | - Analytics configuration for telemetry
7 | """
8 |
9 | # Import Flask configuration classes from parent config.py
10 | # We need to import from the parent app module to avoid circular imports
11 | import sys
12 | import os
13 |
14 | # Import analytics configuration
15 | from app.config.analytics_defaults import get_analytics_config, has_analytics_configured
16 |
17 | # Import Flask Config classes from the config.py file in parent directory
18 | # The config.py was shadowed when we created this config/ package
19 | # So we need to import it properly
20 | try:
21 | # Try to import from a renamed file if it exists
22 | from app.flask_config import Config, ProductionConfig, DevelopmentConfig, TestingConfig
23 | except ImportError:
24 | # If the file wasn't renamed, we need to import it differently
    25 |     # Load the shadowed config.py directly from its file path via importlib
26 | import importlib.util
27 |
28 | config_py_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "config.py")
29 | if os.path.exists(config_py_path):
30 | spec = importlib.util.spec_from_file_location("flask_config_module", config_py_path)
31 | flask_config = importlib.util.module_from_spec(spec)
32 | spec.loader.exec_module(flask_config)
33 | Config = flask_config.Config
34 | ProductionConfig = flask_config.ProductionConfig
35 | DevelopmentConfig = flask_config.DevelopmentConfig
36 | TestingConfig = flask_config.TestingConfig
37 | else:
38 | # Fallback - create minimal config
39 | class Config:
40 | pass
41 |
42 | ProductionConfig = Config
43 | DevelopmentConfig = Config
44 | TestingConfig = Config
45 |
46 | __all__ = [
47 | "get_analytics_config",
48 | "has_analytics_configured",
49 | "Config",
50 | "ProductionConfig",
51 | "DevelopmentConfig",
52 | "TestingConfig",
53 | ]
54 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 120
3 | target-version = ['py311']
4 | include = '\.pyi?$'
5 | extend-exclude = '''
6 | /(
7 | # directories
8 | \.eggs
9 | | \.git
10 | | \.hg
11 | | \.mypy_cache
12 | | \.tox
13 | | \.venv
14 | | venv
15 | | _build
16 | | buck-out
17 | | build
18 | | dist
19 | | migrations
20 | )/
21 | '''
22 |
23 | [tool.pylint.messages_control]
24 | disable = [
25 | "C0111", # missing-docstring
26 | "C0103", # invalid-name
27 | "R0903", # too-few-public-methods
28 | "R0913", # too-many-arguments
29 | ]
30 |
31 | [tool.pylint.format]
32 | max-line-length = 120
33 |
34 | [tool.bandit]
35 | exclude_dirs = ["tests", "migrations", "venv", ".venv"]
36 | skips = ["B101"] # Skip assert_used test
37 |
38 | [tool.coverage.run]
39 | source = ["app"]
40 | omit = [
41 | "*/tests/*",
42 | "*/test_*.py",
43 | "*/__pycache__/*",
44 | "*/venv/*",
45 | "*/env/*",
46 | "*/migrations/*",
47 | "app/utils/pdf_generator.py",
48 | "app/utils/pdf_generator_fallback.py",
49 | ]
50 |
51 | [tool.coverage.report]
52 | precision = 2
53 | show_missing = true
54 | skip_covered = false
55 | exclude_lines = [
56 | "pragma: no cover",
57 | "def __repr__",
58 | "raise AssertionError",
59 | "raise NotImplementedError",
60 | "if __name__ == .__main__.:",
61 | "if TYPE_CHECKING:",
62 | "@abstractmethod",
63 | ]
64 |
65 | [tool.mypy]
66 | python_version = "3.11"
67 | warn_return_any = true
68 | warn_unused_configs = true
69 | disallow_untyped_defs = false
70 | ignore_missing_imports = true
71 | exclude = [
72 | "migrations/",
73 | "tests/",
74 | "venv/",
75 | ".venv/",
76 | ]
77 |
78 | [tool.pytest.ini_options]
79 | testpaths = ["tests"]
80 | python_files = ["test_*.py"]
81 | python_classes = ["Test*"]
82 | python_functions = ["test_*"]
83 | addopts = [
84 | "-v",
85 | "--tb=short",
86 | "--strict-markers",
87 | "--color=yes",
    88 |     "-W", "ignore::DeprecationWarning",
    89 |     "-W", "ignore::PendingDeprecationWarning",
90 | "--durations=10",
91 | ]
92 |
--------------------------------------------------------------------------------
/app/schemas/expense_schema.py:
--------------------------------------------------------------------------------
1 | """
2 | Schemas for expense serialization and validation.
3 | """
4 |
5 | from marshmallow import Schema, fields, validate
6 | from decimal import Decimal
7 |
8 |
9 | class ExpenseSchema(Schema):
10 | """Schema for expense serialization"""
11 |
12 | id = fields.Int(dump_only=True)
13 | project_id = fields.Int(required=True)
14 | amount = fields.Decimal(required=True, places=2)
15 | description = fields.Str(required=True, validate=validate.Length(max=500))
16 | date = fields.Date(required=True)
17 | category_id = fields.Int(allow_none=True)
18 | billable = fields.Bool(missing=False)
19 | receipt_path = fields.Str(allow_none=True)
20 | created_by = fields.Int(required=True)
21 | created_at = fields.DateTime(dump_only=True)
22 | updated_at = fields.DateTime(dump_only=True)
23 |
24 | # Nested fields
25 | project = fields.Nested("ProjectSchema", dump_only=True, allow_none=True)
26 | category = fields.Nested("ExpenseCategorySchema", dump_only=True, allow_none=True)
27 |
28 |
29 | class ExpenseCreateSchema(Schema):
30 | """Schema for creating an expense"""
31 |
32 | project_id = fields.Int(required=True)
33 | amount = fields.Decimal(required=True, places=2, validate=validate.Range(min=Decimal("0.01")))
34 | description = fields.Str(required=True, validate=validate.Length(min=1, max=500))
35 | date = fields.Date(required=True)
36 | category_id = fields.Int(allow_none=True)
37 | billable = fields.Bool(missing=False)
38 | receipt_path = fields.Str(allow_none=True)
39 |
40 |
41 | class ExpenseUpdateSchema(Schema):
42 | """Schema for updating an expense"""
43 |
44 | project_id = fields.Int(allow_none=True)
45 | amount = fields.Decimal(allow_none=True, places=2, validate=validate.Range(min=Decimal("0.01")))
46 | description = fields.Str(allow_none=True, validate=validate.Length(max=500))
47 | date = fields.Date(allow_none=True)
48 | category_id = fields.Int(allow_none=True)
49 | billable = fields.Bool(allow_none=True)
50 | receipt_path = fields.Str(allow_none=True)
51 |
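A hedged usage sketch (not repository code) showing how `ExpenseCreateSchema` validates an inbound payload; the helper name is illustrative:

```python
# Illustrative helper; only ExpenseCreateSchema comes from this module.
from marshmallow import ValidationError

from app.schemas.expense_schema import ExpenseCreateSchema


def parse_expense_payload(payload: dict) -> dict:
    """Return validated/deserialized fields or raise ValueError with per-field messages."""
    try:
        return ExpenseCreateSchema().load(payload)
    except ValidationError as err:
        # err.messages maps field names to lists of error strings.
        raise ValueError(err.messages) from err
```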
--------------------------------------------------------------------------------
/docker/test-db.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Simple database test script for TimeTracker
4 | This script tests database connectivity and shows initialization status.
5 | """
6 |
7 | import os
8 | import sys
9 | from sqlalchemy import create_engine, text, inspect
10 |
11 | def test_database():
12 | """Test database connectivity and show status"""
13 | url = os.getenv("DATABASE_URL", "")
14 |
15 | if not url.startswith("postgresql"):
16 | print("No PostgreSQL database configured")
17 | return
18 |
19 | print(f"Testing database connection to: {url}")
20 |
21 | try:
22 | # Test connection
23 | engine = create_engine(url, pool_pre_ping=True)
24 | with engine.connect() as conn:
25 | result = conn.execute(text("SELECT version()"))
26 | version = result.fetchone()[0]
27 | print(f"✓ Database connection successful")
28 | print(f" PostgreSQL version: {version}")
29 |
30 | # Check tables
31 | inspector = inspect(engine)
32 | existing_tables = inspector.get_table_names()
33 | required_tables = ['users', 'projects', 'time_entries', 'settings']
34 |
35 | print(f"\nDatabase tables:")
36 | for table in required_tables:
37 | if table in existing_tables:
38 | print(f" ✓ {table}")
39 | else:
40 | print(f" ✗ {table} (missing)")
41 |
42 | missing_tables = [table for table in required_tables if table not in existing_tables]
43 |
44 | if missing_tables:
45 | print(f"\nDatabase is NOT fully initialized")
46 | print(f"Missing tables: {missing_tables}")
47 | return False
48 | else:
49 | print(f"\n✓ Database is fully initialized")
50 | return True
51 |
52 | except Exception as e:
53 | print(f"✗ Database connection failed: {e}")
54 | return False
55 |
56 | if __name__ == "__main__":
57 | success = test_database()
58 | sys.exit(0 if success else 1)
59 |
--------------------------------------------------------------------------------
/docker/fix-upload-permissions.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Fix script for upload directory permissions
3 | # Run this in your Docker container to resolve file upload permission issues
4 |
5 | echo "=== Fixing upload directory permissions ==="
6 |
7 | # Define the upload directories that need permissions fixed
8 | UPLOAD_DIRS=(
9 | "/app/app/static/uploads"
10 | "/app/app/static/uploads/logos"
11 | "/app/app/static/uploads/avatars"
12 | "/app/static/uploads"
13 | "/app/static/uploads/logos"
14 | "/app/static/uploads/avatars"
15 | )
16 |
17 | # Function to fix directory permissions
18 | fix_directory() {
19 | local dir="$1"
20 | if [ -d "$dir" ]; then
21 | echo "Fixing permissions for: $dir"
22 | chmod 755 "$dir"
23 |
24 | # Test write permissions
25 | local test_file="$dir/test_permissions.tmp"
26 | if echo "test" > "$test_file" 2>/dev/null; then
27 | rm -f "$test_file"
28 | echo "✓ Write permission test passed for: $dir"
29 | else
30 | echo "⚠ Write permission test failed for: $dir"
31 | fi
32 | else
33 | echo "Creating directory: $dir"
34 | mkdir -p "$dir"
35 | chmod 755 "$dir"
36 | echo "✓ Created directory: $dir"
37 | fi
38 | }
39 |
40 | # Fix permissions for all upload directories
41 | for dir in "${UPLOAD_DIRS[@]}"; do
42 | fix_directory "$dir"
43 | done
44 |
45 | # Also fix the parent static directories
46 | STATIC_DIRS=("/app/app/static" "/app/static")
47 | for dir in "${STATIC_DIRS[@]}"; do
48 | if [ -d "$dir" ]; then
49 | echo "Fixing permissions for static directory: $dir"
50 | chmod 755 "$dir"
51 | echo "✓ Set static directory permissions for: $dir"
52 | fi
53 | done
54 |
55 | echo ""
56 | echo "=== Permission fix completed ==="
57 | echo "The application should now be able to upload logo files."
58 |
59 | # Show current permissions
60 | echo ""
61 | echo "Current directory permissions:"
62 | for dir in "${UPLOAD_DIRS[@]}"; do
63 | if [ -d "$dir" ]; then
64 | ls -ld "$dir"
65 | fi
66 | done
67 |
--------------------------------------------------------------------------------
/app/schemas/task_schema.py:
--------------------------------------------------------------------------------
1 | """
2 | Schemas for task serialization and validation.
3 | """
4 |
5 | from marshmallow import Schema, fields, validate
6 | from app.constants import TaskStatus
7 |
8 |
9 | class TaskSchema(Schema):
10 | """Schema for task serialization"""
11 |
12 | id = fields.Int(dump_only=True)
13 | name = fields.Str(required=True, validate=validate.Length(max=200))
14 | description = fields.Str(allow_none=True)
15 | project_id = fields.Int(required=True)
16 | assignee_id = fields.Int(allow_none=True)
17 | status = fields.Str(validate=validate.OneOf([s.value for s in TaskStatus]))
18 | priority = fields.Str(validate=validate.OneOf(["low", "medium", "high", "urgent"]))
19 | due_date = fields.Date(allow_none=True)
20 | created_by = fields.Int(required=True)
21 | created_at = fields.DateTime(dump_only=True)
22 | updated_at = fields.DateTime(dump_only=True)
23 |
24 | # Nested fields
25 | project = fields.Nested("ProjectSchema", dump_only=True, allow_none=True)
26 | assignee = fields.Nested("UserSchema", dump_only=True, allow_none=True)
27 |
28 |
29 | class TaskCreateSchema(Schema):
30 | """Schema for creating a task"""
31 |
32 | name = fields.Str(required=True, validate=validate.Length(min=1, max=200))
33 | description = fields.Str(allow_none=True)
34 | project_id = fields.Int(required=True)
35 | assignee_id = fields.Int(allow_none=True)
36 | priority = fields.Str(missing="medium", validate=validate.OneOf(["low", "medium", "high", "urgent"]))
37 | due_date = fields.Date(allow_none=True)
38 |
39 |
40 | class TaskUpdateSchema(Schema):
41 | """Schema for updating a task"""
42 |
43 | name = fields.Str(allow_none=True, validate=validate.Length(min=1, max=200))
44 | description = fields.Str(allow_none=True)
45 | assignee_id = fields.Int(allow_none=True)
46 | status = fields.Str(allow_none=True, validate=validate.OneOf([s.value for s in TaskStatus]))
47 | priority = fields.Str(allow_none=True, validate=validate.OneOf(["low", "medium", "high", "urgent"]))
48 | due_date = fields.Date(allow_none=True)
49 |
--------------------------------------------------------------------------------
/app/utils/rate_limiting.py:
--------------------------------------------------------------------------------
1 | """
2 | Rate limiting utilities and helpers.
3 | """
4 |
5 | from typing import Callable, Optional, Dict, Any
6 | from functools import wraps
7 | from flask import request, current_app
8 | from flask_limiter import Limiter
9 | from flask_limiter.util import get_remote_address
10 |
11 |
12 | def get_rate_limit_key() -> str:
13 | """
14 | Get rate limit key for current request.
15 |
16 | Uses API token if available, otherwise IP address.
17 | """
18 | # Check for API token
19 | if hasattr(request, "api_user") and request.api_user:
20 | return f"api_token:{request.api_user.id}"
21 |
22 | # Check for authenticated user
23 | from flask_login import current_user
24 |
25 | if current_user and current_user.is_authenticated:
26 | return f"user:{current_user.id}"
27 |
28 | # Fall back to IP address
29 | return get_remote_address()
30 |
31 |
32 | def rate_limit(per_minute: Optional[int] = None, per_hour: Optional[int] = None, per_day: Optional[int] = None):
33 | """
34 | Decorator for rate limiting endpoints.
35 |
36 | Args:
37 | per_minute: Requests per minute
38 | per_hour: Requests per hour
39 | per_day: Requests per day
40 |
41 | Usage:
42 | @rate_limit(per_minute=60, per_hour=1000)
43 | def my_endpoint():
44 | pass
45 | """
46 |
47 | def decorator(func: Callable) -> Callable:
48 | @wraps(func)
49 | def wrapper(*args, **kwargs):
50 | # Rate limiting is handled by Flask-Limiter middleware
51 | # This decorator is mainly for documentation
52 | return func(*args, **kwargs)
53 |
54 | return wrapper
55 |
56 | return decorator
57 |
58 |
59 | def get_rate_limit_info() -> Dict[str, Any]:
60 | """
61 | Get rate limit information for current request.
62 |
63 | Returns:
64 | dict with rate limit info
65 | """
66 | # This would integrate with Flask-Limiter to get current limits
67 | # For now, return default info
68 | return {"limit": 100, "remaining": 99, "reset": None}
69 |
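A hedged wiring sketch (not the application's actual setup) showing how `get_rate_limit_key` could be handed to Flask-Limiter so limits are tracked per token, user, or IP; the route and limit values are illustrative:

```python
# Sketch only: how the key function above could be passed to Flask-Limiter.
from flask import Flask
from flask_limiter import Limiter

from app.utils.rate_limiting import get_rate_limit_key

app = Flask(__name__)
limiter = Limiter(key_func=get_rate_limit_key, app=app, default_limits=["1000 per hour"])


@app.route("/api/ping")
@limiter.limit("60 per minute")
def ping():
    return {"status": "ok"}
```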
--------------------------------------------------------------------------------
/docker/generate-mkcert-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # Auto-generate mkcert certificates in container
3 |
4 | set -e
5 |
6 | CERT_DIR="/certs"
7 | CERT_FILE="$CERT_DIR/cert.pem"
8 | KEY_FILE="$CERT_DIR/key.pem"
9 | CA_FILE="$CERT_DIR/rootCA.pem"
10 |
11 | echo "=========================================="
12 | echo "mkcert Certificate Generator"
13 | echo "=========================================="
14 | echo ""
15 |
16 | # Create cert directory
17 | mkdir -p "$CERT_DIR"
18 |
19 | # Check if certificates exist
20 | if [ -f "$CERT_FILE" ] && [ -f "$KEY_FILE" ]; then
21 | echo "✅ Certificates already exist"
22 | exit 0
23 | fi
24 |
25 | echo "🔧 Generating mkcert certificates..."
26 | echo ""
27 |
28 | # Install local CA (for container use)
29 | mkcert -install
30 |
31 | # Get domains/IPs to include
32 | DOMAINS=${CERT_DOMAINS:-"localhost 127.0.0.1 ::1"}
33 | echo "Generating certificate for: $DOMAINS"
34 | echo ""
35 |
36 | # Generate certificates
37 | mkcert -key-file "$KEY_FILE" -cert-file "$CERT_FILE" $DOMAINS
38 |
39 | # Copy CA certificate for user to install on host
40 | cp "$(mkcert -CAROOT)/rootCA.pem" "$CA_FILE" 2>/dev/null || true
41 |
42 | chmod 644 "$CERT_FILE" "$CA_FILE" 2>/dev/null || true
43 | chmod 600 "$KEY_FILE"
44 |
45 | echo ""
46 | echo "✅ mkcert certificates generated!"
47 | echo ""
48 | echo "📋 Next steps:"
49 | echo "   1. The certificates are in: $CERT_DIR inside the container (nginx/ssl/ on the host)"
50 | echo " 2. To avoid browser warnings, install rootCA.pem on your host:"
51 | echo ""
52 | echo " Windows:"
53 | echo " - Double-click nginx/ssl/rootCA.pem"
54 | echo " - Install to: Trusted Root Certification Authorities"
55 | echo ""
56 | echo " macOS:"
57 | echo " - Double-click nginx/ssl/rootCA.pem"
58 | echo " - Add to Keychain and mark as trusted"
59 | echo ""
60 | echo " Linux:"
61 | echo " sudo cp nginx/ssl/rootCA.pem /usr/local/share/ca-certificates/mkcert.crt"
62 | echo " sudo update-ca-certificates"
63 | echo ""
64 | echo " 3. Restart your browser"
65 | echo " 4. Access: https://localhost or https://$HOST_IP"
66 | echo ""
67 | echo "=========================================="
68 |
69 |
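After the script has run, the generated certificate can be inspected from Python to confirm it covers the expected names. A small sanity-check sketch (the /certs path matches CERT_DIR above; the cryptography package is an assumed extra dependency):

    # Sanity check: print the expiry date and the DNS/IP names covered by the generated cert.
    from cryptography import x509

    with open("/certs/cert.pem", "rb") as fh:
        cert = x509.load_pem_x509_certificate(fh.read())

    san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
    dns_names = san.get_values_for_type(x509.DNSName)
    ip_names = [str(ip) for ip in san.get_values_for_type(x509.IPAddress)]

    print("Valid until:", cert.not_valid_after)
    print("Covers:", ", ".join(dns_names + ip_names))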
--------------------------------------------------------------------------------
/migrations/versions/087_add_salesman_email_mapping.py:
--------------------------------------------------------------------------------
1 | """Add salesman email mapping table
2 |
3 | Revision ID: 087_salesman_email_mapping
4 | Revises: 086_project_client_attachments
5 | Create Date: 2025-01-29
6 |
7 | This migration adds:
8 | - salesman_email_mappings table for mapping salesman initials to email addresses
9 | """
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 | # revision identifiers, used by Alembic.
14 | revision = '087_salesman_email_mapping'
15 | down_revision = '086_project_client_attachments'
16 | branch_labels = None
17 | depends_on = None
18 |
19 |
20 | def upgrade():
21 | """Create salesman_email_mappings table"""
22 | op.create_table('salesman_email_mappings',
23 | sa.Column('id', sa.Integer(), nullable=False),
24 | sa.Column('salesman_initial', sa.String(length=20), nullable=False),
25 | sa.Column('email_address', sa.String(length=255), nullable=True),
26 | sa.Column('email_pattern', sa.String(length=255), nullable=True), # e.g., '{value}@test.de'
27 | sa.Column('domain', sa.String(length=255), nullable=True), # e.g., 'test.de' for pattern-based emails
28 | sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
29 | sa.Column('notes', sa.Text(), nullable=True),
30 | sa.Column('created_at', sa.DateTime(), nullable=False),
31 | sa.Column('updated_at', sa.DateTime(), nullable=False),
32 | sa.PrimaryKeyConstraint('id'),
33 | sa.UniqueConstraint('salesman_initial', name='uq_salesman_email_mapping_initial')
34 | )
35 | op.create_index('ix_salesman_email_mappings_initial', 'salesman_email_mappings', ['salesman_initial'], unique=False)
36 | op.create_index('ix_salesman_email_mappings_active', 'salesman_email_mappings', ['is_active'], unique=False)
37 |
38 |
39 | def downgrade():
40 | """Drop salesman_email_mappings table"""
41 | op.drop_index('ix_salesman_email_mappings_active', table_name='salesman_email_mappings')
42 | op.drop_index('ix_salesman_email_mappings_initial', table_name='salesman_email_mappings')
43 | op.drop_table('salesman_email_mappings')
44 |
45 |
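For reference, an ORM class mirroring this table might look like the sketch below (an assumption, not the project's actual model; db is taken to be the application's shared SQLAlchemy instance). The resolve_email helper shows one plausible resolution rule for the two columns: an explicit email_address wins, otherwise email_pattern is expanded with the salesman initials.

    # Sketch of a model matching the salesman_email_mappings table (illustrative only).
    from datetime import datetime
    from typing import Optional

    from app import db  # assumption: the project's shared SQLAlchemy instance


    class SalesmanEmailMapping(db.Model):
        __tablename__ = "salesman_email_mappings"

        id = db.Column(db.Integer, primary_key=True)
        salesman_initial = db.Column(db.String(20), nullable=False, unique=True, index=True)
        email_address = db.Column(db.String(255), nullable=True)
        email_pattern = db.Column(db.String(255), nullable=True)  # e.g. "{value}@test.de"
        domain = db.Column(db.String(255), nullable=True)
        is_active = db.Column(db.Boolean, nullable=False, default=True, index=True)
        notes = db.Column(db.Text, nullable=True)
        created_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
        updated_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow)

        def resolve_email(self) -> Optional[str]:
            """Prefer the explicit address; otherwise expand the pattern with the initials."""
            if self.email_address:
                return self.email_address
            if self.email_pattern:
                return self.email_pattern.format(value=self.salesman_initial)
            return None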
--------------------------------------------------------------------------------
/docker/start-fixed.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | cd /app
4 | export FLASK_APP=app
5 |
6 | echo "=== Starting TimeTracker (Fixed Shell Mode) ==="
7 |
8 | echo "Waiting for database to be ready..."
9 | # Wait for Postgres to be ready
10 | python - <<"PY"
11 | import os
12 | import time
13 | import sys
14 | from sqlalchemy import create_engine, text
15 | from sqlalchemy.exc import OperationalError
16 |
17 | url = os.getenv("DATABASE_URL", "")
18 | if url.startswith("postgresql"):
19 | for attempt in range(30):
20 | try:
21 | engine = create_engine(url, pool_pre_ping=True)
22 | with engine.connect() as conn:
23 | conn.execute(text("SELECT 1"))
24 | print("Database connection established successfully")
25 | break
26 | except Exception as e:
27 | print(f"Waiting for database... (attempt {attempt+1}/30): {e}")
28 | time.sleep(2)
29 | else:
30 | print("Database not ready after waiting, exiting...")
31 | sys.exit(1)
32 | else:
33 | print("No PostgreSQL database configured, skipping connection check")
34 | PY
35 |
36 | echo "=== RUNNING DATABASE INITIALIZATION ==="
37 |
38 | # Step 1: Run SQL database initialization first (creates basic tables including tasks)
39 | echo "Step 1: Running SQL database initialization..."
40 | if python /app/docker/init-database-sql.py; then
41 | echo "✓ SQL database initialization completed"
42 | else
43 | echo "✗ SQL database initialization failed"
44 | exit 1
45 | fi
46 |
47 | # Step 2: Run main database initialization (handles Flask-specific setup)
48 | echo "Step 2: Running main database initialization..."
49 | if python /app/docker/init-database.py; then
50 | echo "✓ Main database initialization completed"
51 | else
52 | echo "✗ Main database initialization failed"
53 | exit 1
54 | fi
55 |
56 | echo "✓ All database initialization completed successfully"
57 |
58 | echo "Starting application..."
59 | # Start gunicorn
60 | exec gunicorn \
61 | --bind 0.0.0.0:8080 \
62 | --worker-class eventlet \
63 | --workers 1 \
64 | --timeout 120 \
65 |     "app:create_app()"
66 |
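The gunicorn target "app:create_app()" (quoted so the shell does not interpret the parentheses) expects the app package to expose an application factory. The project's real factory is not reproduced here; as a reminder of what the target resolves to, a minimal skeleton would be:

    # Minimal skeleton only: what gunicorn's "app:create_app()" target imports from
    # app/__init__.py. The project's actual factory also registers extensions and blueprints.
    from flask import Flask


    def create_app() -> Flask:
        app = Flask(__name__)
        app.config.from_prefixed_env()  # pick up FLASK_* settings from the container environment
        # register db, login manager, blueprints, etc. here
        return app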
--------------------------------------------------------------------------------