├── .dockerignore
├── .flake8
├── .github
└── workflows
│ ├── pythonapp.yml
│ └── pythonpublish.yml
├── .gitignore
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── MANIFEST.in
├── README.md
├── codecov.yml
├── docker-compose.yml
├── mypy.ini
├── pyproject.toml
├── requirements-testing.txt
├── requirements.txt
├── setup.py
└── tracklater
├── __init__.py
├── database.py
├── example_settings.json
├── main.py
├── models.py
├── settings_utils.py
├── static
├── daytimeline.vue.js
├── home.vue.js
├── index.html
├── timeline.vue.js
└── toolbar.vue.js
├── test_settings.py
├── tests
├── __init__.py
├── conftest.py
├── test_activitywatch.py
├── test_app.py
├── test_gitmodule.py
├── test_jira.py
├── test_main.py
├── test_slack.py
├── test_taiga.py
├── test_thyme.py
└── test_toggl.py
├── timemodules
├── __init__.py
├── activitywatch.py
├── clockify.py
├── fixture
│ ├── git_test_data.json
│ ├── search_results.json
│ └── search_results_2.json
├── github.py
├── gitmodule.py
├── interfaces.py
├── jira.py
├── slack.py
├── taiga.py
├── thyme.py
└── toggl.py
├── utils.py
└── views.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | .github
2 |
3 | *.pyc
4 |
5 | docker_run.sh
6 | Dockerfile
7 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 100
3 |
4 | exclude=./tracklater/settings.py,
5 | ./tracklater/user_settings.py,
6 | ./tracklater/example_settings.py,
7 | ./tracklater/test_settings.py,
--------------------------------------------------------------------------------
/.github/workflows/pythonapp.yml:
--------------------------------------------------------------------------------
1 | name: Python application
2 |
3 | on: [push]
4 |
5 | jobs:
6 | build:
7 |
8 | runs-on: ubuntu-latest
9 |
10 | steps:
11 | - uses: actions/checkout@v1
12 | - uses: actions/cache@v1
13 | with:
14 | path: ~/.cache/pip
15 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirement*.txt') }}
16 | restore-keys: |
17 | ${{ runner.os }}-pip-
18 | - name: Set up Python 3.7
19 | uses: actions/setup-python@v1
20 | with:
21 | python-version: 3.7
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | pip install -r requirements-testing.txt
26 | - name: Lint with flake8
27 | run: |
28 | # stop the build if there are Python syntax errors or undefined names
29 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
30 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
31 | flake8 . --count --exit-zero --max-line-length=100 --statistics
32 | - name: Lint with mypy
33 | run: |
34 | mypy tracklater
35 | - name: Test with pytest
36 | run: |
37 | pytest --cov=. --cov-report term
38 | codecov
39 | env:
40 | CODECOV_TOKEN: 9a7b0691-eff9-49e2-89d8-15c51a8df636
41 |
--------------------------------------------------------------------------------
/.github/workflows/pythonpublish.yml:
--------------------------------------------------------------------------------
1 | name: Upload Python Package
2 |
3 | on:
4 | release:
5 | types: [created]
6 |
7 | jobs:
8 | deploy:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v1
12 | - name: Set up Python
13 | uses: actions/setup-python@v1
14 | with:
15 | python-version: '3.x'
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | pip install setuptools wheel twine
20 | - name: Build and publish
21 | env:
22 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
23 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
24 | run: |
25 | python setup.py sdist bdist_wheel
26 | twine upload dist/*
27 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # IPython
76 | profile_default/
77 | ipython_config.py
78 |
79 | # pyenv
80 | .python-version
81 |
82 | # celery beat schedule file
83 | celerybeat-schedule
84 |
85 | # SageMath parsed files
86 | *.sage.py
87 |
88 | # Environments
89 | .env
90 | .venv
91 | env/
92 | venv/
93 | ENV/
94 | env.bak/
95 | venv.bak/
96 |
97 | # Spyder project settings
98 | .spyderproject
99 | .spyproject
100 |
101 | # Rope project settings
102 | .ropeproject
103 |
104 | # mkdocs documentation
105 | /site
106 |
107 | # mypy
108 | .mypy_cache/
109 | .dmypy.json
110 | dmypy.json
111 |
112 |
113 | jira-cache
114 |
115 | #virtualenv
116 | tracklater/thyme
117 | tracklater/*.jira-cache
118 | tracklater/user_settings.py
119 | tracklater/*/*.jira-cache
120 | tracklater/.project
121 | tracklater/database.db
122 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | 1.6.1
2 |
3 | - Bug fixes:
4 | - Fix bug with dot-entries and generation
5 | - Fix using week view on sundays.
6 | - Fix missing commits from non-active branch
7 |
8 | 1.6.0
9 |
10 | - Improved performance
11 | - Multiple bug fixes
12 | - Added grouping & group coloring to all time modules
13 | - Added clockify as a toggl alternative
14 |
15 | 1.5.0
16 |
17 | - Creating new events is improved:
18 | - They "snap" to each other on create
19 | - Project is automatically set
20 | - Issue is automatically set if commit begins with "issue slug"
21 | - The new entry automatically spans all time entries
22 |
23 | - Switched to a week view with buttons to switch weeks
24 |
25 |
26 | 1.4.0
27 |
28 | - Make the page responsive for mobile
29 | - Update the timeline library
30 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at eero.vilpponen@gmail.com. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to contribute
2 |
3 | * Post an issue about anything
4 | * After creating an issue you can also start to work on your own pull request. This could be a module or general improvements.
5 |
6 | # Coding conventions
7 |
8 | * Use basic PEP8 style for python.
9 | * Add type hints if you can. If you can't, it's also fine. Mypy and flake8 are used to validate code.
10 |
11 | # Getting started with the code
12 |
13 | Requirements:
14 | * Python 3.7 with pip
15 |
16 | Clone the repo
17 | ```
18 | git clone git@github.com:Eerovil/TrackLater.git
19 | cd TrackLater
20 | ```
21 | Create a virtualenv (recommended)
22 | ```
23 | mkvirtualenv tracklater -p python3.7 -a .
24 | ```
25 | Install requirements and launch in debug mode.
26 | ```
27 | pip install -r requirements.txt
28 | FLASK_APP=tracklater FLASK_DEBUG=1 python -m flask run
29 | ```
30 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | from ubuntu:20.04
2 |
3 | RUN apt-get update && apt-get -y install \
4 | software-properties-common \
5 | && rm -rf /var/lib/apt/lists/*
6 | RUN add-apt-repository ppa:deadsnakes/ppa
7 |
8 | RUN arch=$(arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/); \
9 | echo "arch: $arch"; \
10 | if [ "$arch" = "arm64" ]; then echo "libz1" > archpackages.txt; else echo "lib32z1-dev" > archpackages.txt; fi;
11 |
12 | RUN sed -i 's|http://archive.ubuntu.com|ftp://mirrors.nic.funet.fi|g' /etc/apt/sources.list
13 | RUN apt-get update && apt-get -y install \
14 | gettext \
15 | build-essential \
16 | git \
17 | libmemcached-dev \
18 | libmysqlclient-dev \
19 | subversion \
20 | libxml2 \
21 | libxml2-dev \
22 | libxslt1.1 \
23 | libxslt1-dev \
24 | libssl-dev \
25 | libffi-dev \
26 | vim \
27 | software-properties-common \
28 | python3-mysqldb \
29 | python3-crypto \
30 | python3-dev \
31 | python3-pip \
32 | python3 \
33 | $(cat archpackages.txt) \
34 | && rm -rf /var/lib/apt/lists/*
35 |
36 | RUN mkdir /code
37 | WORKDIR /code
38 | COPY ./requirements.txt /code/
39 | RUN pip3 install -r requirements.txt
40 |
41 | RUN mkdir /root/.ssh && ln -s /root/.ssh-mount/id_rsa /root/.ssh/id_rsa && chown -R root:root /root/.ssh
42 |
43 | CMD FLASK_APP=tracklater FLASK_DEBUG=1 flask run --host=0.0.0.0 --port=5000
44 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Eerovil
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include tracklater/static/*
2 | include tracklater/timemodules/fixture/*
3 | include tracklater/example_settings.json
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TrackLater
2 |
3 | 
4 |
5 | Screenshot has green/red colors for my current two projects/clients. Commits, thyme entries and slack messages are automatically colored.
6 |
7 | Forgot to track your time for work? TrackLater helps you track time after-the-fact by combining clues and showing your day on a simple timeline view.
8 |
9 | The initial version supports fetching clues from
10 | * [Thyme](https://github.com/sourcegraph/thyme)
11 | * [ActivityWatch](https://github.com/ActivityWatch)
12 | * Git
13 | * Slack
14 |
15 | Time entries can be exported to
16 | * Toggl
17 | * Clockify
18 |
19 | Issues and projects/clients can be fetched from
20 | * Jira
21 | * Taiga
22 | * GitHub
23 | * Toggl (projects/clients)
24 |
25 | # Background
26 |
27 | Everyone who uses time-tracking for their work knows that it can be a real pain, especially if you're a forgetful person like me. I pretty much never remember to start my timers, and when I do, I for sure will not remember to turn them off.
28 |
29 | When working with multiple clients, it can be crucial (billing-wise) to track your time correctly and to be able to differentiate all tasks by client. For people that work 9-5 in-office for the same client and without need to track each task separately this app is probably overkill.
30 |
31 | With this in mind, I built a basic app to use Thyme for passive time-tracking, and Toggl-api for exporting. I quickly found that my workflow was substantially improved by only having to think about time-tracking 1-2 times per week. I've now used this app for about a year, building a new timemodule every now and then.
32 |
33 | TrackLater offers a basic set of features to help all time-trackers get their timesheets in order:
34 | * A timeline overview, which is usually missing from tracking software
35 | * Easily add time entries, with automatically detected projects and responsive UI
36 | * Get all your breadcrumbs, tracks, clues, footsteps in one place
37 |
38 | # Implementation notes
39 |
40 | Every module separates their issues, time-entries and projects by *group*. This makes inter-module communication simple: e.g. commits made in the git repository for *group* x will be attributed to the corresponding Toggl project for *group* x.
41 |
42 | *Groups* are arbitrary and decided by the user when creating their settings file. A good way to choose your number of *groups*
43 | is to create a *group* for each client/work project.
44 |
45 | While all modules are optional, an ~important backbone for TrackLater is [thyme](https://github.com/sourcegraph/thyme)~.
46 | UPDATE: I couldn't get thyme working on my new mac, so the new "backbone" will be ActivityWatch.
47 | The thyme module assumes an implementation where every day is stored in a file named `YYYY-MM-DD.json`. It's recommended to set up an automatic thyme tracking script for this.
48 |
49 | I'm using a basic script to run thyme. It has evolved a bit after about a year of tracking: Sometimes thyme fails tracking and corrupts the whole file,
50 | so I definitely recommend using this script. https://gist.github.com/Eerovil/36d109d531659d24bfafea7111b12e90
51 |
52 | To run thyme automatically every 20 seconds you can add this to your crontab. Windows users can probably use services (don't quote me on this).
53 | ```
54 | * * * * * DISPLAY=:0 /home/eero/Documents/thyme/track-thyme-log.sh
55 | * * * * * ( sleep 20; DISPLAY=:0 /home/eero/Documents/thyme/track-thyme-log.sh )
56 | * * * * * ( sleep 40; DISPLAY=:0 /home/eero/Documents/thyme/track-thyme-log.sh )
57 | ```
58 |
59 | # Running
60 |
61 | Install inside a virtualenv from PyPI. After first run & page load the example configuration should
62 | be created at `~/.config/tracklater.json` (Windows and Mac configs found somewhere else, check [here](https://github.com/ActiveState/appdirs)).
63 |
64 | ```
65 | mkvirtualenv tracklater -p python3.7
66 | pip install tracklater
67 | tracklater
68 | ```
69 |
70 | or
71 |
72 | Clone the repository, install inside a virtualenv and run:
73 | ```
74 | git clone git@github.com:Eerovil/TrackLater.git
75 | cd TrackLater
76 | mkvirtualenv tracklater -p python3.7 -a .
77 | pip install .
78 | tracklater
79 | ```
80 |
81 | Additional example command to start the server. Must be run in the root directory.
82 | ```
83 | FLASK_APP=tracklater python -m flask run
84 | ```
85 |
86 | # Usage
87 |
88 | Select time entries from thyme and click export.
89 |
90 | You can also double click on the timeline to create entries. Edit by selecting, dragging etc.
91 |
92 | # Contributing
93 |
94 | Building and running the project is easy, as you can simply clone the repo and start making PRs.
95 |
96 | If your workflow is not exactly like mine and you need additional functionality, please create an issue and we can start working on supporting your required modules.
97 |
98 | Ideas for future support:
99 | * Jira time tracking
100 | * Maybe a Chrome page history parser?
101 |
102 | # Settings guide
103 |
104 | Create a file called `user_settings.py` to the root folder (containing `app.py`)
105 |
106 | To load test settings you can add `from test_settings import *` to the end of the file. This will use test data and no actual API calls will be made.
107 |
108 | Each module has their own settings dict, containing a settings dict for each group. There is also
109 | a `global` key for non-group specific settings.
110 |
111 | This example settings file contains two groups: `group1` and `group2`.
112 |
113 | In the example worker's workflow, `group1`'s issues are fetched from Jira while `group2`'s issues are from Taiga.io,
114 | so you will find that the JIRA settings have no `group2` key and TAIGA settings has no `group1` key.
115 |
116 | Time tracking (for billing) is done through Toggl. Also, both groups happen to have their own workspaces on slack, and obviously their own git repositories.
117 |
118 | ```
119 |
120 | # edit to your liking and save as ~/.config/tracklater.json. Remove the comments
121 |
122 | {
123 | "TESTING": false,
124 | "ENABLED_MODULES": [
125 | "activitywatch",
126 | "thyme",
127 | "gitmodule",
128 | "toggl",
129 | "clockify",
130 | "taiga",
131 | "jira",
132 | "slack"
133 | ],
134 |
135 | "UI_SETTINGS": {
136 | "toggl": {
137 | "global": "#E01A22"
138 | },
139 | "thyme": {
140 | "global": "#1aef65"
141 | },
142 | "gitmodule": {
143 | "global": "#F44D27"
144 | },
145 | "slack": {
146 | "global": "#4A154B"
147 | }
148 | },
149 | "TOGGL": {
150 | "global": {
151 | "API_KEY": "your-api-key"
152 | },
153 | "group1": {
154 | "NAME": "First Group",
155 | "PROJECTS": {
156 | "Development": "default",
157 | "Bug fixing": "bug"
158 | }
159 | },
160 | "group2": {
161 | "NAME": "Second Group",
162 | "PROJECTS": {
163 | "Development": "default",
164 | "Bug fixing": "default"
165 | }
166 | }
167 | },
168 |
169 | "CLOCKIFY": {
170 | "global": {
171 | "API_KEY": "",
172 | "WORKSPACE": "workspace-id" # This is optional. (Only one workspace is supported!)
173 | },
174 | "group1": {
175 | "NAME": "Red",
176 | "PROJECTS": {
177 | "Red": "default"
178 | }
179 | },
180 | "group2": {
181 | "NAME": "Blue",
182 | "PROJECTS": {
183 | "Blue": "default"
184 | }
185 | }
186 | },
187 |
188 |
189 | "GIT": {
190 | "global": {
191 | # Only commits made by users with EMAILS will be shown
192 | "EMAILS": ["firstname.lastname@email.com"]
193 | },
194 | "group1": {
195 | # Full path to the git repo
196 | "REPOS": ["/full/path/to/group1/repo"]
197 | },
198 | "group2": {
199 | "REPOS": ["/full/path/to/group2/repo"]
200 | }
201 | },
202 |
203 | "JIRA": {
204 | "group1": {
205 | # Each group must have these settings
206 | "CREDENTIALS": ["username", "password"],
207 | "URL": "https://group1.atlassian.net",
208 | "PROJECT_KEY": "DEV"
209 | }
210 | },
211 |
212 | "TAIGA": {
213 | "global": {
214 | "CREDENTIALS": ["username", "password"]
215 | },
216 | "group2": {
217 | # project_slug can be found in the URL
218 | "project_slug": "username-group2"
219 | }
220 | },
221 |
222 | "THYME": {
223 | "global": {
224 | # Directory containing the json files generated by thyme
225 | "DIR": "/full/path/to/thyme/dir"
226 | }
227 | },
228 |
229 | "ACTIVITYWATCH": {
230 | "global": {
231 | "EVENTS_URL": "http://host.docker.internal:5600/api/0/buckets/aw-watcher-window_Eeros-MacBook-Air.local/events",
232 | "IDLE": 900,
233 | "CUTOFF": 300
234 | },
235 | },
236 |
237 | "GITHUB": {
238 | "global": {
239 | "TOKEN": "token" # needs permissions specified here: https://developer.github.com/v4/guides/forming-calls/#authenticating-with-graphql
240 | },
241 | "group1": {
242 | "repo": ["owner", "repo1"]
243 | },
244 | "group2": {
245 | "repo": ["owner", "repo2"]
246 | }
247 | },
248 |
249 | "SLACK": {
250 | # Each group should contain a workspace to match all messages to a group
251 | "global": {
252 | # Global catch-all workspace for all groups
253 | "API_KEY": "legacy-slack-api-key-global",
254 | "USER_ID": "your-user-id"
255 | },
256 | "group2": {
257 | # Messages in this workspace will be matched to group2
258 | "API_KEY": "legacy-slack-api-key-group2",
259 | "USER_ID": "your-user-id"
260 | }
261 | }
262 | }
263 |
264 | ```
265 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | ignore:
2 | - "tracklater/tests"
3 | - "tracklater/*_settings.py"
4 | coverage:
5 | status:
6 | project:
7 | default:
8 | target: 70%
9 | threshold: null
10 | patch: false
11 | changes: false
12 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 |
3 | services:
4 | tracklater:
5 | build:
6 | context: .
7 | dockerfile: Dockerfile
8 | ports:
9 | - "5001:5000"
10 | volumes:
11 | - "~/.ssh:/root/.ssh-mount"
12 | - "./:/code"
13 | - "~/.config/tracklater.json:/root/.config/tracklater.json"
14 | - "~/Documents:/home/eerovilpponen/Documents"
15 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 |
3 | ignore_missing_imports = True
4 |
5 |
6 | [mypy-tracklater.settings]
7 | ignore_errors = True
8 |
9 | [mypy-tracklater.user_settings]
10 | ignore_errors = True
11 |
12 | [mypy-tracklater.example_settings]
13 | ignore_errors = True
14 |
15 | [mypy-tracklater.test_settings]
16 | ignore_errors = True
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel"
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
--------------------------------------------------------------------------------
/requirements-testing.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 |
3 | pytest
4 | requests_mock
5 | pdbpp
6 | pytest-cov
7 | codecov
8 | flake8
9 | types-pytz
10 | mypy
11 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | GitPython==2.1.11
3 | gitdb2==3.0.1
4 | python-dateutil==2.8.0
5 | slack_sdk
6 | pytz==2019.1
7 | flask-sqlalchemy
8 | requests==2.22.0
9 | appdirs==1.4.3
10 | multidict
11 | aiohttp
12 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages

# Runtime dependencies come straight from requirements.txt so the install
# stays in sync with the pinned file.
with open('requirements.txt') as f:
    requirements = f.read().splitlines()

with open("README.md", "r") as fh:
    long_description = fh.read()

setup(
    name='tracklater',
    version='1.6.1',
    # Bug fix: the implicit string concatenation previously joined
    # "...showing" + "your..." with no space, producing "showingyour".
    description=('TrackLater helps you track time after-the-fact by combining clues and showing '
                 'your day on a simple timeline view.'),
    long_description=long_description,
    long_description_content_type="text/markdown",
    author='Eero Vilpponen',
    author_email='eero.vilpponen@gmail.com',
    packages=find_packages(),
    py_modules=['tracklater'],
    install_requires=requirements,
    python_requires='>=3.7.1',
    include_package_data=True,
    entry_points={
        # `tracklater` console command runs tracklater.__init__:run
        'console_scripts': ['tracklater = tracklater.__init__:run']
    },
    url="https://github.com/Eerovil/TrackLater",
    classifiers=[
        "Programming Language :: Python :: 3.7",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
33 |
--------------------------------------------------------------------------------
/tracklater/__init__.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 | import os
3 | from tracklater.database import db
4 | from tracklater.settings_utils import settings_wrapper as settings # noqa
5 |
6 | import logging
7 | logger = logging.getLogger(__name__)
8 |
9 |
def create_app(name=__name__):
    """Build and configure the TrackLater Flask application.

    Points SQLAlchemy at a SQLite database stored next to this file,
    creates all tables, and registers the views blueprint.
    """
    app = Flask(name)

    base_dir = os.path.dirname(os.path.realpath(__file__))
    app.config['SQLALCHEMY_DATABASE_URI'] = f'sqlite:///{base_dir}/database.db'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    # Imported here so the model classes are registered on `db`
    # before create_all() runs.
    from tracklater.models import ApiCall, Project, Issue, Entry  # noqa

    db.init_app(app)
    with app.app_context():
        db.create_all()

    from tracklater import views
    app.register_blueprint(views.bp)

    return app
29 |
30 |
def run():
    """Console-script entry point: build the app and serve it locally."""
    tracklater_app = create_app(name="tracklater")
    tracklater_app.run(debug=True, port=5000, host="localhost")
34 |
--------------------------------------------------------------------------------
/tracklater/database.py:
--------------------------------------------------------------------------------
from flask_sqlalchemy import SQLAlchemy

# Shared SQLAlchemy handle for the whole package; it is bound to a concrete
# Flask app later via db.init_app(app) in create_app().
db = SQLAlchemy()
4 |
--------------------------------------------------------------------------------
/tracklater/example_settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "TESTING": false,
3 | "ENABLED_MODULES": [
4 | "thyme",
5 | "gitmodule",
6 | "toggl",
7 | "taiga",
8 | "jira",
9 | "slack"
10 | ],
11 |
12 | "UI_SETTINGS": {
13 | "toggl": {
14 | "global": "#E01A22"
15 | },
16 | "thyme": {
17 | "global": "#1aef65"
18 | },
19 | "gitmodule": {
20 | "global": "#F44D27"
21 | },
22 | "slack": {
23 | "global": "#4A154B"
24 | }
25 | },
26 | "TOGGL": {
27 | "global": {
28 | "API_KEY": "your-api-key"
29 | },
30 | "group1": {
31 | "NAME": "First Group",
32 | "PROJECTS": {
33 | "Development": "default",
34 | "Bug fixing": "bug"
35 | }
36 | },
37 | "group2": {
38 | "NAME": "Second Group",
39 | "PROJECTS": {
40 | "Development": "default",
41 | "Bug fixing": "default"
42 | }
43 | }
44 | },
45 |
46 | "CLOCKIFY": {
47 | "global": {
48 | "API_KEY": ""
49 | },
50 | "group1": {
51 | "NAME": "Red",
52 | "PROJECTS": {
53 | "Red": "default"
54 | }
55 | },
56 | "group2": {
57 | "NAME": "Blue",
58 | "PROJECTS": {
59 | "Blue": "default"
60 | }
61 | }
62 | },
63 |
64 | "GIT": {
65 | "global": {
66 | "EMAILS": ["firstname.lastname@email.com"]
67 | },
68 | "group1": {
69 | "REPOS": ["/full/path/to/group1/repo"]
70 | },
71 | "group2": {
72 | "REPOS": ["/full/path/to/group2/repo"]
73 | }
74 | },
75 |
76 | "JIRA": {
77 | "group1": {
78 | "CREDENTIALS": ["username", "password"],
79 | "URL": "https://group1.atlassian.net",
80 | "PROJECT_KEY": "DEV"
81 | }
82 | },
83 |
84 | "TAIGA": {
85 | "global": {
86 | "CREDENTIALS": ["username", "password"]
87 | },
88 | "group2": {
89 | "project_slug": "username-group2"
90 | }
91 | },
92 |
93 | "THYME": {
94 | "global": {
95 | "DIR": "/full/path/to/thyme/dir"
96 | }
97 | },
98 |
99 | "SLACK": {
100 | "global": {
101 | "API_KEY": "legacy-slack-api-key-global",
102 | "USER_ID": "your-user-id"
103 | },
104 | "group2": {
105 | "API_KEY": "legacy-slack-api-key-group2",
106 | "USER_ID": "your-user-id"
107 | }
108 | }
109 | }
--------------------------------------------------------------------------------
/tracklater/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- encoding: utf-8 -*-
3 |
4 |
5 | import importlib
6 | from typing import Dict
7 | from types import ModuleType
8 |
9 | from tracklater import settings
10 | from tracklater.timemodules.interfaces import AbstractParser
11 | from tracklater.models import ApiCall, Entry, Issue, Project
12 | from tracklater.database import db
13 |
14 | import logging
15 | logger = logging.getLogger(__name__)
16 |
17 |
def store_parser_to_database(parser, module_name, start_date, end_date):
    """Persist a parser's results for one module.

    Entries previously stored for this module inside the fetched window are
    deleted first, then the parser's entries, issues and projects are merged
    in, an ApiCall row is recorded for caching purposes, and the session is
    committed.
    """
    # Drop stale entries inside the window; they are re-added below.
    Entry.query.filter(
        Entry.module == module_name,
        Entry.start_time >= start_date,
        Entry.start_time <= end_date,
    ).delete()
    # Entries, issues and projects all get tagged with the module name and
    # merged (insert-or-update) in the same way.
    for collection in (parser.entries, parser.issues, parser.projects):
        for item in collection:
            item.module = module_name
            db.session.merge(item)
    # Record this fetch so later runs know the window is cached.
    db.session.add(ApiCall(
        start_date=start_date,
        end_date=end_date,
        module=module_name,
    ))
    db.session.commit()
38 |
39 |
def set_parser_caching_data(parser, module_name):
    """Prime `parser` with information about data already in the database.

    Reads an ApiCall row for the module and, if one exists, passes its fetch
    window plus the current issue/entry/project counts to the parser so it
    can decide what to re-fetch.
    """
    # NOTE(review): order_by('created') sorts ascending, so this picks the
    # *oldest* ApiCall row — confirm whether the most recent one was intended.
    apicall = ApiCall.query.filter_by(module=module_name).order_by('created').first()
    if apicall:
        parser.set_database_values(
            start_date=apicall.start_date,
            end_date=apicall.end_date,
            issue_count=Issue.query.filter_by(module=module_name).count(),
            entry_count=Entry.query.filter_by(module=module_name).count(),
            project_count=Project.query.filter_by(module=module_name).count(),
        )
50 |
51 |
class Parser(object):
    """Loads the enabled timemodules and runs their parsers.

    Each module in settings.ENABLED_MODULES (optionally filtered by the
    `modules` argument) is imported from tracklater.timemodules and its
    Parser class instantiated with the requested date window.
    """

    def __init__(self, start_date, end_date, modules=None) -> None:
        """Import enabled modules and build their parser instances.

        :param start_date: start of the window to parse
        :param end_date: end of the window to parse
        :param modules: optional whitelist of module names
        """
        self.start_date = start_date
        self.end_date = end_date
        self.modules: Dict[str, AbstractParser] = {}

        for module_name in settings.ENABLED_MODULES:
            if modules and module_name not in modules:
                continue
            module: ModuleType = importlib.import_module(
                'tracklater.timemodules.{}'.format(module_name)
            )
            if getattr(module, 'Parser', None) is None:
                logger.warning('Module %s has no Parser class', module_name)
                # Bug fix: previously execution fell through and called the
                # missing Parser anyway, raising TypeError. Skip the module.
                continue
            parser = module.Parser(self.start_date, self.end_date)  # type: ignore
            self.modules[module_name] = parser

    def parse(self) -> None:
        """Run every loaded parser and store its results in the database.

        Entries missing a project or group are filled in from the stored
        Project rows before being persisted.
        """
        parsers = []
        # Query projects once and build both lookup directions from it
        # (previously two identical queries were issued).
        all_projects = Project.query.all()
        group_to_project = {project.group: project.pid for project in all_projects}
        project_to_group = {project.pid: project.group for project in all_projects}
        for module_name, parser in self.modules.items():
            set_parser_caching_data(parser, module_name)
            logger.warning("Parsing %s", module_name)
            parser.parse()
            parsers.append((module_name, parser))
            for entry in parser.entries:
                # Cross-fill project/group so every entry carries both when
                # either can be derived.
                if not entry.project and entry.group:
                    entry.project = group_to_project.get(entry.group, None)
                if not entry.group and entry.project:
                    entry.group = project_to_group.get(str(entry.project), None)
            store_parser_to_database(parser, module_name,
                                     start_date=self.start_date, end_date=self.end_date)
            logger.warning("Task done %s", module_name)
86 |
--------------------------------------------------------------------------------
/tracklater/models.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import Column, Integer, String, DateTime, Text, PickleType
2 | from datetime import datetime, timedelta
3 | from typing import Optional
4 |
5 | from tracklater.database import db
6 | from tracklater import settings
7 |
8 | import logging
9 | logger = logging.getLogger(__name__)
10 |
11 |
class ApiCall(db.Model):
    """Record of one data fetch for a module; used as caching metadata."""
    pk: int = Column(Integer, primary_key=True)
    module: str = Column(String(50), nullable=False)  # timemodule name, e.g. 'toggl'
    created = Column(DateTime, default=datetime.utcnow)  # when the fetch happened
    start_date: datetime = Column(DateTime)  # start of the fetched window
    end_date: Optional[datetime] = Column(DateTime)  # end of the fetched window
18 |
19 |
class Project(db.Model):
    """A project known to a timemodule, keyed by (module, pid)."""
    __tablename__ = 'projects'
    module: str = Column(String(50), primary_key=True)  # owning timemodule
    pid: str = Column(String(50), primary_key=True, nullable=True)  # module-local project id
    group: str = Column(String(50))  # settings group this project maps to
    title: str = Column(String(50))

    def to_dict(self):
        """JSON-serializable representation for the frontend (pid exposed as 'id')."""
        return {
            "title": self.title,
            "id": self.pid,
            "group": self.group,
        }
33 |
34 |
class Issue(db.Model):
    """An issue/ticket fetched by a timemodule, keyed by (module, id, key)."""
    __tablename__ = 'issues'
    module: str = Column(String(50), primary_key=True)  # owning timemodule
    id: str = Column(String(50), primary_key=True, nullable=True)  # module-local issue id
    key: str = Column(String(50), primary_key=True, nullable=True)  # human-readable key, e.g. 'DEV-123'
    group: str = Column(String(50))  # settings group the issue belongs to
    title: str = Column(String(50))
    uuid: Optional[str] = Column(String(50))
    extra_data: dict = Column(PickleType)  # For custom js

    def to_dict(self):
        """JSON-serializable representation used by the frontend API."""
        return {
            "id": self.id,
            "title": self.title,
            "key": self.key,
            "group": self.group,
            "extra_data": self.extra_data,
            "uuid": self.uuid
        }
54 |
55 |
class Entry(db.Model):
    """A single timeline event (time entry) produced by a timemodule."""
    __tablename__ = 'entries'
    module: str = Column(String(50), primary_key=True)  # owning timemodule
    id: str = Column(String(50), primary_key=True, nullable=True, unique=True)
    start_time: datetime = Column(DateTime, primary_key=True)
    group: Optional[str] = Column(String(50))
    end_time: Optional[datetime] = Column(DateTime)  # None => point event with no duration
    date_group: Optional[str] = Column(String(50))  # 'YYYY-MM-DD' day bucket, set in __init__
    issue: Optional[str] = Column(String(50))  # Issue id
    project: Optional[str] = Column(String(50))  # Project id
    title: str = Column(String(255), default="")  # Title to show in timeline
    text: str = Column(Text())  # Text to show in timeline hover
    extra_data: dict = Column(PickleType)  # For custom js

    def __init__(self, **kwargs):
        """Construct the entry and derive date_group from start_time.

        Entries starting before CUTOFF_HOUR (default 3) are grouped with
        the previous calendar day, so late-night work stays on one day.
        """
        super(Entry, self).__init__(**kwargs)
        # Calculate date_group immediately
        # NOTE(review): assumes start_time was supplied in kwargs; `.hour`
        # would raise AttributeError on None — confirm callers always pass it.
        item_time = self.start_time
        _cutoff = getattr(settings, 'CUTOFF_HOUR', 3)
        if item_time.hour >= _cutoff:
            self.date_group = item_time.strftime('%Y-%m-%d')
        else:
            self.date_group = (item_time - timedelta(days=1)).strftime('%Y-%m-%d')

    @property
    def duration(self) -> int:
        """Entry length in whole seconds; 0 when there is no end_time."""
        if not self.end_time:
            return 0
        return int((self.end_time - self.start_time).total_seconds())

    def to_dict(self):
        """JSON-serializable representation used by the frontend API."""
        return {
            "start_time": self.start_time,
            "end_time": self.end_time,
            "id": self.id,
            "date_group": self.date_group,
            "issue": self.issue,
            "project": self.project,
            "title": self.title,
            "text": self.text,
            "extra_data": self.extra_data,
            "duration": self.duration,
            "group": self.group
        }
100 |
--------------------------------------------------------------------------------
/tracklater/settings_utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | import appdirs
3 | import os
4 | import shutil
5 | from typing import Any
6 |
7 | import logging
8 | logger = logging.getLogger(__name__)
9 |
# Directory containing this file; used to locate the bundled example settings.
DIRECTORY = os.path.dirname(os.path.realpath(__file__))
# Per-user config file location (e.g. ~/.config/tracklater.json on Linux).
user_config_path = os.path.join(appdirs.user_config_dir(), 'tracklater.json')
12 |
13 |
class Dummy(object):
    """Empty attribute container; settings are attached to it dynamically."""
16 |
17 |
# Module-level singleton holding the user's settings as plain attributes.
# Typed Any so mypy allows arbitrary attribute access on it.
settings_wrapper: Any = Dummy()

# First run: seed the user's config dir with the bundled example settings so
# the app can start, and loudly tell the user to edit the created file.
if not os.path.exists(user_config_path):
    try:
        os.mkdir(appdirs.user_config_dir())
    except FileExistsError:
        pass
    shutil.copy(os.path.join(DIRECTORY, 'example_settings.json'), user_config_path)
    logger.error("No user settings file! Modify the example settings created (path: %s)",
                 user_config_path)

# Load every top-level key of the JSON config onto the wrapper as an attribute.
# A malformed file is logged but not fatal — the wrapper just stays empty.
try:
    with open(user_config_path, 'rb') as f:
        for key, value in json.load(f).items():
            setattr(settings_wrapper, key, value)
except json.JSONDecodeError as e:
    logger.exception("Error reading settings file: %s", e)
35 |
36 |
def helper(module, key, group='global', default=None):
    """Look up setting `key` for `module`.

    The requested `group` is checked first, then the 'global' group, then
    `default` (when one was given). Raises KeyError if the setting is not
    found anywhere and no default was provided.

    :param module: settings section name, e.g. 'JIRA'
    :param key: setting name inside the group
    :param group: group to check before falling back to 'global'
    :param default: value returned when the key is absent everywhere
    """
    module_settings = getattr(settings_wrapper, module)
    # Try given group first
    if group in module_settings:
        group_settings = module_settings[group]
        if key in group_settings:
            return group_settings[key]
    # Try with 'global' group
    if 'global' in module_settings:
        group_settings = module_settings['global']
        if key in group_settings:
            return group_settings[key]
    # Bug fix: `if default:` silently discarded falsy defaults such as 0,
    # '' or False, turning them into a KeyError. Compare against None.
    if default is not None:
        return default

    raise KeyError('No setting "{}" for module {} found'.format(key, module))
52 |
53 |
# Expose the lookup function on the wrapper so callers that import it as
# their settings object can call settings.helper(...).
settings_wrapper.helper = helper
55 |
--------------------------------------------------------------------------------
/tracklater/static/daytimeline.vue.js:
--------------------------------------------------------------------------------
1 | var daytimeline = Vue.component("daytimeline", {
2 | template: `
3 |
4 |
11 |
12 |
13 | `,
14 | props: ["entries"],
15 | data() {
16 | return {
17 | items: [],
18 | }
19 | },
20 | mounted() {
21 | this.items = this.entriesToItems(this.entries);
22 | },
23 | methods: {
24 | myChangedCallback(arg1, arg2, arg3) {
25 | console.log(arg1, arg2, arg3)
26 | },
27 | select(props) {
28 | const entry = this.entries[props.items[0]] || null;
29 | if (entry != null) {
30 | this.$store.commit('setInput', {title: entry.title, issue: entry.issue || this.findIssue(entry.title)})
31 | this.$store.commit('setSelectedEntry', entry);
32 | }
33 | },
34 | findIssue(title) {
35 | return this.$store.getters.findIssue(title)
36 | },
37 | onMove: function(item, callback) {
38 | if (this.modules[item.group].capabilities.includes('updateentry')) {
39 | let entry = this.entries[item.id];
40 | if (new Date(entry.start_time).getTime() === item.start.getTime() &&
41 | new Date(entry.end_time).getTime() === item.end.getTime()) {
42 | return;
43 | }
44 | entry.start_time = item.start
45 | entry.end_time = item.end
46 | this.$emit('updateEntry', entry)
47 | }
48 | },
49 | onRemove: function(item, callback) {
50 | if (this.modules[item.group].capabilities.includes('deleteentry')) {
51 | let entry = this.entries[item.id];
52 | this.$emit('deleteEntry', entry)
53 | }
54 | },
55 | onAdd: function(item, callback) {
56 | if (this.modules[item.group].capabilities.includes('addentry')) {
57 | let timeSnippet = this.generateTimeSnippet(item.start, item.group);
58 | if (!timeSnippet) {
59 | return;
60 | }
61 | let entry = {
62 | start_time: timeSnippet.start_time,
63 | end_time: timeSnippet.end_time,
64 | title: "Unnamed Entry",
65 | module: item.group,
66 | project: ''
67 | }
68 | let detectedIssue = this.detectIssue(timeSnippet);
69 | if (detectedIssue) {
70 | entry.title = detectedIssue.message;
71 | entry.project = detectedIssue.project;
72 | }
73 | this.$emit('addEntry', entry)
74 | }
75 | },
76 | entriesToItems(entries) {
77 | console.log("entriesToItems called for " + entries[0].start_time)
78 | return entries.map((entry, i) => {
79 | let row = {
80 | id: i,
81 | group: entry.module,
82 | start: new Date(entry.start_time),
83 | className: entry.module,
84 | content: entry.title,
85 | title: (entry.text || "").replace(/(?:\r\n|\r|\n)/g, '
'),
86 | editable: {
87 | updateTime: this.modules[entry.module].capabilities.includes('updateentry'),
88 | remove: this.modules[entry.module].capabilities.includes('deleteentry')
89 | },
90 | }
91 | if (entry.id && entry.id.startsWith("placeholderid")) {
92 | row.editable = false
93 | row.selectable = false;
94 | }
95 | let colorObj = this.modules[entry.module].color;
96 | color = colorObj[entry.group] || colorObj.global;
97 | if (entry.end_time != undefined) {
98 | row.end = new Date(entry.end_time);
99 | if (color != null) {
100 | row.style = `background-color: ${color}`
101 | }
102 | } else {
103 | row.type = 'point'
104 | if (color != null) {
105 | row.className += ` point-color-${color}`
106 | }
107 | }
108 | return row
109 | });
110 | },
111 | generateTimeSnippet(middle_time, activeModule) {
112 | // Go backwards and forwards unit "not much" is happening, and return the
113 | // start and end time. If nothing is happening, return an hour.
114 | const cutoffSeconds = 500;
115 | let ret = {
116 | start_time: middle_time.addHours(-1),
117 | end_time: middle_time.addHours(1),
118 | }
119 | let spanningEntries = [];
120 | let sorted = this.entries.slice().filter(i => this.timeEntryModules.includes(i.module)).sort((a, b) => {
121 | if (new Date(a.start_time) > new Date(b.start_time)) {
122 | return 1;
123 | }
124 | if (new Date(a.start_time) < new Date(b.start_time)) {
125 | return -1;
126 | }
127 | return 0;
128 | }).map(i => {
129 | i.start_time = new Date(i.start_time)
130 | i.end_time = !!i.end_time ? (new Date(i.end_time)) : null
131 | if (i.end_time) {
132 | spanningEntries.push(i)
133 | }
134 | return i
135 | })
136 | // Filter out dot-entries that overlap with spanning entries.
137 | sorted = sorted.filter(i => {
138 | if (!!i.end_time) {
139 | return true
140 | }
141 | for (let entry of spanningEntries) {
142 | // Could do a zip-style filter here for performance... But this is good enough
143 | if (entry.start_time <= i.start_time && entry.end_time >= i.start_time) {
144 | return false
145 | }
146 | }
147 | return true
148 | })
149 | function parseRet(_ret) {
150 | // Update ret to fix overlapping issues
151 | for (el of sorted) {
152 | if (el.module !== activeModule) {
153 | continue;
154 | }
155 | // If any toggl entry starts or ends between ret times, change ret.
156 | if (el.start_time < _ret.end_time && el.start_time > _ret.start_time) {
157 | _ret.end_time = el.start_time;
158 | }
159 | if (el.end_time < _ret.end_time && el.end_time > _ret.start_time) {
160 | _ret.start_time = el.end_time;
161 | }
162 | }
163 | if (_ret.start_time >= _ret.end_time) {
164 | return
165 | }
166 | return _ret
167 | }
168 | console.log('sorted: ', sorted);
169 | console.log('middle_time: ', middle_time);
170 | if (sorted.length == 0) {
171 | return parseRet(ret);
172 | }
173 | // Special case: first time entry is after middle_time. Not good
174 | if (sorted[0].start_time > middle_time) {
175 | return parseRet(ret);
176 | }
177 | // Special case: last time entry is before middle_time. Not good
178 | if (sorted[sorted.length - 1].start_time < middle_time) {
179 | return parseRet(ret);
180 | }
181 | // Find the middle time entry
182 | let middleIndex;
183 | for (let i in sorted) {
184 | if ((sorted[i].end_time || sorted[i].start_time).getTime() > (middle_time.getTime() - (cutoffSeconds * 1000))) {
185 | middleIndex = i;
186 | break;
187 | }
188 | }
189 | // Middle item is too far
190 | if (sorted[middleIndex].start_time.getTime() - middle_time.getTime() > cutoffSeconds * 1000) {
191 | return parseRet(ret);
192 | }
193 | console.log('middleIndex: ', middleIndex);
194 | if (!middleIndex) {
195 | return parseRet(ret);
196 | }
197 | // Go back
198 | let prevTime = sorted[middleIndex].start_time
199 | for (let i=middleIndex; i>=0; i--) {
200 | ret.start_time = prevTime.addHours(-0.7);
201 | const indexTime = sorted[i].end_time || sorted[i].start_time
202 | if (prevTime.getTime() - indexTime.getTime() > cutoffSeconds * 1000) {
203 | break;
204 | }
205 | if (sorted[i].module == activeModule) {
206 | // We reached another toggl entry! Return its end_time here for no overlap
207 | ret.start_time = indexTime
208 | break;
209 | }
210 | prevTime = sorted[i].start_time
211 | if (i == 0) {
212 | ret.start_time = prevTime.addHours(-0.7);
213 | }
214 | }
215 | // Go forward
216 | prevTime = sorted[middleIndex].end_time || sorted[middleIndex].start_time
217 | for (let i=middleIndex; i cutoffSeconds * 1000) {
220 | break;
221 | }
222 | if (sorted[i].module == activeModule) {
223 | // We reached another toggl entry! Return its start_time here for no overlap
224 | ret.end_time = sorted[i].start_time
225 | break;
226 | }
227 | prevTime = sorted[i].end_time || sorted[i].start_time
228 | if (i == (sorted.length - 1)) {
229 | ret.end_time = prevTime.addHours(0.7);
230 | }
231 | }
232 | return parseRet(ret);
233 | },
234 | detectIssue(timeSnippet) {
235 | const entries = this.entries.slice()
236 | .filter(i => ["gitmodule"]
237 | .includes(i.module))
238 | .filter(i => (new Date(i.start_time) < timeSnippet.end_time && new Date(i.start_time) > timeSnippet.start_time))
239 | .sort((a, b) => {
240 | if (new Date(a.start_time) > new Date(b.start_time)) {
241 | return 1;
242 | }
243 | if (new Date(a.start_time) < new Date(b.start_time)) {
244 | return -1;
245 | }
246 | return 0;
247 | })
248 | .reverse()
249 | if (entries.length == 0) {
250 | return null
251 | }
252 | let ret = {
253 | group: entries[0].group
254 | }
255 | let issueFound = false;
256 | entries.forEach(entry => {
257 | if (issueFound) {
258 | return;
259 | }
260 | // Try to parse issue
261 | let issueMatch = entry.text.match(/^\w* - ([^ ]+)(.*)/)
262 | if (issueMatch) {
263 | let issueSlug = issueMatch[1]
264 | let issue = this.$store.getters.findIssueByKey(issueSlug);
265 | if (issue) {
266 | ret.group = issue.group;
267 | ret.message = issue.key + " " + issue.title;
268 | issueFound = true;
269 | } else {
270 | ret.message = issueMatch[1] + issueMatch[2];
271 | ret.group = entry.group;
272 | }
273 | }
274 | });
275 | ret.project = this.$store.getters.getProjectId(ret.group);
276 | return ret
277 | },
278 | },
279 | watch: {
280 | entries(entries, oldEntries) {
281 | if (_.isEqual(entries, oldEntries)) {
282 | return
283 | }
284 | this.items = this.entriesToItems(entries);
285 | }
286 | },
287 | computed: {
288 | selection() {
289 | const selectedEntry = this.$store.state.selectedEntry;
290 | if (selectedEntry != null && selectedEntry.date_group === (this.entries[0] || {}).date_group) {
291 | for (let i=0; i this.modules[key].capabilities.includes("entries"));
305 | },
306 | groups() {
307 | ret = []
308 | for (let module_name in this.modules) {
309 | if (this.modules[module_name].capabilities.includes('entries')){
310 | ret.push({
311 | id: module_name,
312 | content: module_name,
313 | })
314 | }
315 | }
316 | return ret
317 | },
318 | options() {
319 | self = this
320 | let firstDate = new Date(this.entries[0].date_group);
321 | const day_start = firstDate.setHours(6, 0, 0, 0);
322 | const day_end = firstDate.setHours(26, 0, 0, 0);
323 |
324 | return {
325 | start: day_start,
326 | end: day_end,
327 | editable: true,
328 | zoomable: (screen.width < 960),
329 | showCurrentTime: false,
330 | horizontalScroll: false,
331 | moveable: true,
332 | margin: {
333 | item: 0
334 | },
335 | snap: null,
336 | onMove: self.onMove,
337 | onRemove: self.onRemove,
338 | onAdd:self.onAdd,
339 | tooltip: {
340 | delay: 1
341 | }
342 | }
343 | }
344 | },
345 | });
--------------------------------------------------------------------------------
/tracklater/static/home.vue.js:
--------------------------------------------------------------------------------
1 | var home = Vue.component("home", {
2 | template: `
3 |
25 | `,
26 | data() {
27 | return {
28 | toolbarHeight: '110px',
29 | toolbarSepHeight: '110px',
30 | }
31 | },
32 | computed: {
33 | modules() {
34 | return this.$store.state.modules;
35 | },
36 | entriesByDategroup() {
37 | // Return an array of objects, containing {dateGroup, entries}
38 | let keys = new Set()
39 | let ret;
40 | try {
41 | for (module_name in this.modules) {
42 | let entries = this.modules[module_name].entries || []
43 | for (let i=0; i a > b ? -1 : 1)) {
50 | ret.push({dateGroup, entries: []});
51 | }
52 | for (module_name in this.modules) {
53 | let entries = this.modules[module_name].entries || []
54 | for (let i=0; i item.dateGroup === entries[i].date_group);
57 | ret[index].entries.push(entries[i]);
58 | }
59 | }
60 | } catch (e) {
61 | console.log(e)
62 | }
63 | return ret;
64 | },
65 | debounceUpdateWeek() {
66 | return _.debounce(this.updateWeek, 500)
67 | }
68 | },
69 | methods: {
70 | fetchModule(module_name, parse) {
71 | if (parse == undefined) {
72 | parse = 1;
73 | }
74 | console.log(`Fetching ${module_name}`)
75 | this.$store.commit('setLoading', {module_name, loading: true});
76 |
77 | axios.get("fetchdata", {params: {
78 | parse: parse,
79 | keys: [module_name],
80 | from: this.$store.getters.getFrom,
81 | to: this.$store.getters.getTo,
82 | }}).then(response => {
83 | console.log(response)
84 | this.$store.commit('updateModules', response.data);
85 | this.$store.commit('setLoading', {module_name, loading: false});
86 | }).catch(() => {
87 | this.$store.commit('setLoading', {module_name, loading: false});
88 | })
89 | },
90 | parseTime(time) {
91 | if (typeof time === "string") {
92 | return new Date(time)
93 | }
94 | return time
95 | },
96 | updateEntry(entry) {
97 | this.$store.commit('setLoading', {module_name: 'updateentry', loading: true});
98 | updated_entries = this.$store.state.modules[entry.module].entries.filter((_entry) => _entry.id !== entry.id);
99 | let placeholderid;
100 | if (!entry.id) {
101 | placeholderid = "placeholderid" + Math.random();
102 | }
103 | updated_entries.push({
104 | id: entry.id || placeholderid,
105 | start_time: this.parseTime(entry.start_time),
106 | end_time: this.parseTime(entry.end_time),
107 | title: entry.title || "Placeholder",
108 | module: entry.module,
109 | date_group: this.parseTime(entry.start_time).toISOString().split('T')[0],
110 | })
111 | this.$store.commit('setEntries', {module_name: entry.module, entries: updated_entries});
112 | axios.post("updateentry", {
113 | 'module': entry.module,
114 | 'entry_id': entry.id,
115 | 'start_time': this.parseTime(entry.start_time).getTime(),
116 | 'end_time': this.parseTime(entry.end_time).getTime(),
117 | 'title': entry.title || "Placeholder",
118 | 'issue_id': (entry.issue || {}).id,
119 | 'project_id': entry.project || "0",
120 | 'extra_data': entry.extra_data,
121 | 'text': entry.text,
122 | }).then(response => {
123 | console.log(response)
124 | updated_entries = this.$store.state.modules[entry.module].entries.filter((_entry) => _entry.id !== entry.id && _entry.id !== placeholderid);
125 | updated_entries.push(response.data)
126 | this.$store.commit('setInput', {title: response.data.title, issue: null})
127 | this.$store.commit('setEntries', {module_name: entry.module, entries: updated_entries});
128 | this.$store.commit('setLoading', {module_name: 'updateentry', loading: false});
129 | }).catch(_handleFailure)
130 | },
131 | deleteEntry(entry) {
132 | this.$store.commit('setLoading', {module_name: 'deleteentry', loading: true});
133 | updated_entries = this.$store.state.modules[entry.module].entries.filter((_entry) => _entry.id !== entry.id);
134 | this.$store.commit('setEntries', {module_name: entry.module, entries: updated_entries});
135 | this.$store.commit('setSelectedEntry', null)
136 | axios.post('deleteentry', {
137 | 'module': entry.module,
138 | 'entry_id': entry.id
139 | }).then((response) => {
140 | console.log("deleted entry " + entry.id + ": " + response.data);
141 | updated_entries = this.$store.state.modules[entry.module].entries.filter((_entry) => _entry.id !== entry.id);
142 | this.$store.commit('setInput', {title: null, issue: null})
143 | this.$store.commit('setEntries', {module_name: entry.module, entries: updated_entries});
144 | this.$store.commit('setLoading', {module_name: 'deleteentry', loading: false});
145 | }).catch(_handleFailure)
146 | },
147 | setToolbarHeight(event) {
148 | this.toolbarHeight = `${event.height}px`;
149 | if (event.separator) {
150 | this.toolbarSepHeight = `${event.height}px`;
151 | }
152 | },
153 | updateWeek() {
154 | for (el of this.$refs.daytimelines) {
155 | el.$refs.timeline.unloadTimeline();
156 | }
157 | this.$store.commit('setSelectedEntry', null)
158 | this.$store.commit('setInput', {title: null, issue: null})
159 | this.fetchModule("all", 0)
160 | },
161 | },
162 | watch: {
163 | "$store.state.currentWeek"() {
164 | if (!this.$refs.daytimelines) {
165 | return;
166 | }
167 | this.debounceUpdateWeek();
168 | }
169 | },
170 | mounted() {
171 | axios.get("listmodules").then(response => {
172 | console.log(response)
173 | this.$store.commit('updateModules', response.data);
174 | })
175 | axios.get("getsettings").then(response => {
176 | console.log(response)
177 | this.$store.commit('setSettings', response.data);
178 | })
179 | this.$store.commit('setLoading', {module_name: 'fetchdata', loading: true});
180 | axios.get("fetchdata", {params: {
181 | parse: "0",
182 | from: this.$store.getters.getFrom,
183 | to: this.$store.getters.getTo,
184 | }}).then(response => {
185 | console.log("fetchdata (parse: 0)", response)
186 | this.$store.commit('updateModules', response.data);
187 | this.$store.commit('setLoading', {module_name: 'fetchdata', loading: false});
188 | }).catch(() => {
189 | this.$store.commit('setLoading', {module_name: 'fetchdata', loading: false});
190 | })
191 | }
192 | });
--------------------------------------------------------------------------------
/tracklater/static/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | VUE SPA
8 |
9 |
10 |
11 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
166 |
167 |
168 |
169 |
170 |
--------------------------------------------------------------------------------
/tracklater/static/timeline.vue.js:
--------------------------------------------------------------------------------
// Thin Vue wrapper component around a vis.js Timeline instance.
// NOTE(review): the template markup was lost when this file was dumped to
// text — restore the original template content from the repository.
var vuetimeline = Vue.component("vuetimeline", {
  template: `

`,
  props: {
    // vis.js group rows (plain Array or a vis DataView)
    groups: {
      type: [Array, DataView],
      default: () => []
    },
    // timeline items to render
    items: {
      type: [Array, DataView],
      default: () => []
    },
    // vis.js event names to forward as Vue events
    events: {
      type: [Array, DataView],
      default: () => []
    },
    // vis.js Timeline options object
    options: {
      type: Object
    },
    // id of the item to select; -1 selects nothing
    selection: {
      type: Number,
      default: () => -1
    }
  },
  watch: {
    // Mirror prop changes into the DataSets vis.js actually renders from,
    // then re-apply the selection on the live timeline (if created).
    items(newItems) {
      this.parsedItems.clear();
      this.parsedItems.add(newItems);
      if (!this.timeline) {
        return;
      }
      this.timeline.setSelection(this.selection);
    },
    groups(newGroups) {
      this.parsedGroups.clear();
      this.parsedGroups.add(newGroups);
    },
  },
  data() {
    return {
      // vis.DataSet mirrors of the props; kept separate so vis.js mutations
      // never touch the props directly
      parsedItems: new vis.DataSet([]),
      parsedGroups: new vis.DataSet([]),
      timeline: null,  // lazily-created vis.Timeline instance
    }
  },
  methods: {
    // Create the timeline once and wire up event forwarding.
    loadTimeline() {
      if (this.timeline == null) {
        const container = this.$refs.visualization;
        this.timeline = new vis.Timeline(container, this.parsedItems, this.parsedGroups, this.options);
        // Re-emit each vis event under a kebab-cased name
        // (e.g. "rangeChanged" -> "range-changed").
        this.events.forEach(eventName =>
          this.timeline.on(eventName, props => this.$emit(eventName.replace(/([a-z0-9])([A-Z])/g, '$1-$2').toLowerCase(), props))
        );
      }
    },
    // Destroy the timeline so it can be rebuilt from scratch later.
    unloadTimeline() {
      if (this.timeline != null) {
        this.timeline.destroy();
        this.timeline = null;
      }
    }
  },
  mounted() {
    this.parsedItems.add(this.items);
    this.parsedGroups.add(this.groups);
    this.loadTimeline()
  }
});
71 |
--------------------------------------------------------------------------------
/tracklater/static/toolbar.vue.js:
--------------------------------------------------------------------------------
1 | var toolbar = Vue.component("toolbar", {
2 | template: `
3 |
4 |
5 |
9 | Fetch all
12 | {{ module }}
17 |
18 | <
19 | {{ currentWeek }}
20 | >
21 |
22 |
23 |
24 |
29 |
30 |
31 |
32 |
37 |
38 |
39 |
40 |
47 |
48 |
49 |
50 |
54 | done
55 |
56 |
57 |
58 |
59 |
60 | `,
61 | props: [],
62 | computed: {
63 | currentWeek: {
64 | get() {
65 | return this.$store.state.currentWeek
66 | },
67 | set(v) {
68 | this.$store.commit('setCurrentWeek', v);
69 | }
70 | },
71 | entryTitle: {
72 | get() {
73 | return this.$store.state.inputTitle
74 | },
75 | set(v) {
76 | this.$store.commit('setInput', {title: v, issue: this.findIssue(v)});
77 | if (this.$store.state.inputIssue !== null) {
78 | this.selectedProject = (this.getProject(this.$store.state.inputIssue) || {}).id;
79 | } else {
80 | this.selectedProject = (this.guessProject(this.entryTitle) || {}).id;
81 | }
82 | }
83 | },
84 | somethingLoading() {
85 | for (key in this.loading) {
86 | if (this.loading[key] === true) {
87 | return true;
88 | }
89 | }
90 | return false;
91 | },
92 | projects() {
93 | if (this.selectedModule == null) {
94 | return [];
95 | }
96 | return this.modules[this.selectedModule].projects;
97 | },
98 | selectedEntry() {
99 | let entry = this.$store.state.selectedEntry;
100 | return entry;
101 | },
102 | modules() {
103 | return this.$store.state.modules;
104 | },
105 | loading() {
106 | return this.$store.state.loading;
107 | },
108 | allIssues() {
109 | let ret = this.latestIssues.slice();
110 | for (let module_name in this.modules) {
111 | const _issues = this.modules[module_name].issues || [];
112 | for (let i=0; i<_issues.length; i++) {
113 | const newIssue = `${_issues[i].key} ${_issues[i].title}`
114 | if (ret.includes(newIssue)) {
115 | continue
116 | }
117 | ret.push(newIssue);
118 | }
119 | }
120 | return ret;
121 | },
122 | selectableModules() {
123 | let ret = [];
124 | for (let module_name in this.modules) {
125 | if ((this.modules[module_name].capabilities || []).includes('updateentry')) {
126 | ret.push(module_name);
127 | }
128 | }
129 | return ret;
130 | }
131 | },
132 | watch: {
133 | selectedEntry(entry, oldEntry) {
134 | this.selectedProject = (entry || {}).project;
135 | this.selectedModule = (entry || {}).module;
136 | },
137 | showButtons() {
138 | setTimeout(()=>{
139 | this.$emit('setToolbarHeight', {height: this.$refs.layout.clientHeight});
140 | }, 50);
141 | }
142 | },
143 | methods: {
144 | findIssue(title) {
145 | return this.$store.getters.findIssue(title)
146 | },
147 | fetchModule(module_name) {
148 | this.$emit('fetchModule', module_name)
149 | },
150 | fetchAllModules() {
151 | for (let module_name in this.modules) {
152 | this.$emit('fetchModule', module_name)
153 | }
154 | },
155 | getProject(issue) {
156 | // Get a matching project for issue
157 | for (const project of this.projects) {
158 | if (project.group === issue.group) {
159 | return project
160 | }
161 | }
162 | return null
163 | },
164 | guessProject(title) {
165 | // Guess project based on the title. return null if no guess
166 | for (const project of this.projects) {
167 | if (title.indexOf(project.group) > -1) {
168 | return project
169 | }
170 | }
171 | return null
172 | },
173 | exportEntry() {
174 | if (this.selectedEntry == null) {
175 | return;
176 | }
177 | this.latestIssues = this.latestIssues.filter(item => item !== this.entryTitle)
178 | this.latestIssues.unshift(this.entryTitle)
179 | this.$emit('exportEntry', Object.assign(this.selectedEntry, {
180 | issue: this.$store.state.inputIssue,
181 | title: this.entryTitle,
182 | module: this.selectedModule,
183 | project: this.selectedProject
184 | }));
185 | },
186 | onScroll() {
187 | const currentScrollPosition = window.pageYOffset || document.documentElement.scrollTop
188 | if (currentScrollPosition < 0) {
189 | return
190 | }
191 | this.showButtons = (currentScrollPosition < 2);
192 | },
193 | moveWeek(count) {
194 | let now;
195 | if (!this.currentWeek) {
196 | now = new Date();
197 | } else {
198 | now = new Date(Date.parse(this.currentWeek))
199 | }
200 | // I want monday as first day.
201 | let dayOfWeek = now.getDay() - 1;
202 | if (dayOfWeek == -1) {
203 | dayOfWeek = 6
204 | }
205 | let newTime = new Date();
206 | newTime.setTime((now.getTime() - ((24*60*60*1000) * (dayOfWeek + (count * -1) * 7))));
207 | this.currentWeek = newTime.toISOString().split('T')[0]
208 | }
209 | },
210 | data() {
211 | return {
212 | selectedModule: null,
213 | selectedProject: null,
214 | showButtons: true,
215 | latestIssues: [],
216 | }
217 | },
218 | mounted() {
219 | // Tried to use this.$nextTick here, but still didn't get the full height.
220 | // Terrible workaround is setTimeout...
221 | setTimeout(()=>{
222 | this.$emit('setToolbarHeight', {height: this.$refs.layout.clientHeight, separator: true});
223 | }, 500);
224 |
225 | window.addEventListener('scroll', this.onScroll)
226 | this.moveWeek(0)
227 | },
228 | beforeDestroy () {
229 | window.removeEventListener('scroll', this.onScroll)
230 | }
231 | });
--------------------------------------------------------------------------------
/tracklater/test_settings.py:
--------------------------------------------------------------------------------
# Settings module used by the test suite; values are fixtures, not real
# credentials. This file is excluded from flake8 (see .flake8).

TESTING = True

ENABLED_MODULES = ['jira', 'gitmodule', 'slack', 'taiga', 'toggl', 'thyme']

from datetime import datetime, timedelta

# Fixed, deterministic fetch window: +/- 4 days around a constant epoch.
OVERRIDE_START = datetime.fromtimestamp(
    1234560
) - timedelta(days=4)
OVERRIDE_END = datetime.fromtimestamp(
    1234560
) + timedelta(days=4)

# Per-module timeline colors for the UI.
UI_SETTINGS = {
    'toggl': {
        'global': '#E01A22'
    },
    'thyme': {
        'global': '#1aef65'
    },
    'gitmodule': {
        'global': '#F44D27'
    },
    'slack': {
        'global': '#4A154B'
    }
}

# Test settings for Jira
JIRA = {
    'group1': {
        'CREDENTIALS': ('', ''),
        'URL': 'mock://jira.test',
        'PROJECT_KEY': 'TEST',
    }
}

# Test settings for Git
GIT = {
    'global': {
        'EMAILS': ['test.person@email.com'],
    },
    'group1': {
        'REPOS': ['path1', 'path2']
    },
    'group2': {
        'REPOS': ['path3']
    },
}

# Test settings for Slack
SLACK = {
    'global': {
        'API_KEY': '',
        'USER_ID': '1',
    }
}

# Test settings for Taiga
TAIGA = {
    'global': {
        'CREDENTIALS': 'test'
    },
    'group2': {
        'project_slug': 'test'
    }
}

# Test settings for Toggl
TOGGL = {
    'global': {
        'API_KEY': 'your-api-key'
    },
    'group1': {
        'NAME': 'First Client',
        'PROJECTS': {
            'Development': 'default',
            'Bug fixing': 'bug',
        }
    },
    'group2': {
        'NAME': 'Second Client',
        'PROJECTS': {
            'Development': 'default',
            'Bug fixing': 'default',
        }
    }
}

# Test settings for Thyme
THYME = {  # type: ignore
    'global': {
    }
}
--------------------------------------------------------------------------------
/tracklater/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Eerovil/TrackLater/e19f3d3081e8329eb440967f122b4a3eeff8a18d/tracklater/tests/__init__.py
--------------------------------------------------------------------------------
/tracklater/tests/conftest.py:
--------------------------------------------------------------------------------
1 |
2 | import pytest
3 | from tracklater import settings
4 | from tracklater import test_settings
5 |
6 |
@pytest.fixture(autouse=True)
def mock_settings(monkeypatch):
    """Replace every tracklater.settings attribute with its test_settings value.

    Attributes missing from test_settings are replaced with {}. The
    'helper' function is kept intact.
    """
    public_names = (name for name in dir(settings) if not name.startswith("__"))
    for name in public_names:
        if name == 'helper':
            continue
        target = 'tracklater.settings.{}'.format(name)
        monkeypatch.setattr(target, getattr(test_settings, name, {}))
19 |
20 |
@pytest.fixture()
def db():
    """Provide the shared SQLAlchemy database object."""
    from tracklater.database import db as _db
    return _db
25 |
--------------------------------------------------------------------------------
/tracklater/tests/test_activitywatch.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.activitywatch import Parser
2 |
3 | import pytest
4 | import os
5 | from datetime import datetime, timedelta
6 |
7 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
8 |
9 |
@pytest.fixture()
def parser():
    """ActivityWatch parser covering the past seven days."""
    start = datetime.utcnow() - timedelta(days=7)
    return Parser(start, datetime.utcnow())


def test_get_entries(parser):
    """The activitywatch test data collapses into a single session."""
    assert len(parser.get_entries()) == 1
21 |
--------------------------------------------------------------------------------
/tracklater/tests/test_app.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import os
3 |
4 | from typing import Any
5 | from datetime import datetime, timedelta
6 |
7 | from tracklater import create_app
8 | from tracklater.models import Entry, Issue, Project
9 |
10 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
11 |
12 | app = create_app()
13 |
14 |
@pytest.fixture
def client(db):
    """Flask test client backed by a throwaway sqlite database file."""
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/database_testing.db'.format(DIRECTORY)
    test_client = app.test_client()
    db.init_app(app)
    with app.app_context():
        db.create_all()

    yield test_client
    # Remove the sqlite file so every test run starts from scratch.
    os.remove("{}/database_testing.db".format(DIRECTORY))
25 |
26 |
def test_app_root(client):
    """GET / serves successfully."""
    assert client.get('/').status_code == 200


def test_app_listmodules(client):
    """GET /listmodules serves successfully."""
    assert client.get('/listmodules').status_code == 200


def test_app_fetchdata(client):
    """GET /fetchdata serves successfully."""
    assert client.get('/fetchdata').status_code == 200
40 |
41 |
def test_database_entries(client, db: Any):
    """store_parser_to_database persists every parsed toggl entry."""
    from tracklater.timemodules.toggl import Parser
    from tracklater.main import store_parser_to_database
    end_date = datetime.utcnow()
    start_date = end_date - timedelta(days=7)
    parser = Parser(start_date, end_date)
    parser.entries = parser.get_entries()
    with app.app_context():
        store_parser_to_database(parser, 'toggl', start_date, end_date)

        assert len(Entry.query.all()) == len(parser.entries)
54 |
55 |
def test_database_projects(client, db: Any):
    """store_parser_to_database persists every parsed toggl project."""
    from tracklater.timemodules.toggl import Parser
    from tracklater.main import store_parser_to_database
    end_date = datetime.utcnow()
    start_date = end_date - timedelta(days=7)
    parser = Parser(start_date, end_date)
    parser.projects = parser.get_projects()
    with app.app_context():
        store_parser_to_database(parser, 'toggl', start_date, end_date)

        assert len(Project.query.all()) == len(parser.projects)
68 |
69 |
def test_database_issues(client, db: Any):
    """store_parser_to_database persists every parsed jira issue."""
    from tracklater.timemodules.jira import Parser
    from tracklater.main import store_parser_to_database
    end_date = datetime.utcnow()
    start_date = end_date - timedelta(days=7)
    parser = Parser(start_date, end_date)
    parser.issues = parser.get_issues()
    with app.app_context():
        store_parser_to_database(parser, 'jira', start_date, end_date)

        assert len(Issue.query.all()) == len(parser.issues)
82 |
83 |
@pytest.mark.skip('Broken from commit 27b780c7d23261669e8e5a997403c6a80d6bbaeb')
def test_database_jira_caching(client, db: Any):
    # Verifies that once issues are stored, a cached parser only re-fetches
    # the newest page of issues. Skipped: broken by the commit above.
    from tracklater.timemodules.jira import Parser
    from tracklater.main import store_parser_to_database, set_parser_caching_data
    start_date = datetime.utcnow() - timedelta(days=7)
    end_date = datetime.utcnow()
    parser = Parser(start_date, end_date)
    parser.issues = parser.get_issues()
    with app.app_context():
        store_parser_to_database(parser, 'jira', start_date, end_date)

    parser = Parser(start_date, end_date)
    # Skip caching first
    parser.issues = parser.get_issues()
    # We get 6 issues again
    assert len(parser.issues) == 6

    parser = Parser(start_date, end_date)
    with app.app_context():
        set_parser_caching_data(parser, 'jira') # Fetch caching values
        parser.issues = parser.get_issues()
        # Since all 6 are already in database, get_issues should just run once (check jira.py)
        # Hence, with testing data we get the 3 last issues as a response
        assert len(parser.issues) == 3
108 |
109 |
@pytest.mark.skip('Broken from commit be24bce7a5c3e472c49c1a9e5712712501453ba5')
def test_database_slack_caching(client, db: Any):
    # Verifies that already-fetched date ranges are not re-fetched, while a
    # widened range fetches again. Skipped: broken by the commit above.
    from tracklater.timemodules.slack import Parser
    from tracklater.main import store_parser_to_database, set_parser_caching_data
    start_date = datetime.utcnow() - timedelta(days=7)
    end_date = datetime.utcnow()
    parser = Parser(start_date, end_date)
    parser.entries = parser.get_entries()
    with app.app_context():
        store_parser_to_database(parser, 'slack', start_date, end_date)

    parser = Parser(start_date, end_date)
    # Skip caching first
    parser.entries = parser.get_entries()
    # We get 2 entries again
    assert len(parser.entries) == 2

    parser = Parser(start_date, end_date)
    with app.app_context():
        set_parser_caching_data(parser, 'slack') # Fetch caching values
        parser.entries = parser.get_entries()
        # Since we already fetched entries for these dates we don't get any new ones
        assert len(parser.entries) == 0

    parser = Parser(start_date, end_date + timedelta(hours=1))
    with app.app_context():
        set_parser_caching_data(parser, 'slack') # Fetch caching values
        parser.entries = parser.get_entries()
        assert len(parser.entries) == 2
139 |
--------------------------------------------------------------------------------
/tracklater/tests/test_gitmodule.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.gitmodule import Parser
2 |
3 | import pytest
4 | import os
5 | from datetime import datetime, timedelta
6 |
7 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
8 |
9 |
@pytest.fixture(autouse=True)
def mock_git(monkeypatch):
    """Pin all commit timestamps to a fixed point four days in the past."""
    monkeypatch.setattr('tracklater.timemodules.gitmodule.git_time_to_datetime',
                        lambda x: datetime.utcnow() - timedelta(days=4))


@pytest.fixture()
def parser():
    """Git parser over the past week with blank credentials."""
    _parser = Parser(datetime.utcnow() - timedelta(days=7), datetime.utcnow())
    _parser.credentials = ('', '')
    return _parser


def test_get_entries(parser):
    """
    No real tests for gitmodule... yet.
    """
    assert len(parser.get_entries()) == 24
29 |
--------------------------------------------------------------------------------
/tracklater/tests/test_jira.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.jira import Parser, Provider
2 |
3 | import pytest
4 | import os
5 |
6 | from datetime import datetime, timedelta
7 |
8 | TEST_URL = 'mock://jira.test'
9 | TEST_KEY = 'TEST'
10 |
11 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
12 |
13 |
@pytest.fixture()
def parser():
    """Jira parser over the past week with blank credentials."""
    _parser = Parser(datetime.utcnow() - timedelta(days=7), datetime.utcnow())
    _parser.credentials = ('', '')
    return _parser


@pytest.fixture()
def provider():
    """Jira provider with blank credentials."""
    return Provider(('', ''))
25 |
26 |
def test_create_parser(parser: Parser):
    """The fixture builds a usable Parser."""
    assert parser is not None


def test_fetch_issues(provider: Provider):
    """fetch_issues pages through the mock jira data."""
    first_page = provider.fetch_issues(TEST_URL, TEST_KEY)
    assert first_page['issues'][0]['key'] == 'TEST-1'

    second_page = provider.fetch_issues(TEST_URL, TEST_KEY, start_from=3)
    assert second_page['issues'][0]['key'] == 'TEST-4'


def test_get_group_issues(parser: Parser, provider: Provider):
    """All six mock issues are returned for group1."""
    from tracklater import settings
    group_issues = parser.get_group_issues(provider, 'group1', settings.JIRA['group1'])
    assert len(group_issues) == 6


def test_get_issues(parser: Parser):
    """get_issues aggregates all six mock issues."""
    assert len(parser.get_issues()) == 6


def test_cache(parser: Parser):
    """A repeated get_issues call still reports all six issues."""
    parser.get_issues()
    assert len(parser.get_issues()) == 6
54 |
--------------------------------------------------------------------------------
/tracklater/tests/test_main.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.interfaces import AbstractProvider
2 |
3 | from tracklater.utils import obj_from_dict
4 |
5 |
def test_obj_from_dict():
    """obj_from_dict builds attribute chains keyed by 'name(args)' signatures."""
    spec = {
        'test(arg1,arg2)': {'foo': {'bar': 'The Crazy End'}},
        'test(arg1)': {'foo': {'bar': 'The Good End'}},
        'test()': {'foo': {'bar': 'The Best End'}},
    }
    obj = obj_from_dict(spec)

    # Each call signature resolves to its own nested attribute tree.
    assert obj.test('arg1', 'arg2').foo.bar == 'The Crazy End'
    assert obj.test('arg1').foo.bar == 'The Good End'
    assert obj.test().foo.bar == 'The Best End'
29 |
30 |
class TestProvider(AbstractProvider):
    """Minimal provider used to exercise AbstractProvider's TESTING dispatch:
    `normal_method` should route to `test_normal_method` while TESTING is on.
    """

    def normal_method(self, arg1=None):
        return "normal"

    def test_normal_method(self, arg1=None):
        return "test"


def test_provider_normal(monkeypatch):
    """With TESTING off the real implementation runs."""
    monkeypatch.setattr('tracklater.settings.TESTING', False)
    assert TestProvider().normal_method() == "normal"


def test_provider_testing(monkeypatch):
    """With TESTING on (the conftest default) the test double runs."""
    assert TestProvider().normal_method() == "test"
50 |
--------------------------------------------------------------------------------
/tracklater/tests/test_slack.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.slack import Parser
2 |
3 | import pytest
4 | import os
5 |
6 | from datetime import datetime, timedelta
7 |
8 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
9 |
10 |
@pytest.fixture()
def parser():
    """Slack parser covering the past seven days."""
    start = datetime.utcnow() - timedelta(days=7)
    return Parser(start, datetime.utcnow())


def test_get_entries(parser):
    """The slack test data yields two entries."""
    assert len(parser.get_entries()) == 2
22 |
--------------------------------------------------------------------------------
/tracklater/tests/test_taiga.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.taiga import Parser
2 |
3 | import pytest
4 | import os
5 |
6 | from datetime import datetime, timedelta
7 |
8 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
9 |
10 |
@pytest.fixture()
def parser():
    """Taiga parser covering the past seven days."""
    start = datetime.utcnow() - timedelta(days=7)
    return Parser(start, datetime.utcnow())


def test_get_issues(parser):
    """The taiga test data yields three issues."""
    assert len(parser.get_issues()) == 3
22 |
--------------------------------------------------------------------------------
/tracklater/tests/test_thyme.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.thyme import Parser
2 |
3 | import pytest
4 | import os
5 | from datetime import datetime, timedelta
6 |
7 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
8 |
9 |
@pytest.fixture()
def parser():
    """Thyme parser covering the past seven days."""
    start = datetime.utcnow() - timedelta(days=7)
    return Parser(start, datetime.utcnow())


def test_get_entries(parser):
    """The thyme test data yields a single entry."""
    assert len(parser.get_entries()) == 1
21 |
--------------------------------------------------------------------------------
/tracklater/tests/test_toggl.py:
--------------------------------------------------------------------------------
1 | from tracklater.timemodules.toggl import Parser
2 |
3 | import pytest
4 | import os
5 |
6 | from datetime import datetime, timedelta
7 | from tracklater.models import Entry
8 |
9 | DIRECTORY = os.path.dirname(os.path.realpath(__file__))
10 |
11 |
@pytest.fixture()
def parser():
    """Toggl parser covering the past seven days."""
    start = datetime.utcnow() - timedelta(days=7)
    return Parser(start, datetime.utcnow())


def test_toggl_get_entries(parser):
    """The toggl test data contains three entries."""
    assert len(parser.get_entries()) == 3


def test_toggl_get_projects(parser):
    """The toggl test data contains four projects."""
    assert len(parser.get_projects()) == 4


def test_toggl_add_modify_delete(parser: Parser):
    """Entries survive a create/update/delete round trip."""
    parser.entries = parser.get_entries()
    new_entry = Entry(
        id="4",
        start_time=datetime.utcnow() - timedelta(hours=2),
        end_time=datetime.utcnow() - timedelta(hours=1),
        title="Toggl new entry (4)",
        project="10",
    )
    created = parser.create_entry(new_entry, None)
    assert created.id == "4"

    created.title = "Toggl modified entry"
    updated = parser.update_entry("4", created, None)
    assert updated.title == "Toggl modified entry"

    parser.delete_entry("4")
49 |
--------------------------------------------------------------------------------
/tracklater/timemodules/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Eerovil/TrackLater/e19f3d3081e8329eb440967f122b4a3eeff8a18d/tracklater/timemodules/__init__.py
--------------------------------------------------------------------------------
/tracklater/timemodules/activitywatch.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from datetime import timedelta
4 | from typing import List, Any, Optional
5 | import requests
6 |
7 | from tracklater import settings
8 | from tracklater.utils import parse_time
9 | from .interfaces import EntryMixin, AbstractParser, AbstractProvider
10 | from tracklater.models import Entry
11 |
12 | import logging
13 | logger = logging.getLogger(__name__)
14 |
15 |
def get_setting(key, default=None, group='global') -> Any:
    """Read an ACTIVITYWATCH setting for *group* via the shared settings helper."""
    return settings.helper('ACTIVITYWATCH', key, group=group, default=default)
18 |
19 |
# Fallback thresholds in seconds, used when the corresponding setting is
# absent (see Parser._generate_sessions): a window gap of >= IDLE seconds
# or a running session of >= CUTOFF seconds closes the current session.
DEFAULTS = {
    'IDLE': 900,
    'CUTOFF': 300,
}
24 |
25 |
def get_window(entry) -> Optional[str]:
    """Build a human-readable window identifier from an ActivityWatch event.

    Joins the event's app, title and url (when present) with ' - '.
    Returns None when the event carries no 'data.app'.
    """
    data = entry.get('data') or {}
    if not data.get('app'):
        return None
    parts = [data['app']]
    for extra_key in ('title', 'url'):
        if data.get(extra_key):
            parts.append(data[extra_key])
    return ' - '.join(parts)
35 |
36 |
class Parser(EntryMixin, AbstractParser):
    """
    Only implements "get".
    """

    def get_entries(self) -> List[Entry]:
        # Fetch the raw window events and collapse them into work sessions.
        raw_events: List[dict] = self._fetch_events()
        return self._generate_sessions(raw_events)

    def _fetch_events(self) -> List[dict]:
        # Fetch events via the Provider, dropping any without a window name.
        provider = Provider()
        _ret: List[dict] = []
        for raw_event in provider.fetch_events(self.start_date, self.end_date):
            entry = self._parse_raw_event(raw_event)
            if entry:
                _ret.append(entry)
        return _ret

    def _parse_raw_event(self, entry):
        # Normalize one raw event into a dict with active_window/time/end_time.
        # Returns None for events without an app/window name.
        active_window = get_window(entry)
        if active_window is None:
            return None
        time = parse_time(entry['timestamp'])
        # duration may be None; treat it as a zero-length event.
        end_time = time + timedelta(seconds=(entry['duration'] or 0))
        return {
            'active_window': active_window,
            'time': time,
            'end_time': end_time,
            'category': 'work',
        }

    def _generate_sessions(self, entries):
        # Collapse consecutive events into Entry "sessions", accumulating
        # per-window time and per-settings-group time into extra_data.
        def _init_session(entry):
            # Open a new session anchored on this event.
            return Entry(
                start_time=entry['time'],
                end_time=entry['end_time'],
                extra_data={
                    'windows': {},
                    'group': {},
                }
            )

        def _end_session(session, entry):
            # Finalize the session: fix its start time, flatten the window
            # dict into a list and build the human-readable text summary.
            session.start_time = entry['time']
            extra = session.extra_data
            extra['windows'] = [
                {'name': window, 'time': extra['windows'][window]}
                for window in extra['windows']
            ]
            session.text = "\n".join([
                "{}s - {}".format(int(data['time']), data['name'])
                for data in sorted(extra['windows'], key=lambda x: x["time"], reverse=True)
            ])

            # Groups sorted by accumulated seconds, biggest first.
            sorted_groups = sorted(extra['group'].items(), key=lambda val: val[1], reverse=True)

            session.extra_data['groups'] = sorted_groups
            if sorted_groups:
                # The dominant group labels the whole session.
                session.group = sorted_groups[0][0]
                session.text = session.group + "\n" + session.text

        def _add_window(session, window_name, seconds):
            # Accumulate time per window name...
            if window_name not in session.extra_data['windows']:
                session.extra_data['windows'][window_name] = 0
            session.extra_data['windows'][window_name] += seconds

            # ...and per configured group whose KEYWORDS match the window.
            for key in settings.ACTIVITYWATCH:
                for keyword in settings.ACTIVITYWATCH[key].get('KEYWORDS', []):
                    if keyword in window_name:
                        if key not in session.extra_data['group']:
                            session.extra_data['group'][key] = 0
                        session.extra_data['group'][key] += seconds

            return session

        if not entries:
            return []
        next_entry = entries[0]
        # We're going backwards in time while looping these!!!
        sessions = [_init_session(next_entry)]

        for entry in entries[1:]:
            # Identical consecutive windows are merged into one span.
            if next_entry['active_window'] == entry['active_window']:
                continue

            # Time spent in window
            diff = abs((entry['end_time'] - next_entry['end_time']).total_seconds())
            session_length = abs((next_entry['end_time'] - sessions[-1].start_time).total_seconds())

            # Add window name and time spent to extra data
            _add_window(sessions[-1], next_entry['active_window'], diff)

            _idle = get_setting('IDLE', DEFAULTS['IDLE'])
            _cutoff = get_setting('CUTOFF', DEFAULTS['CUTOFF'])

            # A long idle gap or an over-long session closes the current
            # session and opens a new one.
            if (diff >= _idle or session_length >= _cutoff):
                _end_session(sessions[-1], next_entry)
                sessions.append(_init_session(entry))

            next_entry = entry

        _end_session(sessions[-1], next_entry)

        logger.warning("sessions: %s", sessions)

        return sessions
143 |
144 |
class Provider(AbstractProvider):
    def fetch_events(self, start_date, end_date) -> List[dict]:
        """Return all events from the configured ActivityWatch EVENTS_URL.

        NOTE(review): start_date/end_date are unused by the live fetch --
        the URL is expected to encode any filtering.
        """
        url = get_setting('EVENTS_URL')
        response = requests.get(url, headers={'HOST': '127.0.0.1'})
        return list(response.json())

    def test_fetch_events(self, start_date=None, end_date=None):
        """Offline stand-in for fetch_events (presumably dispatched while
        settings.TESTING is on -- see tests/test_main.py)."""
        return [
            {
                "id": 46,
                "timestamp": "2022-09-04T04:41:31.063000+00:00",
                "duration": 10.464,
                "data": {
                    "app": "Google Chrome",
                    "url": "http://localhost:5600/api/0/buckets/",
                    "title": "",
                    "incognito": False
                }
            },
            {
                "id": 45,
                "timestamp": "2022-09-04T04:41:29.999000+00:00",
                "duration": 0.0,
                "data": {
                    "app": "Google Chrome",
                    "url": "chrome://downloads/",
                    "title": "Downloads",
                    "incognito": False
                }
            }
        ]
180 |
--------------------------------------------------------------------------------
/tracklater/timemodules/clockify.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from typing import List, Union, cast, Any, Optional
4 | from datetime import timedelta
5 |
6 | from tracklater.utils import parse_time, _str
7 | from tracklater import settings
8 | from .interfaces import (
9 | EntryMixin, AddEntryMixin, UpdateEntryMixin, DeleteEntryMixin, ProjectMixin, AbstractParser,
10 | AbstractProvider
11 | )
12 | from tracklater.models import Entry, Project, Issue
13 |
14 |
15 | import logging
16 | logger = logging.getLogger(__name__)
17 |
18 |
def get_setting(key, default=None, group='global'):
    """Read a CLOCKIFY setting for *group* via the shared settings helper."""
    return settings.helper('CLOCKIFY', key, group=group, default=default)
21 |
22 |
class Parser(EntryMixin, AddEntryMixin, UpdateEntryMixin, DeleteEntryMixin, ProjectMixin,
             AbstractParser):
    """Parser for the Clockify time tracking service (API v1)."""

    def __init__(self, *args, **kwargs):
        super(Parser, self).__init__(*args, **kwargs)
        self.provider = Provider(get_setting('API_KEY'))
        workspaces = self.provider.request(
            'workspaces', method='GET'
        )
        # Bug fix: previously a workspace was only assigned when more than one
        # existed, leaving provider.workspace as None for single-workspace
        # accounts and breaking every subsequent workspace_request call.
        if workspaces:
            self.provider.workspace = workspaces[0]
        if len(workspaces) > 1:
            if 'WORKSPACE' in settings.CLOCKIFY['global']:
                # Pick the explicitly configured workspace when it exists.
                for workspace in workspaces:
                    if workspace['id'] == settings.CLOCKIFY['global']['WORKSPACE']:
                        self.provider.workspace = workspace
            else:
                logger.warning(
                    "More than one clockify workspace... Using just one (%s)",
                    self.provider.workspace['id']
                )

        self.user = self.provider.request(
            'user', method='GET'
        )

    def get_entries(self) -> List[Entry]:
        """Fetch the current user's time entries within the parser's date range."""
        if self.start_date:
            params = {'start': self.start_date.isoformat() + 'Z',
                      'end': self.end_date.isoformat() + 'Z'}
        else:
            params = {}

        time_entries = []
        data = self.provider.workspace_request(
            'user/{user_id}/time-entries'.format(
                user_id=self.user['id']
            ), params=params, method='GET'
        )
        for entry in data:
            time_entries.append(Entry(
                id=entry['id'],
                start_time=parse_time(entry['timeInterval']['start']),
                end_time=parse_time(entry['timeInterval']['end']),
                title=entry['description'],
                project=entry['projectId'],
            ))

        return time_entries

    def get_projects(self) -> List[Project]:
        """Build Project rows for clients/projects named in the CLOCKIFY settings."""
        projects = []
        clients = self.provider.workspace_request('clients', method='GET')
        _settings = cast(Any, settings.CLOCKIFY)
        for client in clients:
            # Map the clockify client to a settings group by its NAME.
            group = None
            for group_key, conf in _settings.items():
                if conf.get('NAME', None) == client['name']:
                    group = group_key
            if not group:
                continue
            resp = self.provider.workspace_request(
                'projects?clients={}'.format(client['id']), method='GET'
            )
            for project in resp:
                # Only projects explicitly listed in the settings are kept.
                if project['name'] not in _settings[group]['PROJECTS']:
                    continue
                projects.append(Project(
                    pid=project['id'],
                    title="{} - {}".format(client['name'], project['name']),
                    group=group
                ))
        return projects

    @staticmethod
    def _serialize_entry(new_entry: Entry) -> str:
        """Normalize *new_entry* and serialize it to the clockify JSON payload.

        Raises ValueError when end_time is missing.
        """
        if not new_entry.project or new_entry.project == '0':
            # '0' marks "no project"; clockify expects null instead.
            new_entry.project = None
        if new_entry.end_time is None:
            raise ValueError("No end_time")
        return json.dumps({
            'start': new_entry.start_time.isoformat() + 'Z',
            'end': new_entry.end_time.isoformat() + 'Z',
            'description': new_entry.title,
            'projectId': new_entry.project,
        })

    @staticmethod
    def _response_to_entry(entry: dict) -> Entry:
        """Convert a clockify time-entry response dict into an Entry."""
        return Entry(
            id=_str(entry['id']),
            start_time=parse_time(entry['timeInterval']['start']),
            end_time=parse_time(entry['timeInterval']['end']),
            title=entry['description'],
            project=entry['projectId'],
        )

    def create_entry(self, new_entry: Entry, issue: Optional[Issue]) -> Entry:
        """Create *new_entry* in clockify and return the stored Entry."""
        entry = self.provider.workspace_request(
            'time-entries',
            data=self._serialize_entry(new_entry),
            method='POST'
        )
        return self._response_to_entry(entry)

    def update_entry(self, entry_id: str, new_entry: Entry, issue: Optional[Issue]) -> Entry:
        """Overwrite the clockify entry *entry_id* with *new_entry*'s data."""
        entry = self.provider.workspace_request(
            'time-entries/{}'.format(entry_id),
            data=self._serialize_entry(new_entry),
            method='PUT'
        )
        return self._response_to_entry(entry)

    def delete_entry(self, entry_id: str) -> None:
        """Delete the clockify entry *entry_id*."""
        self.provider.workspace_request(
            'time-entries/{}'.format(entry_id),
            data=json.dumps({}),
            method="DELETE"
        )
149 |
150 |
class Provider(AbstractProvider):
    """Thin wrapper around the Clockify REST API (v1)."""

    def __init__(self, api_key):
        self.api_key = api_key
        # NOTE(review): presumably used by test doubles to mint ids -- confirm.
        self.id_counter = 4
        # Selected by Parser.__init__ after listing workspaces.
        self.workspace = None

    def workspace_request(self, endpoint: str, **kwargs) -> Union[List[dict], dict]:
        """Issue *endpoint* scoped to the currently selected workspace."""
        return self.request('workspaces/{workspace_id}/{endpoint}'.format(
            workspace_id=self.workspace['id'],
            endpoint=endpoint
        ), **kwargs)

    def request(self, endpoint: str, **kwargs) -> Union[List[dict], dict]:
        """Issue an authenticated request to the clockify API.

        kwargs are passed through to requests; 'method' (default POST)
        selects the HTTP verb.
        """
        url = 'https://api.clockify.me/api/v1/{}'.format(endpoint)
        kwargs['headers'] = kwargs.get('headers', {
            "Content-Type": "application/json"
        })
        kwargs['headers']['X-Api-Key'] = self.api_key

        # pop() replaces the old try/del/except KeyError dance.
        method = kwargs.pop('method', 'POST').lower()
        response = getattr(requests, method)(url, **kwargs)
        if method == 'delete':
            # DELETE responses carry no JSON body; don't try to parse one.
            return {}
        try:
            return response.json()
        except Exception as e:
            logger.exception("%s: %s", response.content, e)
            raise

    def test_request(self, endpoint: str, **kwargs) -> Union[List[dict], dict, str]:
        """Offline stand-in for request(); returns an empty result set."""
        return [{}]
188 |
--------------------------------------------------------------------------------
/tracklater/timemodules/fixture/git_test_data.json:
--------------------------------------------------------------------------------
1 | {
2 | "commits": [
3 | {
4 | "author": {
5 | "email": "test.person@email.com"
6 | },
7 | "message": "commit: Repo 1 Branch 1 First commit message",
8 | "authored_datetime": "datetime(1234561)"
9 | },
10 | {
11 | "author": {
12 | "email": "test.person@email.com"
13 | },
14 | "message": "commit: Repo 1 Branch 1 Second commit message",
15 | "authored_datetime": "datetime(1234562)"
16 | },
17 | {
18 | "author": {
19 | "email": "test.person@email.com"
20 | },
21 | "message": "commit: Repo 1 Branch 2 First commit message",
22 | "authored_datetime": "datetime(1234563)"
23 | },
24 | {
25 | "author": {
26 | "email": "test.person@email.com"
27 | },
28 | "message": "commit: Repo 1 Branch 2 Second commit message",
29 | "authored_datetime": "datetime(1234564)"
30 | },
31 | {
32 | "author": {
33 | "email": "other.guy@email.com"
34 | },
35 | "message": "commit: Repo 1 Branch 2 commit by someone else",
36 | "authored_datetime": "datetime(1234565)"
37 | },
38 | {
39 | "author": {
40 | "email": "test.person@email.com"
41 | },
42 | "message": "commit: Repo 2 Branch 1 First commit message",
43 | "authored_datetime": "datetime(1234566)"
44 | },
45 | {
46 | "author": {
47 | "email": "test.person@email.com"
48 | },
49 | "message": "commit: Repo 2 Branch 1 Second commit message",
50 | "authored_datetime": "datetime(1234567)"
51 | },
52 | {
53 | "author": {
54 | "email": "test.person@email.com"
55 | },
56 | "message": "commit: Repo 3 Branch 1 First commit message",
57 | "authored_datetime": "datetime(1234568)"
58 | },
59 | {
60 | "author": {
61 | "email": "test.person@email.com"
62 | },
63 | "message": "commit: Repo 3 Branch 1 Second commit message",
64 | "authored_datetime": "datetime(1234569)"
65 | }
66 | ]
67 | }
68 |
--------------------------------------------------------------------------------
/tracklater/timemodules/fixture/search_results.json:
--------------------------------------------------------------------------------
1 | {
2 | "expand": "schema,names",
3 | "startAt": 0,
4 | "maxResults": 100,
5 | "total": 6,
6 | "issues": [
7 | {
8 | "expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields",
9 | "id": "1",
10 | "self": "mock://jira.test/rest/api/2/issue/1",
11 | "key": "TEST-1",
12 | "fields": {
13 | "summary": "Example feature 1",
14 | "issuetype": {
15 | "self": "mock://jira.test/rest/api/2/issuetype/1",
16 | "id": "1",
17 | "description": "Feature that adds extra functionality",
18 | "iconUrl": "mock://jira.test/secure/viewavatar?size=xsmall&avatarId=1&avatarType=issuetype",
19 | "name": "Feature",
20 | "subtask": false,
21 | "avatarId": 1
22 | }
23 | }
24 | },
25 | {
26 | "expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields",
27 | "id": "2",
28 | "self": "mock://jira.test/rest/api/2/issue/2",
29 | "key": "TEST-2",
30 | "fields": {
31 | "summary":"Example feature 2",
32 | "issuetype": {
33 | "self": "mock://jira.test/rest/api/2/issuetype/1",
34 | "id": "1",
35 | "description": "Feature that adds extra functionality",
36 | "iconUrl": "mock://jira.test/secure/viewavatar?size=xsmall&avatarId=1&avatarType=issuetype",
37 | "name": "Feature",
38 | "subtask": false,
39 | "avatarId": 1
40 | }
41 | }
42 | },
43 | {
44 | "expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields",
45 | "id": "3",
46 | "self": "mock://jira.test/rest/api/2/issue/3",
47 | "key": "TEST-3",
48 | "fields": {
49 | "summary": "Example bug 1",
50 | "issuetype": {
51 | "self": "mock://jira.test/rest/api/2/issuetype/2",
52 | "id": "2",
53 | "description": "Bug that breaks stuff",
54 | "iconUrl": "mock://jira.test/secure/viewavatar?size=xsmall&avatarId=2&avatarType=issuetype",
55 | "name": "Bug",
56 | "subtask": false,
57 | "avatarId": 2
58 | }
59 | }
60 | }
61 | ]
62 | }
--------------------------------------------------------------------------------
/tracklater/timemodules/fixture/search_results_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "expand": "schema,names",
3 | "startAt": 4,
4 | "maxResults": 100,
5 | "total": 6,
6 | "issues": [
7 | {
8 | "expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields",
9 | "id": "4",
10 | "self": "mock://jira.test/rest/api/2/issue/4",
11 | "key": "TEST-4",
12 | "fields": {
13 | "summary": "Example feature 3",
14 | "issuetype": {
15 | "self": "mock://jira.test/rest/api/2/issuetype/1",
16 | "id": "1",
17 | "description": "Feature that adds extra functionality",
 18 |           "iconUrl": "mock://jira.test/secure/viewavatar?size=xsmall&avatarId=1&avatarType=issuetype",
19 | "name": "Feature",
20 | "subtask": false,
21 | "avatarId": 1
22 | }
23 | }
24 | },
25 | {
26 | "expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields",
27 | "id": "5",
28 | "self": "mock://jira.test/rest/api/2/issue/5",
29 | "key": "TEST-5",
30 | "fields": {
31 | "summary":"Example feature 4",
32 | "issuetype": {
33 | "self": "mock://jira.test/rest/api/2/issuetype/1",
34 | "id": "1",
35 | "description": "Feature that adds extra functionality",
36 | "iconUrl": "mock://jira.test/secure/viewavatar?size=xsmall&avatarId=1&avatarType=issuetype",
37 | "name": "Feature",
38 | "subtask": false,
39 | "avatarId": 1
40 | }
41 | }
42 | },
43 | {
44 | "expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields",
45 | "id": "6",
46 | "self": "mock://jira.test/rest/api/2/issue/6",
47 | "key": "TEST-6",
48 | "fields": {
49 | "summary": "Example bug 2",
50 | "issuetype": {
51 | "self": "mock://jira.test/rest/api/2/issuetype/2",
52 | "id": "2",
53 | "description": "Bug that breaks stuff",
54 | "iconUrl": "mock://jira.test/secure/viewavatar?size=xsmall&avatarId=2&avatarType=issuetype",
55 | "name": "Bug",
56 | "subtask": false,
57 | "avatarId": 2
58 | }
59 | }
60 | }
61 | ]
62 | }
--------------------------------------------------------------------------------
/tracklater/timemodules/github.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from typing import List, cast, Any
4 |
5 | from tracklater import settings
6 | from .interfaces import IssueMixin, AbstractParser, AbstractProvider
7 | from tracklater.models import Issue
8 |
9 | import logging
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 | API_URL = "https://api.github.com/graphql"
14 |
15 |
class Parser(IssueMixin, AbstractParser):
    """Fetches GitHub issues for every configured repository."""

    def get_issues(self) -> List[Issue]:
        """Log in, then collect the issues of each configured GitHub project."""
        self.github_login()
        collected: List[Issue] = []
        for project in self.github_projects:
            raw_issues = self.provider.get_issues(project["repo"], project["id"])
            collected.extend(
                Issue(
                    key="#{}".format(raw["ref"]),
                    id=raw["id"],
                    title=raw["subject"],
                    group=project["group"],
                )
                for raw in raw_issues
            )
        return collected

    def github_login(self) -> None:
        """Create the API provider and build the list of configured projects."""
        github_settings = cast(Any, settings.GITHUB)
        self.provider = Provider(github_settings["global"]["TOKEN"])
        self.github_projects: List[dict] = []
        id_counter = 0
        for group, data in github_settings.items():
            # Only settings groups that name a repository are GitHub projects.
            if "repo" not in data:
                continue
            self.github_projects.append(
                {"id": id_counter, "group": group, "repo": data["repo"]}
            )
            id_counter += 1
46 |
47 |
class Provider(AbstractProvider):
    """Minimal GraphQL client for fetching repository issues from GitHub."""

    def __init__(self, token):
        self.token = token
        self.headers = {"Authorization": "Bearer {}".format(self.token)}

    def get_issues(self, repo, project_id):
        """Return all issues of `repo` as dicts with keys ref/subject/id.

        `repo` is an (owner, name) pair. Follows GraphQL cursor pagination
        until `hasNextPage` is false. `project_id` is accepted for interface
        parity with the other providers but is not sent to the API.
        """
        query = """
        query {
            repository(owner:"#{repo_owner}", name:"#{repo_name}") {
                issues(first:100#{cursor}) {
                    totalCount
                    edges {
                        node {
                            title
                            number
                            id
                        }
                    }
                    pageInfo {
                        endCursor
                        hasNextPage
                    }
                }
            }
        }
        """
        issues: List[dict] = []
        cursor = ""
        while True:
            # Build the request payload once; it was previously constructed
            # twice (once for logging, once for the request).
            payload = json.dumps(
                {
                    "query": query.replace("#{repo_owner}", repo[0])
                    .replace("#{repo_name}", repo[1])
                    .replace("#{cursor}", cursor)
                }
            )
            # Routine request/response tracing belongs at DEBUG, not ERROR.
            logger.debug(payload)
            response = requests.post(
                API_URL,
                data=payload,
                headers=self.headers,
            )
            # Parse the body once instead of calling response.json() twice.
            body = response.json()
            logger.debug(body)
            data = body["data"]
            issues += [
                {
                    "ref": edge["node"]["number"],
                    "subject": edge["node"]["title"],
                    "id": edge["node"]["id"],
                }
                for edge in data["repository"]["issues"]["edges"]
            ]
            # Paginate
            if data["repository"]["issues"]["pageInfo"]["hasNextPage"]:
                cursor = ' after:"{}"'.format(
                    data["repository"]["issues"]["pageInfo"]["endCursor"]
                )
            else:
                break
        return issues
115 |
--------------------------------------------------------------------------------
/tracklater/timemodules/gitmodule.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | import git
3 | import pytz
4 | import os
5 | import json
6 | from datetime import datetime
7 |
8 | from tracklater.utils import obj_from_dict
9 | from tracklater import settings
10 | from tracklater.timemodules.interfaces import EntryMixin, AbstractParser, AbstractProvider
11 | from tracklater.models import Entry
12 |
13 | import logging
14 | logger = logging.getLogger(__name__)
15 |
16 |
def get_setting(key, default=None, group='global'):
    """Read a GIT module setting through the shared settings helper."""
    value = settings.helper('GIT', key, group=group, default=default)
    return value
19 |
20 |
21 | FIXTURE_DIR = os.path.dirname(os.path.realpath(__file__)) + "/fixture"
22 |
23 |
def git_time_to_datetime(_datetime):
    """Normalize git's timezone-aware timestamp to a naive UTC datetime."""
    as_utc = _datetime.astimezone(pytz.utc)
    return as_utc.replace(tzinfo=None)
27 |
28 |
class Parser(EntryMixin, AbstractParser):
    def get_entries(self) -> List[Entry]:
        """Collect the user's own commits from all configured repos as entries."""
        provider = Provider()
        own_emails = settings.GIT['global']['EMAILS']
        entries: List[Entry] = []
        for group, data in settings.GIT.items():
            for repo_path in data.get('REPOS', []):
                repo_name = repo_path.split('/')[-1]
                for commit in provider.get_log_entries(repo_path, start_date=self.start_date):
                    # Skip commits authored by someone else.
                    if commit.author.email not in own_emails:
                        logger.info(commit.author.email)
                        continue
                    commit_time = git_time_to_datetime(commit.authored_datetime)
                    # Keep only commits inside the requested window.
                    if not (self.start_date <= commit_time <= self.end_date):
                        continue

                    entries.append(Entry(
                        text="{} - {}".format(repo_name, commit.message),
                        start_time=commit_time,
                        group=group,
                    ))
        return entries
51 |
52 |
class Provider(AbstractProvider):
    def get_log_entries(self, repo_path, start_date=None):
        """Yield commits from every branch head of the repo at `repo_path`.

        Iteration of a head stops at the first commit older than `start_date`
        (naive UTC). Each commit is yielded at most once, even when it is
        reachable from several heads (branches share most of their history).
        """
        repo = git.Repo(repo_path)
        seen = set()  # hexshas already yielded across heads
        for head in repo.heads:
            iterator = repo.iter_commits(head)
            for commit in iterator:
                try:
                    if start_date and git_time_to_datetime(commit.authored_datetime) < start_date:
                        break
                except Exception as e:
                    # Best effort: skip commits whose date cannot be handled.
                    logger.warning(e)
                    continue
                if commit.hexsha in seen:
                    continue
                seen.add(commit.hexsha)
                yield commit

    def test_get_log_entries(self, repo_path, start_date=None):
        """Fixture-backed stand-in used when settings.TESTING is set."""
        with open(FIXTURE_DIR + '/git_test_data.json', 'r') as f:
            _git = obj_from_dict(json.load(f))

        for commit in _git.commits:
            yield commit
73 |
--------------------------------------------------------------------------------
/tracklater/timemodules/interfaces.py:
--------------------------------------------------------------------------------
import functools
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from datetime import datetime
from typing import List, Optional

from tracklater import settings
from tracklater.models import Entry, Issue, Project
8 |
9 |
@dataclass
class CachingData:
    """Cached state of a parser's previous fetch, as persisted to the DB.

    Used by AbstractParser.get_offset_dates to shrink the date range that
    actually needs to be re-fetched from the remote service.
    """
    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None
    issue_count: Optional[int] = None
    entry_count: Optional[int] = None
    project_count: Optional[int] = None
17 |
18 |
def testing_decorator(func):
    """Wrap `func` so its `test_`-prefixed stand-in runs under settings.TESTING.

    Outside testing the original function is called unchanged. When
    settings.TESTING is truthy, a call to `method` is redirected to
    `self.test_method`; a missing stand-in raises NotImplementedError.
    Functions already prefixed with "test_" (and dunders) are never
    redirected. functools.wraps preserves the wrapped function's metadata,
    so decorated methods keep their original __name__/__doc__.
    """
    @functools.wraps(func)
    def _func(*args, **kwargs):
        self = args[0]

        if not getattr(settings, 'TESTING', False):
            return func(*args, **kwargs)

        if func.__name__.startswith("test_") or func.__name__.startswith("__"):
            return func(*args, **kwargs)

        test_name = "test_{}".format(func.__name__)
        if not hasattr(self, test_name):
            raise NotImplementedError("No test method for {}.{}".format(self, func.__name__))

        # Drop `self` from args: the stand-in is fetched as a bound method.
        return getattr(self, test_name)(*args[1:], **kwargs)

    return _func
35 |
36 |
class ProviderMetaclass(type):
    """Metaclass routing every callable attribute through testing_decorator."""

    def __new__(cls, name, bases, local):
        namespace = {
            attr: testing_decorator(value) if callable(value) else value
            for attr, value in local.items()
        }
        return type.__new__(cls, name, bases, namespace)
44 |
45 |
class AbstractProvider(metaclass=ProviderMetaclass):
    """
    Base class for service providers (API clients).

    Through ProviderMetaclass, every method call is redirected to the
    matching "test_"-prefixed method when settings.TESTING == True.
    """
    pass
51 |
52 |
class AbstractParser(metaclass=ABCMeta):
    """Base class for all time-module parsers.

    Holds the requested date range, the fetched results, and the cached
    state of the previous fetch. Capabilities are added via the mixins
    defined below; concrete modules implement the corresponding get_* hooks.
    """

    def __init__(self, start_date: datetime, end_date: datetime) -> None:
        self.start_date: datetime = start_date
        self.end_date: datetime = end_date
        self.entries: List[Entry] = []
        self.projects: List[Project] = []
        self.issues: List[Issue] = []
        # State of the previous, persisted fetch; see get_offset_dates().
        self.caching: CachingData = CachingData()

    def set_database_values(self, start_date=None, end_date=None, issue_count=None,
                            entry_count=None, project_count=None) -> None:
        """Populate self.caching from values persisted in the database."""
        self.caching.start_date = start_date
        self.caching.end_date = end_date
        self.caching.issue_count = issue_count
        self.caching.entry_count = entry_count
        self.caching.project_count = project_count

    def get_offset_dates(self):
        """
        Use cached api call start and end date to get a smart timeframe to use
        e.g. We already have an api call for (Tue-Fri), and we try to get data for
        (Mon-Wed). In this case this method returns (Mon-Tue).

        Diagram legend: c = cached boundary, a = requested boundary,
        x = both coincide, ( ) = the range this method returns.
        """
        # No cached fetch yet: the full requested range must be fetched.
        if not self.caching.start_date or not self.caching.end_date:
            return (self.start_date, self.end_date)

        # Cache covers the tail of the request: fetch only the missing head.
        # ---a---c---a---c------
        #    (       )
        # ---a---c-------x------
        #    (       )
        # -a---a-c-------c------
        #  (     )
        if (self.caching.start_date > self.start_date
                and self.caching.end_date >= self.end_date):
            return (self.start_date, self.caching.start_date)

        # Cache fully covers the request: nothing needs to be fetched.
        # ------c-a--a---c------
        #
        # ------x--------x------
        #
        if (self.caching.start_date <= self.start_date
                and self.caching.end_date >= self.end_date):
            return (None, None)

        # Cache covers the head of the request: fetch only the missing tail.
        # ------c---a----c--a---
        #                (  )
        # ------c--------c-a--a-
        #                ( )
        # ------x--------c---a-
        #                (   )
        if (self.caching.start_date <= self.start_date
                and self.caching.end_date < self.end_date):
            return (self.caching.end_date, self.end_date)

        # Other cases, just skip caching
        return (self.start_date, self.end_date)

    @property
    def capabilities(self) -> List[str]:
        # Extended by the mixins; each appends its capability keyword.
        return []

    @abstractmethod
    def parse(self) -> None:
        """Fetch data for the date range; composed via the mixins below."""
        pass
117 |
118 |
class EntryMixin(AbstractParser):
    """Adds time-entry fetching to a parser."""

    def parse(self) -> None:
        self.entries = self.get_entries()
        super().parse()

    @property
    def capabilities(self) -> List[str]:
        return super().capabilities + ['entries']

    @abstractmethod
    def get_entries(self) -> List[Entry]:
        raise NotImplementedError()
132 |
133 |
class ProjectMixin(AbstractParser):
    """Adds project fetching to a parser."""

    def parse(self) -> None:
        self.projects = self.get_projects()
        super().parse()

    @property
    def capabilities(self) -> List[str]:
        return super().capabilities + ['projects']

    @abstractmethod
    def get_projects(self) -> List[Project]:
        raise NotImplementedError()
147 |
148 |
class IssueMixin(AbstractParser):
    """Adds issue fetching and lookup to a parser."""

    def parse(self) -> None:
        self.issues = self.get_issues()
        super().parse()

    @property
    def capabilities(self) -> List[str]:
        return super().capabilities + ['issues']

    @abstractmethod
    def get_issues(self) -> List[Issue]:
        raise NotImplementedError()

    def find_issue(self, uuid) -> Optional[Issue]:
        """Return the fetched issue with the given uuid, or None."""
        return next((issue for issue in self.issues if issue.uuid == uuid), None)
168 |
169 |
class AddEntryMixin(AbstractParser):
    """Adds entry-creation capability to a parser."""

    @abstractmethod
    def create_entry(self, new_entry: Entry, issue: Issue) -> Entry:
        raise NotImplementedError()

    @property
    def capabilities(self) -> List[str]:
        return super().capabilities + ['addentry']
179 |
180 |
class DeleteEntryMixin(AbstractParser):
    """Adds entry-deletion capability to a parser."""

    @abstractmethod
    def delete_entry(self, entry_id: str) -> None:
        raise NotImplementedError()

    @property
    def capabilities(self) -> List[str]:
        return super().capabilities + ['deleteentry']
190 |
191 |
class UpdateEntryMixin(AbstractParser):
    """Adds entry-update capability to a parser."""

    @abstractmethod
    def update_entry(self, entry_id: str, new_entry: Entry, issue: Issue) -> Entry:
        raise NotImplementedError()

    @property
    def capabilities(self) -> List[str]:
        return super().capabilities + ['updateentry']
201 |
--------------------------------------------------------------------------------
/tracklater/timemodules/jira.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import os
3 | import json
4 |
5 | from .interfaces import IssueMixin, AbstractParser, AbstractProvider
6 |
7 | from tracklater import settings
8 | from tracklater.models import Issue
9 |
10 | from typing import Any, List
11 |
12 | import logging
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | FIXTURE_DIR = os.path.dirname(os.path.realpath(__file__)) + "/fixture"
17 |
18 |
def get_setting(key, default=None, group='global') -> Any:
    """Read a JIRA module setting through the shared settings helper."""
    value = settings.helper('JIRA', key, group=group, default=default)
    return value
21 |
22 |
class Parser(IssueMixin, AbstractParser):
    """Fetches issues from every configured JIRA instance.

    The redundant __init__ that only forwarded to super() was removed;
    the inherited AbstractParser.__init__ is used directly.
    """

    def get_issues(self) -> List[Issue]:
        """Fetch issues for each settings.JIRA group with its own credentials."""
        issues: List[Issue] = []
        for group, group_settings in settings.JIRA.items():
            provider = Provider(get_setting('CREDENTIALS', group=group))
            issues += self.get_group_issues(provider, group, group_settings)
        return issues

    def get_group_issues(self, provider, group, group_settings) -> List[Issue]:
        """Page through a JIRA project's issues, skipping pages already cached.

        Rewinds the cached count by two pages (overlap guard), then fetches
        pages until the reported total is reached. At least one page is
        always fetched.
        """
        cached_issues = self.caching.issue_count or 0
        # Go back a few pages to be sure
        cached_issues = max(cached_issues - provider.ISSUES_PER_PAGE * 2, 0)
        issues: List[Issue] = []
        # First call only supplies the current total for the loop condition.
        latest_issues = provider.fetch_issues(
            project_key=group_settings['PROJECT_KEY'],
            url=group_settings['URL']
        )
        run_once = False
        while latest_issues['total'] - (cached_issues + len(issues)) > 0 or not run_once:
            run_once = True
            logger.warning(
                'Fetching issues %s to %s',
                cached_issues + len(issues), cached_issues + len(issues) + provider.ISSUES_PER_PAGE
            )
            new_issues = provider.fetch_issues(
                url=group_settings['URL'],
                project_key=group_settings['PROJECT_KEY'],
                start_from=(cached_issues + len(issues))
            )['issues']
            logger.warning(new_issues)
            for issue in new_issues:
                # Overlapping pages can repeat issues; keep the first occurrence.
                if any(existing.key == issue['key'] for existing in issues):
                    continue
                issues.append(Issue(
                    key=issue['key'],
                    title=issue['fields']['summary'],
                    group=group,
                    extra_data={'type': issue['fields']['issuetype']['name']},
                ))
        return issues
71 |
72 |
class Provider(AbstractProvider):
    """HTTP client for the JIRA REST search API."""

    def __init__(self, credentials):
        # (user, password/api-token) pair used for HTTP basic auth.
        self.credentials = (credentials[0], credentials[1])
        # Small page size under test so pagination actually gets exercised.
        self.ISSUES_PER_PAGE = 100 if not getattr(settings, 'TESTING', False) else 3

    def fetch_issues(self, url, project_key, start_from=None) -> dict:
        """Fetch one page of issues; returns {} when the response isn't JSON."""
        start_str = '&startAt={}'.format(start_from) if (start_from and start_from > 0) else ''
        response = requests.get(
            '{JIRA_URL}/rest/api/2/search?jql=project={JIRA_KEY}+order+by+id&fields=key,summary'
            ',issuetype&maxResults={ISSUES_PER_PAGE}{start_str}'.format(
                JIRA_URL=url, JIRA_KEY=project_key, start_str=start_str,
                ISSUES_PER_PAGE=self.ISSUES_PER_PAGE
            ), auth=self.credentials
        )
        try:
            return response.json()
        except Exception:
            # Log the raw body so auth/HTML error pages are diagnosable.
            logger.error(response.text)
            return {}

    def test_fetch_issues(self, url, project_key, start_from=None) -> dict:
        """Fixture-backed stand-in used when settings.TESTING is set.

        Stray debug print(start_from) removed.
        """
        if start_from == 3:
            with open(FIXTURE_DIR + '/search_results_2.json', 'r') as f:
                return json.load(f)

        with open(FIXTURE_DIR + '/search_results.json', 'r') as f:
            return json.load(f)
101 |
--------------------------------------------------------------------------------
/tracklater/timemodules/slack.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 | import asyncio
3 |
4 | from slack_sdk.web.async_client import AsyncWebClient
5 | from slack_sdk import WebClient
6 | from datetime import datetime
7 | import pytz
8 | from typing import List
9 |
10 | from tracklater import settings
11 | from .interfaces import EntryMixin, AbstractParser, AbstractProvider
12 | from tracklater.models import Entry
13 |
14 | import logging
15 | logger = logging.getLogger(__name__)
16 |
17 |
class Parser(EntryMixin, AbstractParser):
    """Collects the user's own Slack messages as timeline entries."""

    async def async_parse_channel(self, provider, channel, user_id, users, group):
        """Fetch one channel's history and append the user's own messages.

        Appends to self.async_entries, which get_entries gathers across all
        channels concurrently.
        """
        logger.warning("Getting channel %s for group %s", channel['id'], group)
        history = await provider.api_call(
            "conversations.history",
            data={
                'channel': channel['id'],
                'oldest': (self.start_date - datetime(1970, 1, 1)).total_seconds(),
                'latest': (self.end_date - datetime(1970, 1, 1)).total_seconds()
            }
        )
        # Get either Instant Message recipient or channel name
        if channel.get('is_im', False) and channel.get('user', ''):
            channel_info = users.get(channel.get('user', ''), None)
        else:
            channel_info = channel.get('name_normalized', channel['id'])

        for message in history['messages']:
            if message.get('user', '') == user_id:
                start_time = datetime.fromtimestamp(float(message['ts']))
                # "Guess" that the timestamp has an offset equal to settings.TIMEZONE
                if getattr(settings, 'TIMEZONE', None):
                    # BUG FIX: the timezone was hard-coded to "Europe/Helsinki"
                    # even though the guard checks settings.TIMEZONE.
                    start_time = pytz.timezone(settings.TIMEZONE).localize(
                        start_time
                    ).astimezone(pytz.utc).replace(tzinfo=None)
                # Replace @User id with the name
                for _user_id in users.keys():
                    if _user_id in message['text']:
                        message['text'] = message['text'].replace(
                            _user_id, users[_user_id]
                        )
                text = "{} - {} \n {}".format(group, channel_info, message['text'])
                logger.warning("Found message %s", text)
                self.async_entries.append(Entry(
                    start_time=start_time,
                    title='',
                    text=text,
                    group=group
                ))

    def get_entries(self) -> List[Entry]:
        """Fetch the user's messages from IMs and configured channels."""
        # BUG FIX: initialize once, outside the group loop. Previously the
        # list was reset per group, so only the last group's entries survived.
        self.async_entries: List[Entry] = []
        for group, group_data in settings.SLACK.items():
            slack_token = group_data['API_KEY']
            user_id = group_data['USER_ID']

            provider = Provider(slack_token)
            users_list = provider.api_call("users.list")
            # Map user id -> "First Last" for id replacement in message text.
            users = {}
            for user in users_list['members']:
                users[user['id']] = (
                    user['profile'].get('first_name', 'NULL') + ' ' +
                    user['profile'].get('last_name', 'NULL')
                )
            im_channels = provider.api_call(
                "conversations.list", data={'types': 'mpim,im'}
            )['channels']
            channels = im_channels + [
                {"id": channel_id} for channel_id in group_data.get('CHANNELS', [])
            ]
            async_provider = AsyncProvider(slack_token)

            async def get_channels(channels):
                # Fetch all channel histories of this group concurrently.
                await asyncio.gather(*[
                    self.async_parse_channel(async_provider, channel, user_id, users, group)
                    for channel in channels
                ])

            asyncio.run(get_channels(channels))

        return self.async_entries
91 |
92 |
class AsyncProvider(AbstractProvider):
    """Async Slack client that retries a failed call exactly once."""

    def __init__(self, slack_token):
        self.sc = AsyncWebClient(slack_token)

    async def api_call(self, *args, **kwargs):
        """Call the Slack API; on any error, wait a second and retry once."""
        call = self.sc.api_call
        try:
            return await call(*args, **kwargs)
        except Exception:
            sleep(1)  # brief backoff before the single retry
            return await call(*args, **kwargs)
103 |
104 |
class Provider(AbstractProvider):
    """Synchronous Slack client that retries a failed call exactly once."""

    def __init__(self, slack_token):
        self.sc = WebClient(slack_token)

    def api_call(self, *args, **kwargs):
        """Call the Slack API; on any error, wait a second and retry once."""
        call = self.sc.api_call
        try:
            return call(*args, **kwargs)
        except Exception:
            sleep(1)  # brief backoff before the single retry
            return call(*args, **kwargs)

    def test_api_call(self, *args, **kwargs):
        """Canned responses used when settings.TESTING is set."""
        canned = {
            "users.list": {
                "members": [
                    {
                        "id": "1",
                        "profile": {
                            "first_name": "Firstname",
                            "last_name": "Lastename"
                        }
                    },
                    {
                        "id": "2",
                        "profile": {
                            "first_name": "Secondname",
                            "last_name": "Lastename"
                        }
                    }
                ]
            },
            "conversations.list": {"channels": [{"id": "1"}]},
            "conversations.history": {
                "messages": [
                    {"user": "1", "text": "First Message", "ts": "1234567"},
                    {"user": "1", "text": "Second Message", "ts": "1234568"},
                    {"user": "2", "text": "Third Message", "ts": "1234569"}
                ]
            },
        }
        # Unknown endpoints yield None, matching the original fall-through.
        return canned.get(args[0])
158 |
--------------------------------------------------------------------------------
/tracklater/timemodules/taiga.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from typing import List, cast, Any
3 |
4 | from tracklater import settings
5 | from .interfaces import IssueMixin, AbstractParser, AbstractProvider
6 | from tracklater.models import Issue
7 |
8 | import logging
9 | logger = logging.getLogger(__name__)
10 |
11 | AUTH_URL = 'https://api.taiga.io/api/v1/auth'
12 | ISSUE_URL = 'https://api.taiga.io/api/v1/userstories'
13 | PROJECT_URL = 'https://api.taiga.io/api/v1/projects/by_slug?slug={}'
14 |
15 |
class Parser(IssueMixin, AbstractParser):
    """Fetches Taiga user stories as issues for all configured projects."""

    def get_issues(self) -> List[Issue]:
        """Log in, fetch raw stories, and wrap them as Issue models."""
        self.taiga_login()
        issues: List[Issue] = []

        for raw in self.taiga_fetch_issues():
            matches = [p for p in self.taiga_projects if p['id'] == raw['project']]
            taiga_project = matches[0]
            issues.append(Issue(
                key="#{}".format(raw['ref']),
                id=raw['id'],
                title=raw['subject'],
                group=taiga_project['group']
            ))
        return issues

    def taiga_login(self) -> None:
        """Authenticate and resolve the Taiga project id for each group."""
        taiga_settings = cast(Any, settings.TAIGA)
        self.provider = Provider(taiga_settings['global']['CREDENTIALS'])
        self.taiga_projects: List[dict] = []
        # Get taiga project id for all clients.
        # "No client" not supported yet.
        for group, data in taiga_settings.items():
            if 'project_slug' not in data:
                continue
            project = self.provider.get_project(data['project_slug'])
            self.taiga_projects.append({'id': project['id'], 'group': group})

    def taiga_fetch_issues(self, start_from=None):
        """Return the raw issue dicts of every configured project."""
        collected: List[dict] = []
        for taiga_project in self.taiga_projects:
            collected += self.provider.get_issues(taiga_project['id'])
        return collected
52 |
53 |
class Provider(AbstractProvider):
    """HTTP client for the Taiga REST API."""

    def __init__(self, credentials):
        self.token = self.login(credentials)
        # x-disable-pagination makes the API return all results in one page.
        self.headers = {
            "Authorization": "Bearer {}".format(self.token), 'x-disable-pagination': 'True'
        }

    def login(self, credentials):
        """Exchange a (username, password) pair for an auth token."""
        response = requests.post(
            AUTH_URL, data={
                'type': 'normal',
                'username': credentials[0],
                'password': credentials[1]
            }
        )
        return response.json()['auth_token']

    def test_login(self, credentials):
        """Stand-in for login when settings.TESTING is set."""
        return ""

    def get_project(self, project_slug):
        """Fetch project metadata by its slug."""
        response = requests.get(
            PROJECT_URL.format(project_slug),
            headers=self.headers
        )
        return response.json()

    def test_get_project(self, project_slug):
        # Parameter renamed from the misleading `credentials`: the testing
        # decorator forwards the same argument the real method receives.
        return {'id': "1"}

    def get_issues(self, project_id):
        """Fetch all user stories of a project (pagination disabled)."""
        response = requests.get(
            '{ISSUE_URL}?project={id}'.format(
                ISSUE_URL=ISSUE_URL,
                id=project_id
            ), headers=self.headers
        )
        return response.json()

    def test_get_issues(self, project_id):
        # Parameter renamed from the misleading `credentials` (see above).
        return [
            {
                "ref": "1",
                "id": "1",
                "subject": "Taiga issue 1",
                "project": "1"
            },
            {
                "ref": 2,
                "id": "2",
                "subject": "Taiga issue 2",
                "project": "1"
            },
            {
                "ref": 3,
                "id": "3",
                "subject": "Taiga issue 3",
                "project": "1"
            }
        ]
114 |
--------------------------------------------------------------------------------
/tracklater/timemodules/thyme.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from datetime import timedelta
4 | from typing import List, Any, Optional
5 |
6 | from tracklater import settings
7 | from tracklater.utils import parse_time
8 | from .interfaces import EntryMixin, AbstractParser, AbstractProvider
9 | from tracklater.models import Entry
10 |
11 | import logging
12 | logger = logging.getLogger(__name__)
13 |
14 |
def get_setting(key, default=None, group='global') -> Any:
    """Read a THYME module setting through the shared settings helper."""
    value = settings.helper('THYME', key, group=group, default=default)
    return value
17 |
18 |
# Fallbacks (in seconds) used when the THYME settings omit these keys.
DEFAULTS = {
    'IDLE': 900,  # snapshot gap after which the current session is closed
    'CUTOFF': 300,  # once a session reaches this length it is split at the next window switch
}
23 |
24 |
def get_window(entry, id) -> Optional[dict]:
    """Return the window dict in `entry` with the given ID, or None."""
    return next((w for w in entry['Windows'] if w['ID'] == id), None)
30 |
31 |
class Parser(EntryMixin, AbstractParser):
    """
    Turns thyme window-activity snapshots into work sessions.

    Only implements "get".
    """

    def get_entries(self) -> List[Entry]:
        snapshot_entries: List[Entry] = self._read_files()
        return self._generate_sessions(snapshot_entries)

    def _read_files(self):
        """Read every snapshot in the date range and keep the parseable ones."""
        provider = Provider()
        _ret: List[dict] = []
        for snapshot in provider.read_files(self.start_date, self.end_date):
            entry = self._parse_snapshot_entry(snapshot)
            if entry:
                _ret.append(entry)
        return _ret

    def _parse_snapshot_entry(self, entry):
        """Return {active_window, time, category} or None if nothing active."""
        active_window = get_window(entry, entry['Active'])
        if active_window is None:
            return None
        return {
            'active_window': active_window,
            'time': parse_time(entry['Time']),
            'category': 'work',
        }

    def _generate_sessions(self, entries):
        """Group consecutive snapshots into sessions.

        A session ends when the gap between snapshots reaches IDLE seconds
        or the session itself has lasted at least CUTOFF seconds.
        """
        # Window-name keywords to exclude from sessions. Previously a
        # hard-coded personal hostname; now configurable, keeping the old
        # value as the default for backward compatibility.
        ignored_keywords = get_setting(
            'IGNORED_WINDOWS', ['eero@eero-ThinkPad-L470']
        )
        # Loop-invariant settings, hoisted out of the snapshot loop.
        _idle = get_setting('IDLE', DEFAULTS['IDLE'])
        _cutoff = get_setting('CUTOFF', DEFAULTS['CUTOFF'])

        def _init_session(entry):
            # A fresh session starting at this snapshot's timestamp.
            return Entry(
                start_time=entry['time'],
                extra_data={
                    'windows': {},
                    'group': {},
                }
            )

        def _end_session(session, entry):
            # Close the session and render its window/group summary text.
            session.end_time = entry['time']
            extra = session.extra_data
            extra['windows'] = [
                {'name': window, 'time': extra['windows'][window]}
                for window in extra['windows']
            ]
            session.text = "\n".join([
                "{}s - {}".format(int(data['time']), data['name'])
                for data in sorted(extra['windows'], key=lambda x: x["time"], reverse=True)
            ])

            sorted_groups = sorted(extra['group'].items(), key=lambda val: val[1], reverse=True)

            session.extra_data['groups'] = sorted_groups
            if sorted_groups:
                # The dominant group labels the whole session.
                session.group = sorted_groups[0][0]
                session.text = session.group + "\n" + session.text

        def _add_window(session, window, seconds):
            # Windows matching an ignored keyword don't contribute at all.
            if any(keyword in window['Name'] for keyword in ignored_keywords):
                return session

            if window['Name'] not in session.extra_data['windows']:
                session.extra_data['windows'][window['Name']] = 0
            session.extra_data['windows'][window['Name']] += seconds

            # Attribute the time to every group whose KEYWORDS match.
            for key in settings.THYME:
                for keyword in settings.THYME[key].get('KEYWORDS', []):
                    if keyword in window['Name']:
                        if key not in session.extra_data['group']:
                            session.extra_data['group'][key] = 0
                        session.extra_data['group'][key] += seconds

            return session

        if not entries:
            return []
        prev_entry = entries[0]
        sessions = [_init_session(prev_entry)]

        for entry in entries[1:]:
            # Same window still active; wait for the next change.
            if prev_entry['active_window'] == entry['active_window']:
                continue

            # Time spent in previous window
            diff = (entry['time'] - prev_entry['time']).total_seconds()
            session_length = (prev_entry['time'] - sessions[-1].start_time).total_seconds()

            # Add window name and time spent to extra data
            _add_window(sessions[-1], prev_entry['active_window'], diff)

            if (diff >= _idle or session_length >= _cutoff):
                _end_session(sessions[-1], prev_entry)
                sessions.append(_init_session(entry))

            prev_entry = entry

        _end_session(sessions[-1], prev_entry)

        return sessions
134 |
135 |
class Provider(AbstractProvider):
    """Reads thyme's daily JSON snapshot files from disk."""

    def read_files(self, start_date, end_date) -> List[dict]:
        """Collect all snapshot dicts from the per-day files in the range."""
        snapshot_entries = []
        filenames = []
        date = start_date
        # thyme writes one JSON file per day, named YYYY-MM-DD.json.
        while date <= end_date:
            filenames.append('{}/{}.json'.format(get_setting('DIR'), date.strftime('%Y-%m-%d')))
            date = date + timedelta(days=1)

        for filename in filenames:
            # Use the module logger (previously the root logger via logging.info).
            logger.info("opening file {}".format(filename))
            if not os.path.exists(filename):
                continue
            with open(filename) as f:
                try:
                    data = json.load(f)
                except Exception as e:
                    # Skip unreadable/corrupt files instead of aborting the run.
                    logger.exception(e)
                    continue
            entries = data.get('Snapshots')
            for entry in entries:
                snapshot_entries.append(entry)
        return snapshot_entries

    def test_read_files(self, start_date=None, end_date=None):
        """Fixture snapshots: a window switch at 12:04 and a switch back at 12:08."""
        return [
            {
                "Time": "2019-05-23T12:00:01.242072673+03:00",
                "Windows": [
                    {
                        "ID": 1,
                        "Desktop": -1,
                        "Name": "Chrome - github"
                    },
                    {
                        "ID": 2,
                        "Desktop": 0,
                        "Name": "VSCode"
                    }
                ],
                "Active": 1,
                "Visible": [
                    1,
                    2,
                ]
            },
            {
                "Time": "2019-05-23T12:04:01.242072673+03:00",
                "Windows": [
                    {
                        "ID": 1,
                        "Desktop": -1,
                        "Name": "Chrome - github"
                    },
                    {
                        "ID": 2,
                        "Desktop": 0,
                        "Name": "VSCode"
                    }
                ],
                "Active": 2,
                "Visible": [
                    1,
                    2,
                ]
            },
            {
                "Time": "2019-05-23T12:08:01.242072673+03:00",
                "Windows": [
                    {
                        "ID": 1,
                        "Desktop": -1,
                        "Name": "Chrome - github"
                    },
                    {
                        "ID": 2,
                        "Desktop": 0,
                        "Name": "VSCode"
                    }
                ],
                "Active": 1,
                "Visible": [
                    1,
                    2,
                ]
            },
        ]
223 |
--------------------------------------------------------------------------------
/tracklater/timemodules/toggl.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | import time
4 | from typing import List, Union, cast, Any, Optional
5 | from datetime import timedelta
6 |
7 | from tracklater.utils import parse_time, _str
8 | from tracklater import settings
9 | from .interfaces import (
10 | EntryMixin, AddEntryMixin, UpdateEntryMixin, DeleteEntryMixin, ProjectMixin, AbstractParser,
11 | AbstractProvider
12 | )
13 | from tracklater.models import Entry, Project, Issue
14 |
15 |
16 | import logging
17 | logger = logging.getLogger(__name__)
18 |
19 |
def get_setting(key, default=None, group='global'):
    """Look up a TOGGL module setting through the shared settings helper."""
    return settings.helper('TOGGL', key, default=default, group=group)
22 |
23 |
class Parser(EntryMixin, AddEntryMixin, UpdateEntryMixin, DeleteEntryMixin, ProjectMixin,
             AbstractParser):
    """Toggl module parser.

    Reads time entries and projects from the Toggl v8 API and supports
    creating, updating and deleting entries through :class:`Provider`.
    """

    def __init__(self, *args, **kwargs):
        super(Parser, self).__init__(*args, **kwargs)
        self.provider = Provider(get_setting('API_KEY'))

    def get_entries(self) -> List[Entry]:
        """Fetch time entries, limited to [start_date, end_date] when set."""
        if self.start_date:
            params = {'start_date': self.start_date.isoformat() + "+00:00",
                      'end_date': self.end_date.isoformat() + "+00:00"}
        else:
            params = {}
        data = self.provider.request(
            'time_entries', params=params, method='GET'
        )
        time_entries = []
        for entry in data:
            time_entries.append(Entry(
                # Normalize the id to str for consistency: create_entry,
                # update_entry and delete_entry all compare/store string ids.
                id=_str(entry['id']),
                start_time=parse_time(entry['start']),
                end_time=parse_time(entry['stop']),
                title=entry.get('description', ''),
                project=entry.get('pid', None),
            ))

        return time_entries

    def get_projects(self) -> List[Project]:
        """Build Project rows for every configured client/project pair."""
        clients = self.provider.request('clients', method='GET')
        projects = []
        toggl_settings = cast(Any, settings.TOGGL)
        for client in clients:
            groups = []  # Settings groups whose NAME matches this client
            for group_name, group_data in toggl_settings.items():
                if group_data.get('NAME', None) == client['name']:
                    groups.append(group_name)
            if not groups:
                continue
            resp = self.provider.request(
                'clients/{}/projects'.format(client['id']), method='GET'
            )
            for project in resp:
                # Keep the project only if some matching group lists it.
                for group in groups:
                    if project['name'] in toggl_settings[group]['PROJECTS']:
                        break
                else:
                    continue
                projects.append(Project(
                    pid=project['id'],
                    title="{} - {}".format(client['name'], project['name']),
                    group=group
                ))
        return projects

    def create_entry(self, new_entry: Entry, issue: Optional[Issue]) -> Entry:
        """Create a Toggl entry from new_entry and return the stored Entry."""
        entry = self.push_session(
            session={
                'start_time': new_entry.start_time,
                'end_time': new_entry.end_time,
            },
            name=new_entry.title,
            project_id=new_entry.project
        )
        return Entry(
            id=_str(entry['id']),
            start_time=parse_time(entry['start']),
            end_time=parse_time(entry['stop']),
            title=entry['description'],
            project=entry.get('pid', None),
            group=new_entry.group,
        )

    def update_entry(self, entry_id: str, new_entry: Entry, issue: Optional[Issue]) -> Entry:
        """Overwrite the Toggl entry entry_id with new_entry's data."""
        updated_entry = self.push_session(
            session={
                'start_time': new_entry.start_time,
                'end_time': new_entry.end_time,
            },
            entry_id=entry_id,
            name=new_entry.title,
            project_id=new_entry.project
        )

        return Entry(
            id=_str(updated_entry['id']),
            start_time=parse_time(updated_entry['start']),
            end_time=parse_time(updated_entry['stop']),
            title=updated_entry['description'],
            project=_str(updated_entry.get('pid', None)),
            group=new_entry.group,
        )

    def delete_entry(self, entry_id: str) -> None:
        """Delete the entry remotely and drop it from the local cache."""
        removed_id = self.delete_time_entry(entry_id)

        for i, entry in enumerate(self.entries):
            if entry.id == str(removed_id):
                del self.entries[i]
                break

    def push_session(self, session: dict, name: str, entry_id: str = '', project_id: str = None):
        """Create (or, when entry_id is given, update) a Toggl time entry.

        Returns the API's entry dict augmented with parsed datetimes.
        """
        headers = {
            "Content-Type": "application/json"
        }
        data = {
            'time_entry': {
                "description": name,
                "start": session['start_time'].isoformat() + "+00:00",
                "duration": int((session['end_time'] - session['start_time']).total_seconds()),
                "created_with": "thyme-toggl-cli"
            }
        }
        if project_id:
            data['time_entry']['pid'] = project_id
        if entry_id:
            return self.update_time_entry(entry_id, data)

        response = self.provider.request(
            'time_entries', data=json.dumps(data), headers=headers,
        )
        # Use the module logger instead of print() for consistency.
        logger.info('Pushed session to toggl: %s', response)
        entry = response['data']
        entry['start_time'] = parse_time(entry['start'])
        entry['end_time'] = parse_time(entry['stop'])
        return entry

    def update_time_entry(self, entry_id: str, data: dict):
        """PUT updated entry data to Toggl and return the parsed entry."""
        response = self.provider.request(
            'time_entries/{}'.format(entry_id), data=json.dumps(data), method='PUT'
        )
        # Use the module logger instead of print() for consistency.
        logger.info('Updated session to toggl: %s', response)
        entry = response['data']
        entry['start_time'] = parse_time(entry['start'])
        entry['end_time'] = parse_time(entry['stop'])
        return entry

    def delete_time_entry(self, entry_id):
        """DELETE the entry on Toggl; returns the raw API response."""
        logger.info('deleting %s', entry_id)
        response = self.provider.request(
            'time_entries/{}'.format(entry_id), method='DELETE'
        )
        return response
166 |
167 |
class Provider(AbstractProvider):
    """HTTP gateway to the Toggl v8 API, with canned responses for tests."""

    def __init__(self, api_key):
        self.api_key = api_key
        # Next fake id handed out when test_request simulates entry creation.
        self.id_counter = 4

    def request(self, endpoint: str, **kwargs) -> Union[List[dict], dict]:
        """Send an HTTP request to Toggl and return the decoded JSON body.

        kwargs are forwarded to requests; 'method' (default 'POST')
        selects the HTTP verb. Raises and logs if the body is not JSON.
        """
        url = 'https://api.track.toggl.com/api/v8/{}'.format(endpoint)
        kwargs['headers'] = kwargs.get('headers', {
            "Content-Type": "application/json"
        })
        kwargs['auth'] = kwargs.get('auth', (self.api_key, 'api_token'))

        # pop() with a default replaces the try/del/except-KeyError dance.
        method = kwargs.pop('method', 'POST').lower()
        response = getattr(requests, method)(url, **kwargs)
        try:
            return response.json()
        except Exception as e:
            logger.exception("%s: %s", response.content, e)
            raise

    def test_request(self, endpoint: str, **kwargs) -> Union[List[dict], dict, str]:
        """Offline stand-in for request(), returning fixture data."""
        method = kwargs.get('method', 'POST').lower()
        if endpoint == "time_entries" and method == 'get':
            return [
                {
                    "id": "1",
                    "pid": "10",
                    "start": "2019-05-09T08:00:00+00:00",
                    "stop": "2019-05-09T09:00:00+00:00",
                    "description": "Toggl entry 1",
                },
                {
                    "id": "2",
                    "pid": "11",
                    "start": "2019-05-13T07:42:55+00:00",
                    "stop": "2019-05-13T08:34:52+00:00",
                    "description": "Toggl entry 2",
                },
                {
                    "id": "3",
                    "pid": "20",
                    "start": "2019-05-13T09:35:11+00:00",
                    "stop": "2019-05-13T10:34:02+00:00",
                    "description": "Toggl entry 3",
                }
            ]
        elif endpoint == "clients" and method == 'get':
            return [
                {
                    "id": "1",
                    "name": "First Client",
                },
                {
                    "id": "2",
                    "name": "Second Client",
                },
            ]
        elif endpoint.startswith("clients") and method == 'get':
            # Endpoint looks like "clients/<id>/projects". The previous
            # single-character slice (endpoint[8]) broke for ids >= 10.
            _clid = endpoint.split('/')[1]
            return [
                {
                    "id": str(int(_clid) * 10),
                    "name": "Development"
                },
                {
                    "id": str(int(_clid) * 10 + 1),
                    "name": "Bug fixing"
                },
            ]
        elif endpoint == "time_entries" and method == 'post':
            entry = json.loads(kwargs['data'])['time_entry']
            entry['stop'] = (
                parse_time(entry['start']) + timedelta(seconds=entry['duration'])
            ).isoformat() + "+00:00"
            entry['id'] = self.id_counter
            self.id_counter += 1
            return {'data': entry}
        elif endpoint.startswith("time_entries") and method == 'put':
            entry = json.loads(kwargs['data'])['time_entry']
            entry['stop'] = (
                parse_time(entry['start']) + timedelta(seconds=entry['duration'])
            ).isoformat() + "+00:00"
            # Entry id is everything after the "time_entries/" prefix.
            entry['id'] = endpoint[13:]
            return {'data': entry}
        elif endpoint.startswith("time_entries") and method == 'delete':
            return endpoint[13:]
        return [{}]
259 |
--------------------------------------------------------------------------------
/tracklater/utils.py:
--------------------------------------------------------------------------------
1 | import re
2 | from datetime import datetime, timedelta, tzinfo
3 | from dateutil import parser as dateparser
4 | import pytz
5 | from typing import Any, Optional
6 |
7 | import logging
8 | logger = logging.getLogger(__name__)
9 |
10 |
def parse_time(timestr: str):
    """Parse a timestamp string into a naive datetime normalized to UTC."""
    parsed = dateparser.parse(timestr)
    as_utc = parsed.astimezone(pytz.utc)
    return as_utc.replace(tzinfo=None)
13 |
14 |
15 | def _str(obj: Any) -> Optional[str]:
16 | return str(obj) if obj else None
17 |
18 |
class FixedOffset(tzinfo):
    """A tzinfo with a constant offset, expressed in seconds west of UTC."""

    def __init__(self, offset, name):
        # Negate: tzinfo.utcoffset() must report the eastward offset.
        self.__delta = timedelta(seconds=-offset)
        self.__label = name

    def utcoffset(self, dt):
        return self.__delta

    def dst(self, dt):
        # A fixed offset never observes daylight saving time.
        return timedelta(0)

    def tzname(self, dt):
        return self.__label
34 |
35 |
def obj_from_dict(d):
    """Build an ad-hoc object from a dict (used for test fixtures).

    Plain keys become attributes; nested dicts/sequences are converted
    recursively. A key of the form ``"name(arg1,arg2)"`` becomes a callable
    attribute ``name`` that returns the stored value when called with
    matching leading arguments. A string value of the form
    ``"datetime(<unix_ts>)"`` is converted to a ``datetime``.
    """
    top = type('new', (object,), d)
    seqs = tuple, list, set, frozenset
    callables = {}
    for i, j in d.items():
        if isinstance(j, dict):
            value = obj_from_dict(j)
        elif isinstance(j, seqs):
            value = type(j)(obj_from_dict(sj) if isinstance(sj, dict) else sj for sj in j)
        else:
            value = j
        _match = re.match(r'(.*)\((.*)\)', i)
        _match2 = re.match(r'datetime\((.*)\)', j) if isinstance(j, str) else None
        if _match:
            _key = _match.groups()[0]
            # Matched parentheses, so we will need to build a function (later)
            # for _key
            _args = []
            if len(_match.groups()) > 1 and _match.groups()[1] != '':
                _args = _match.groups()[1].split(",")
            callables[_key] = callables.get(_key, [])
            # Store a list of possible arguments and their corresponding value
            callables[_key].append((_args, value))
        elif _match2:
            _datetimeval = _match2.groups()[0]
            value = datetime.fromtimestamp(int(_datetimeval))
            setattr(top, i, value)
        else:
            setattr(top, i, value)

    for _key, arg_list in callables.items():
        # Bind arg_list as a default argument: a plain closure is
        # late-binding, so every generated function would otherwise share
        # the *last* arg_list from this loop and earlier keys would never
        # resolve to their own values.
        def _func(*args, _arg_list=arg_list, **kwargs):
            for _data in _arg_list:
                if list(args[:len(_data[0])]) == _data[0]:
                    return _data[1]
        setattr(top, _key, _func)

    return top
74 |
--------------------------------------------------------------------------------
/tracklater/views.py:
--------------------------------------------------------------------------------
1 | from flask import request, Blueprint
2 | from tracklater.utils import _str
3 | from datetime import datetime, timedelta, date
4 | import json
5 | import pytz
6 | from typing import Dict, Any
7 |
8 | from tracklater.database import db
9 | from tracklater.main import Parser
10 | from tracklater import settings
11 | from tracklater.models import Entry, Issue, Project, ApiCall # noqa
from tracklater.timemodules.interfaces import AddEntryMixin, UpdateEntryMixin, DeleteEntryMixin
13 |
14 | import logging
15 | logger = logging.getLogger(__name__)
16 |
17 |
18 | bp = Blueprint("main", __name__, static_folder="static", static_url_path="/static/views")
19 |
20 |
@bp.route('/', methods=['GET'])
def index():
    """Serve the single-page application shell."""
    page = 'index.html'
    return bp.send_static_file(page)
24 |
25 |
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code.

    datetimes and dates are emitted as ISO strings with an explicit UTC
    offset so the JS frontend converts them consistently.
    """
    if isinstance(obj, (datetime, date)):
        # Plain `date` objects have no tzinfo attribute, so only inspect
        # tz-awareness on actual datetimes (datetime is a date subclass).
        if isinstance(obj, datetime) and obj.tzinfo is not None:
            if obj.tzinfo is not pytz.utc:
                logger.warning("Trying to serialize timezone aware date %s", obj)
            # replace() returns a new object — the old code discarded the
            # result, so aware datetimes kept their tzinfo and UTC-aware
            # ones produced a doubled "+00:00" suffix below.
            obj = obj.replace(tzinfo=None)
        # No idea, but for some reason using isoformat() here did *not* work.
        # Just add utc timezone manually then... (This will make js convert it automatically)
        return obj.isoformat() + "+00:00"
    raise TypeError("Type %s not serializable" % type(obj))
37 |
38 |
class MyEncoder(json.JSONEncoder):
    """Encode arbitrary objects as their attribute dict, stripping any
    leading underscores from attribute names."""

    def default(self, o):
        public = {}
        for attr_name, attr_value in vars(o).items():
            public[attr_name.lstrip('_')] = attr_value
        return public
42 |
43 |
@bp.route('/listmodules', methods=['GET'])
def listmodules() -> Any:
    """Return each enabled module's UI color settings and capabilities."""
    if request.method != 'GET':
        return None
    parser = Parser(None, None)
    payload = {}
    for module_name in settings.ENABLED_MODULES:
        payload[module_name] = {
            'color': settings.UI_SETTINGS.get(module_name, {}),
            'capabilities': parser.modules[module_name].capabilities,
        }
    return json.dumps(payload, default=json_serial)
56 |
57 |
@bp.route('/getsettings', methods=['GET'])
def getsettings() -> Any:
    """Dump the settings module as JSON for the frontend."""
    serialized = json.dumps(settings, cls=MyEncoder)
    return serialized
61 |
62 |
@bp.route('/fetchdata', methods=['GET'])
def fetchdata() -> Any:
    """Optionally re-parse all modules, then return each requested module's
    entries (within the date window), projects, issues, capabilities and
    UI color settings as JSON."""
    if request.method != 'GET':
        return None

    requested = request.values.getlist('keys[]')
    should_parse = request.values.get('parse', '1') == '1'
    now = datetime.utcnow()

    raw_from = request.values.get('from')
    from_date = parseTimestamp(raw_from) if raw_from is not None else now - timedelta(days=41)
    raw_to = request.values.get('to')
    to_date = parseTimestamp(raw_to) if raw_to is not None else now

    # Settings may pin the window regardless of the request parameters.
    override_start = getattr(settings, 'OVERRIDE_START', None)
    if override_start:
        from_date = override_start
    override_end = getattr(settings, 'OVERRIDE_END', None)
    if override_end:
        to_date = override_end

    # A literal "all" means no module filter at all.
    keys = None if (requested and requested[0] == "all") else requested

    parser = Parser(from_date, to_date, modules=keys)
    if should_parse:
        parser.parse()

    data: Dict[str, Dict] = {}
    for module_name in settings.ENABLED_MODULES:
        if keys and module_name not in keys:
            continue
        entries = Entry.query.filter(
            Entry.module == module_name,
            Entry.start_time >= from_date,
            Entry.start_time <= to_date
        )
        projects = Project.query.filter(Project.module == module_name)
        issues = Issue.query.filter(Issue.module == module_name)
        data[module_name] = {
            'entries': [entry.to_dict() for entry in entries],
            'projects': [project.to_dict() for project in projects],
            'issues': [issue.to_dict() for issue in issues],
            'capabilities': parser.modules[module_name].capabilities,
            'color': settings.UI_SETTINGS.get(module_name, {}),
        }
    return json.dumps(data, default=json_serial)
110 |
111 |
def parseTimestamp(stamp):
    """Convert a JS-style millisecond timestamp to a local datetime.

    Falsy input (None, 0, '') yields None.
    """
    if not stamp:
        return None
    return datetime.fromtimestamp(int(stamp) / 1e3)
117 |
118 |
@bp.route('/updateentry', methods=['POST'])
def updateentry() -> Any:
    """Create or update a time entry in the given module.

    Expects a JSON body with at least 'module' and 'start_time'. When
    'entry_id' is present the module's update path is used, otherwise an
    entry is created. Returns the stored entry as JSON, or the string
    "error" when the module returned nothing.
    """
    if request.method == 'POST':
        data = request.get_json()
        module = data.get('module')
        entry_id = _str(data.get('entry_id', None))
        project = data.get('project_id', None)
        # The frontend sends the literal string "null" for "no project".
        if project == "null":
            project = None
        # Map project pid -> settings group. The comprehension's `project`
        # loop variable has its own scope and does not clobber the outer one.
        project_to_group = {project.pid: project.group for project in Project.query.all()}
        new_entry = Entry(
            start_time=parseTimestamp(data['start_time']),
            end_time=parseTimestamp(data.get('end_time', None)),
            id=entry_id,
            issue=data.get('issue_id', None),
            project=project,
            title=data.get('title', ''),
            text=data.get('text', ""),
            extra_data=data.get('extra_data', {}),
            group=project_to_group.get(str(project), None)
        )
        issue = None
        if new_entry.issue:
            issue = Issue.query.filter(Issue.uuid == new_entry.issue).first()

        parser = Parser(None, None)

        if not entry_id:
            # Check that create is allowed
            assert isinstance(parser.modules[module], AddEntryMixin)
            new_entry = parser.modules[module].create_entry(  # type: ignore
                new_entry=new_entry,
                issue=issue
            )
        else:
            # Check that update is allowed
            assert isinstance(parser.modules[module], UpdateEntryMixin)
            new_entry = parser.modules[module].update_entry(  # type: ignore
                entry_id=new_entry.id,
                new_entry=new_entry,
                issue=issue
            )
        # `data` is reused as the response payload from here on.
        data = "error"

        if new_entry:
            # Drop any stale row with the same id before merging the fresh one.
            Entry.query.filter(Entry.id == new_entry.id).delete()
            new_entry.module = module
            db.session.merge(new_entry)
            db.session.commit()
            data = new_entry.to_dict()

        return json.dumps(data, default=json_serial)
    return None
172 |
173 |
@bp.route('/deleteentry', methods=['POST'])
def deleteentry() -> Any:
    """Delete a time entry both from the remote module and the local DB.

    Expects a JSON body with 'module' and 'entry_id'. Returns the module's
    delete response as JSON.
    """
    if request.method == 'POST':
        data = request.get_json()
        module = data.get('module')
        entry_id = data.get('entry_id')

        parser = Parser(None, None)
        # Check that delete is allowed. The original asserted AddEntryMixin,
        # which guards creation, not deletion.
        assert isinstance(parser.modules[module], DeleteEntryMixin)
        ret = parser.modules[module].delete_entry(  # type: ignore
            entry_id=entry_id
        )

        Entry.query.filter(Entry.id == entry_id).delete()
        db.session.commit()

        return json.dumps(ret, default=json_serial)
    return None
193 |
--------------------------------------------------------------------------------