├── .dockerignore
├── .gitignore
├── README.md
├── backend
├── Dockerfile
├── crontab
├── docker-entrypoint.sh
├── requirements.txt
├── source
│ ├── archive
│ │ ├── __init__.py
│ │ ├── apps.py
│ │ ├── migrations
│ │ │ ├── 0001_initial.py
│ │ │ ├── 0002_auto_20210125_1640.py
│ │ │ ├── 0003_twitterarchive.py
│ │ │ ├── 0004_jsonarchive.py
│ │ │ ├── 0005_gpxarchive.py
│ │ │ ├── 0006_n26archive.py
│ │ │ ├── 0007_auto_20210502_1244.py
│ │ │ ├── 0008_auto_20210502_1258.py
│ │ │ ├── 0009_telegramarchive.py
│ │ │ ├── 0010_auto_20210603_1102.py
│ │ │ ├── 0011_facebookarchive.py
│ │ │ ├── 0012_auto_20210716_1352.py
│ │ │ ├── 0013_icalendararchive.py
│ │ │ ├── 0014_auto_20220516_1313.py
│ │ │ ├── 0015_redditarchive.py
│ │ │ └── __init__.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── facebook.py
│ │ │ ├── google_takeout.py
│ │ │ ├── gpx.py
│ │ │ ├── icalendar.py
│ │ │ ├── json.py
│ │ │ ├── n26.py
│ │ │ ├── reddit.py
│ │ │ ├── telegram.py
│ │ │ └── twitter.py
│ │ ├── serializers.py
│ │ ├── urls.py
│ │ └── views.py
│ ├── authentication
│ │ ├── __init__.py
│ │ ├── apps.py
│ │ ├── management
│ │ │ ├── __init__.py
│ │ │ └── commands
│ │ │ │ ├── __init__.py
│ │ │ │ ├── assert_app_has_users.py
│ │ │ │ └── get_or_create_oauth_app.py
│ │ ├── templates
│ │ │ └── registration
│ │ │ │ └── login.html
│ │ └── urls.py
│ ├── backend
│ │ ├── __init__.py
│ │ ├── asgi.py
│ │ ├── settings.py
│ │ ├── urls.py
│ │ └── wsgi.py
│ ├── cron-tasks.sh
│ ├── destination
│ │ ├── __init__.py
│ │ ├── apps.py
│ │ ├── management
│ │ │ ├── __init__.py
│ │ │ └── commands
│ │ │ │ ├── __init__.py
│ │ │ │ └── export.py
│ │ ├── migrations
│ │ │ ├── 0001_initial.py
│ │ │ ├── 0002_rename_backups_to_destination.py
│ │ │ └── __init__.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ ├── destination.py
│ │ │ └── rsync.py
│ │ ├── serializers.py
│ │ ├── urls.py
│ │ └── views.py
│ ├── manage.py
│ ├── source
│ │ ├── __init__.py
│ │ ├── apps.py
│ │ ├── management
│ │ │ ├── __init__.py
│ │ │ └── commands
│ │ │ │ ├── __init__.py
│ │ │ │ └── import.py
│ │ ├── migrations
│ │ │ ├── 0001_initial.py
│ │ │ ├── 0002_backupsource_key.py
│ │ │ ├── 0003_auto_20201021_0811.py
│ │ │ ├── 0004_twittersource.py
│ │ │ ├── 0005_redditsource.py
│ │ │ ├── 0006_hackernewssource.py
│ │ │ ├── 0007_rsssource.py
│ │ │ ├── 0008_auto_20210127_1215.py
│ │ │ ├── 0009_auto_20210316_1316.py
│ │ │ ├── 0010_filesystemsource.py
│ │ │ ├── 0011_auto_20210408_1021.py
│ │ │ ├── 0012_rsyncdestination.py
│ │ │ ├── 0013_auto_20210419_1110.py
│ │ │ ├── 0014_auto_20210502_0933.py
│ │ │ ├── 0015_auto_20210502_1007.py
│ │ │ ├── 0016_auto_20210502_1244.py
│ │ │ ├── 0017_gitsource.py
│ │ │ ├── 0018_rename_backups_to_source.py
│ │ │ ├── 0019_delete_rsyncdestination.py
│ │ │ ├── 0020_traktsource.py
│ │ │ ├── 0021_auto_20220516_1313.py
│ │ │ └── __init__.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ ├── filesystem.py
│ │ │ ├── git.py
│ │ │ ├── hackernews.py
│ │ │ ├── oauth.py
│ │ │ ├── reddit.py
│ │ │ ├── rss.py
│ │ │ ├── rsync.py
│ │ │ ├── source.py
│ │ │ ├── trakt.py
│ │ │ └── twitter.py
│ │ ├── parsers.py
│ │ ├── serializers.py
│ │ ├── urls.py
│ │ ├── utils
│ │ │ ├── __init__.py
│ │ │ ├── datetime.py
│ │ │ ├── files.py
│ │ │ ├── geo.py
│ │ │ ├── models.py
│ │ │ ├── preprocessing.py
│ │ │ └── ssh.py
│ │ └── views.py
│ └── timeline
│ │ ├── __init__.py
│ │ ├── apps.py
│ │ ├── management
│ │ │ ├── __init__.py
│ │ │ └── commands
│ │ │ │ └── __init__.py
│ │ ├── migrations
│ │ │ ├── 0001_initial.py
│ │ │ ├── 0002_auto_20201020_1203.py
│ │ │ ├── 0003_auto_20201020_1203.py
│ │ │ ├── 0004_remove_entry_date_modified.py
│ │ │ ├── 0005_auto_20210110_1817.py
│ │ │ ├── 0006_auto_20210127_1256.py
│ │ │ ├── 0007_source_name_fix.py
│ │ │ ├── 0008_auto_20210603_1102.py
│ │ │ └── __init__.py
│ │ ├── models.py
│ │ ├── permissions.py
│ │ ├── renderers.py
│ │ ├── serializers.py
│ │ ├── urls.py
│ │ ├── utils
│ │ │ ├── __init__.py
│ │ │ ├── files.py
│ │ │ └── postprocessing.py
│ │ └── views.py
└── ssh_config
├── docker-compose.homeserver.yml
├── docker-compose.override.yml
├── docker-compose.yml
├── frontend
├── Dockerfile
├── nginx.conf
└── source
│ ├── css
│ │ ├── fontawesome.css
│ │ └── style.css
│ ├── fonts
│ │ ├── fa-brands-400.eot
│ │ ├── fa-brands-400.svg
│ │ ├── fa-brands-400.ttf
│ │ ├── fa-brands-400.woff
│ │ ├── fa-brands-400.woff2
│ │ ├── fa-regular-400.eot
│ │ ├── fa-regular-400.svg
│ │ ├── fa-regular-400.ttf
│ │ ├── fa-regular-400.woff
│ │ ├── fa-regular-400.woff2
│ │ ├── fa-solid-900.eot
│ │ ├── fa-solid-900.svg
│ │ ├── fa-solid-900.ttf
│ │ ├── fa-solid-900.woff
│ │ └── fa-solid-900.woff2
│ ├── images
│ │ └── favicon.png
│ ├── index.html
│ └── js
│ │ ├── app.js
│ │ ├── components
│ │ │ ├── entries
│ │ │ │ ├── activity.js
│ │ │ │ ├── commit.js
│ │ │ │ ├── entry-icon.js
│ │ │ │ ├── gallery.js
│ │ │ │ ├── journal.js
│ │ │ │ ├── message.js
│ │ │ │ ├── motion.js
│ │ │ │ ├── post.js
│ │ │ │ ├── text.js
│ │ │ │ ├── trakt.js
│ │ │ │ └── transaction.js
│ │ │ ├── filter.js
│ │ │ ├── googleMap.js
│ │ │ ├── journalEditor.js
│ │ │ ├── preview.js
│ │ │ ├── previews
│ │ │ │ ├── geolocation.js
│ │ │ │ ├── image.js
│ │ │ │ ├── pdf.js
│ │ │ │ └── video.js
│ │ │ ├── settings
│ │ │ │ ├── archive.js
│ │ │ │ ├── archives.js
│ │ │ │ ├── settings.js
│ │ │ │ ├── source.js
│ │ │ │ ├── sourceTrakt.js
│ │ │ │ └── sources.js
│ │ │ ├── spinner.js
│ │ │ ├── thumbnails
│ │ │ │ ├── image.js
│ │ │ │ └── video.js
│ │ │ ├── timeline-nav.js
│ │ │ └── timeline.js
│ │ ├── config.js
│ │ ├── libs
│ │ │ ├── marked.min.js
│ │ │ ├── moment.min.js
│ │ │ ├── vue-router.js
│ │ │ ├── vue.js
│ │ │ ├── vuex-router-sync.js
│ │ │ └── vuex.min.js
│ │ ├── models
│ │ │ ├── filters.js
│ │ │ └── requests.js
│ │ ├── routes.js
│ │ ├── services
│ │ │ ├── api-service.js
│ │ │ ├── archive-service.js
│ │ │ ├── auth-service.js
│ │ │ ├── googlemaps.js
│ │ │ ├── object-service.js
│ │ │ ├── source-service.js
│ │ │ └── timeline-service.js
│ │ ├── store
│ │ │ ├── archives.js
│ │ │ ├── auth.js
│ │ │ ├── sources.js
│ │ │ ├── store.js
│ │ │ └── timeline.js
│ │ └── utils
│ │ │ └── entries.js
├── geolocation-client
├── Dockerfile
├── requirements.txt
└── source
│ └── main.py
├── mqtt-broker
├── Dockerfile
└── docker-entrypoint.sh
├── proxy
├── Dockerfile
└── nginx.conf
├── schemas.md
└── scripts
├── copy-production-db.sh
├── db-backup.sh
├── db-migrate.sh
├── db-restore.sh
├── dev-env.sh
├── timeline-create-user.sh
├── timeline-export.sh
├── timeline-import.sh
└── timeline-shell.sh
/.dockerignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .git
3 | __pycache__
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .env
3 | __pycache__
--------------------------------------------------------------------------------
/backend/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python
2 |
3 | # Install dependencies
4 | RUN apt-get update \
5 | && apt-get install -y --no-install-recommends \
6 | cron \
7 | ffmpeg \
8 | ghostscript \
9 | imagemagick \
10 | mime-support \
11 | netcat \
12 | openssh-client \
13 | rsync \
14 | sshpass \
15 | util-linux \
16 | wget \
17 | libpq-dev \
18 | && rm -rf /var/lib/apt/lists/*
19 |
20 | # cron: To run the backups on a schedule
21 | # ffmpeg: To make image and video previews
22 | # ghostscript: To make PDF previews
23 | # mime-support: To populate the list of mime types, and figure out file types
24 | # netcat: To wait for the DB before starting Django
25 | # openssh-client: To copy SSH keys to sources
26 | # rsync: To back up files
27 | # sshpass: To connect over SSH with a password, without user interaction
28 | # util-linux: For flock, which prevents a job from running multiple instances at once
29 | # wget: To download more mimetypes
30 |
31 | # https://stackoverflow.com/questions/52998331/imagemagick-security-policy-pdf-blocking-conversion
32 | # https://askubuntu.com/questions/1181762/imagemagickconvert-im6-q16-no-images-defined
33 | # These solutions do not work. Removing the file entirely does.
34 | # RUN sed -i_bak 's/rights="none" pattern="PDF"/rights="read | write" pattern="PDF"/' /etc/ImageMagick-6/policy.xml
35 | RUN rm /etc/ImageMagick-6/policy.xml
36 |
37 | COPY requirements.txt ./
38 | RUN pip install -r requirements.txt
39 |
40 | # Get a longer list of mimetypes. The default IANA list is missing important ones like GPX
41 | RUN wget https://svn.apache.org/repos/asf/httpd/httpd/branches/1.3.x/conf/mime.types -O /usr/local/etc/mime.types
42 |
43 | COPY ssh_config /etc/ssh/ssh_config
44 | COPY crontab /etc/timeline-crontab
45 |
46 | RUN mkdir -p /data/mounts
47 |
48 | # Start the backend
49 | WORKDIR /usr/src/app
50 | EXPOSE 80
51 | COPY ./docker-entrypoint.sh /
52 | ENTRYPOINT ["/docker-entrypoint.sh"]
--------------------------------------------------------------------------------
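
A note on the mime.types download above: Python's mimetypes module already looks for /usr/local/etc/mime.types (it is one of the paths in mimetypes.knownfiles), so saving the Apache list to that exact location makes it available to the backend with no further configuration. A minimal sketch; the guessed type for .gpx is an assumption about what the downloaded list contains:

import mimetypes

# /usr/local/etc/mime.types is among the defaults in mimetypes.knownfiles, so
# the Apache list downloaded in the Dockerfile is picked up automatically.
mimetypes.init()
print(mimetypes.guess_type('track.gpx'))  # e.g. ('application/gpx+xml', None)
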
/backend/crontab:
--------------------------------------------------------------------------------
1 | # End this file with a newline, or crontab will quietly stop working
2 | # Output to /tmp/stdout to show output in the docker logs
3 | SHELL=/bin/bash
4 |
5 | 0 * * * * /usr/src/app/cron-tasks.sh
6 |
--------------------------------------------------------------------------------
/backend/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Wait for database
4 | until nc -z timeline-db 5432; do echo Waiting for PostgreSQL; sleep 1; done
5 |
6 | python manage.py migrate # Apply database migrations
7 |
8 | # Prepare log files and start outputting logs to stdout
9 | mkdir -p /var/log/backend
10 | touch /var/log/backend/gunicorn.log
11 | touch /var/log/backend/access.log
12 | tail -n 0 -f /var/log/backend/*.log &
13 |
14 | # Activate cron with all Django environment variables
15 | > /etc/timeline-cronenv
16 | printf "export BACKEND_SECRET_KEY=%q\n" "${BACKEND_SECRET_KEY}" >> /etc/timeline-cronenv
17 | printf "export BACKEND_DEBUG=%q\n" "${BACKEND_DEBUG}" >> /etc/timeline-cronenv
18 |
19 | mkfifo /tmp/stdout /tmp/stderr
20 | chmod 0666 /tmp/stdout /tmp/stderr
21 | tail -f /tmp/stdout &
22 | tail -f /tmp/stderr >&2 &
23 |
24 | crontab /etc/timeline-crontab
25 | service cron start
26 |
27 | # Make sure that there is an OAuth application for the frontend
28 | python manage.py get_or_create_oauth_app \
29 | --name='Frontend app' \
30 | --client-id="${FRONTEND_CLIENT_ID}" \
31 | --client-type="public" \
32 | --authorization-grant='authorization-code' \
33 | --redirect-uri="https://${FRONTEND_DOMAIN}/oauth-redirect"
34 |
35 | # Make sure that there is an OAuth application for the geolocation client
36 | python manage.py get_or_create_oauth_app \
37 | --name='Geolocation client' \
38 | --client-id="${GEOLOCATION_CLIENT_ID}" \
39 | --client-secret="${GEOLOCATION_CLIENT_SECRET}" \
40 | --client-type="confidential" \
41 | --authorization-grant='client-credentials'
42 |
43 | # Warn if the application has no users yet
44 | python manage.py assert_app_has_users
45 |
46 | # Start Gunicorn processes
47 | echo Starting Gunicorn.
48 | exec gunicorn backend.wsgi:application \
49 | --name backend \
50 | --timeout 1200 \
51 | --reload \
52 | --bind 0.0.0.0:80 \
53 | --workers 3 \
54 | --log-level=info \
55 | --log-file=/var/log/backend/gunicorn.log \
56 | --access-logfile=/var/log/backend/access.log \
57 | "$@"
58 |
--------------------------------------------------------------------------------
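
For reference, here is roughly how the confidential geolocation client could exchange its credentials for an access token. This is a sketch, not code from this repository: the host is a placeholder, and the /oauth/token/ path is the django-oauth-toolkit endpoint that backend/urls.py mounts under oauth/.

import requests

# Client-credentials grant against the OAuth app created by the entrypoint.
# Host and credentials are placeholders.
response = requests.post(
    'https://backend.example.com/oauth/token/',
    data={'grant_type': 'client_credentials'},
    auth=('<GEOLOCATION_CLIENT_ID>', '<GEOLOCATION_CLIENT_SECRET>'),
)
access_token = response.json()['access_token']
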
/backend/requirements.txt:
--------------------------------------------------------------------------------
1 | coloredlogs==14.0
2 | django-filter==2.4.0
3 | Django==3.1.2
4 | django-oauth-toolkit==2.0.0
5 | djangorestframework==3.12.1
6 | feedparser==6.0.2
7 | FormEncode==2.0.0
8 | gpxpy==1.5.0
9 | gunicorn==20.0.4
10 | icalendar==4.0.9
11 | phonenumbers==8.12.23
12 | Pillow==8.0.1
13 | praw==7.6.0
14 | psycopg2-binary==2.8.6
15 | pydriller==2.0
16 | requests==2.25.0
17 | rest-framework-generic-relations==2.0.0
18 | trakt.py==4.4.0
19 | tweepy==3.9.0
20 |
--------------------------------------------------------------------------------
/backend/source/archive/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/archive/__init__.py
--------------------------------------------------------------------------------
/backend/source/archive/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class ArchiveConfig(AppConfig):
5 | name = 'archive'
6 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-25 16:36
2 |
3 | from django.db import migrations, models
4 |
5 | import archive.models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | initial = True
11 |
12 | dependencies = [
13 | ]
14 |
15 | operations = [
16 | migrations.CreateModel(
17 | name='GoogleTakeoutArchive',
18 | fields=[
19 | ('key', models.SlugField(allow_unicode=True, max_length=80, primary_key=True, serialize=False)),
20 | ('description', models.TextField()),
21 | ('date_processed', models.DateTimeField()),
22 | ('archive_file', models.FileField(upload_to=archive.models.archive_path)),
23 | ],
24 | options={
25 | 'abstract': False,
26 | },
27 | ),
28 | ]
29 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0002_auto_20210125_1640.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-25 16:40
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0001_initial'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterField(
14 | model_name='googletakeoutarchive',
15 | name='date_processed',
16 | field=models.DateTimeField(null=True),
17 | ),
18 | ]
19 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0003_twitterarchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-02-03 12:33
2 |
3 | from django.db import migrations, models
4 |
5 | import archive.models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('archive', '0002_auto_20210125_1640'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='TwitterArchive',
17 | fields=[
18 | ('key', models.SlugField(allow_unicode=True, max_length=80, primary_key=True, serialize=False)),
19 | ('description', models.TextField()),
20 | ('date_processed', models.DateTimeField(null=True)),
21 | ('archive_file', models.FileField(upload_to=archive.models.archive_path)),
22 | ],
23 | options={
24 | 'abstract': False,
25 | },
26 | ),
27 | ]
28 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0004_jsonarchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-02-05 13:18
2 |
3 | from django.db import migrations, models
4 |
5 | import archive.models.base
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('archive', '0003_twitterarchive'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='JsonArchive',
17 | fields=[
18 | ('key', models.SlugField(allow_unicode=True, max_length=80, primary_key=True, serialize=False)),
19 | ('description', models.TextField()),
20 | ('date_processed', models.DateTimeField(null=True)),
21 | ('archive_file', models.FileField(upload_to=archive.models.base.archive_path)),
22 | ],
23 | options={
24 | 'abstract': False,
25 | },
26 | ),
27 | ]
28 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0005_gpxarchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-02-06 09:49
2 |
3 | from django.db import migrations, models
4 |
5 | import archive.models.base
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('archive', '0004_jsonarchive'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='GpxArchive',
17 | fields=[
18 | ('key', models.SlugField(allow_unicode=True, max_length=80, primary_key=True, serialize=False)),
19 | ('description', models.TextField()),
20 | ('date_processed', models.DateTimeField(null=True)),
21 | ('archive_file', models.FileField(upload_to=archive.models.base.archive_path)),
22 | ],
23 | options={
24 | 'abstract': False,
25 | },
26 | ),
27 | ]
28 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0006_n26archive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-02-07 19:04
2 |
3 | from django.db import migrations, models
4 |
5 | import archive.models.base
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('archive', '0005_gpxarchive'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='N26CsvArchive',
17 | fields=[
18 | ('key', models.SlugField(allow_unicode=True, max_length=80, primary_key=True, serialize=False)),
19 | ('description', models.TextField()),
20 | ('date_processed', models.DateTimeField(null=True)),
21 | ('archive_file', models.FileField(upload_to=archive.models.base.archive_path)),
22 | ],
23 | options={
24 | 'abstract': False,
25 | },
26 | ),
27 | ]
28 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0007_auto_20210502_1244.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-05-02 12:44
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0006_n26archive'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterField(
14 | model_name='googletakeoutarchive',
15 | name='key',
16 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
17 | ),
18 | migrations.AlterField(
19 | model_name='gpxarchive',
20 | name='key',
21 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
22 | ),
23 | migrations.AlterField(
24 | model_name='jsonarchive',
25 | name='key',
26 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
27 | ),
28 | migrations.AlterField(
29 | model_name='n26csvarchive',
30 | name='key',
31 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
32 | ),
33 | migrations.AlterField(
34 | model_name='twitterarchive',
35 | name='key',
36 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
37 | ),
38 | ]
39 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0008_auto_20210502_1258.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-05-02 12:58
2 |
3 | from django.db import migrations
4 |
5 | from archive.models import archive_path
6 |
7 |
8 | def update_path(apps, schema_editor):
9 |
10 | for gpx_archive in apps.get_model('archive', 'GpxArchive').objects.all():
11 | gpx_archive.archive_file = str(archive_path(gpx_archive, gpx_archive.archive_file.path))
12 | gpx_archive.save()
13 |
14 | for json_archive in apps.get_model('archive', 'JsonArchive').objects.all():
15 | json_archive.archive_file = str(archive_path(json_archive, json_archive.archive_file.path))
16 | json_archive.save()
17 |
18 | for n26_archive in apps.get_model('archive', 'N26CsvArchive').objects.all():
19 | n26_archive.archive_file = str(archive_path(n26_archive, n26_archive.archive_file.path))
20 | n26_archive.save()
21 |
22 | for twitter_archive in apps.get_model('archive', 'TwitterArchive').objects.all():
23 | twitter_archive.archive_file = str(archive_path(twitter_archive, twitter_archive.archive_file.path))
24 | twitter_archive.save()
25 |
26 | for googletakeout_archive in apps.get_model('archive', 'GoogleTakeoutArchive').objects.all():
27 | googletakeout_archive.archive_file = str(archive_path(googletakeout_archive, googletakeout_archive.archive_file.path))
28 | googletakeout_archive.save()
29 |
30 |
31 | class Migration(migrations.Migration):
32 |
33 | dependencies = [
34 | ('archive', '0007_auto_20210502_1244'),
35 | ]
36 |
37 | operations = [
38 | migrations.RunPython(update_path),
39 | ]
40 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0009_telegramarchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-05-27 10:36
2 |
3 | from django.db import migrations, models
4 |
5 | import archive.models.base
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('archive', '0008_auto_20210502_1258'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='TelegramArchive',
17 | fields=[
18 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
19 | ('description', models.TextField()),
20 | ('date_processed', models.DateTimeField(null=True)),
21 | ('archive_file', models.FileField(upload_to=archive.models.base.archive_path)),
22 | ('include_supergroup_chats', models.BooleanField(default=False, verbose_name='Include supergroup chats')),
23 | ('include_group_chats', models.BooleanField(default=True, verbose_name='Include private group chats')),
24 | ],
25 | options={
26 | 'abstract': False,
27 | },
28 | ),
29 | ]
30 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0010_auto_20210603_1102.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-06-03 11:02
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0009_telegramarchive'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterField(
14 | model_name='telegramarchive',
15 | name='include_group_chats',
16 | field=models.BooleanField(default=True, verbose_name='Include group chats'),
17 | ),
18 | ]
19 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0011_facebookarchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-06-14 12:53
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0010_auto_20210603_1102'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='FacebookArchive',
15 | fields=[
16 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
17 | ('description', models.TextField()),
18 | ('date_processed', models.DateTimeField(null=True)),
19 | ],
20 | options={
21 | 'abstract': False,
22 | },
23 | ),
24 | ]
25 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0012_auto_20210716_1352.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-07-16 13:52
2 | import django.db.models.deletion
3 | from django.db import migrations, models
4 | from django.db.transaction import atomic
5 |
6 | import archive.models.base
7 | from archive.models import ArchiveFile
8 |
9 |
10 | @atomic
11 | def move_archive_files(app, schema_editor):
12 | """
13 | Convert single file archives to multi-file archives
14 | """
15 | archive_instances = (
16 | list(app.get_model('archive', 'GpxArchive').objects.all())
17 | + list(app.get_model('archive', 'GoogleTakeoutArchive').objects.all())
18 | + list(app.get_model('archive', 'JsonArchive').objects.all())
19 | + list(app.get_model('archive', 'N26CsvArchive').objects.all())
20 | + list(app.get_model('archive', 'TelegramArchive').objects.all())
21 | + list(app.get_model('archive', 'TwitterArchive').objects.all())
22 | )
23 | for archive_instance in archive_instances:
24 | ArchiveFile.objects.create(
25 | archive=archive_instance,
26 | archive_file=archive_instance.archive_file
27 | )
28 |
29 |
30 | class Migration(migrations.Migration):
31 |
32 | dependencies = [
33 | ('contenttypes', '0002_remove_content_type_name'),
34 | ('archive', '0011_facebookarchive'),
35 | ]
36 |
37 | operations = [
38 | migrations.CreateModel(
39 | name='ArchiveFile',
40 | fields=[
41 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
42 | ('archive_key', models.CharField(max_length=50)),
43 | ('archive_file', models.FileField(upload_to=archive.models.base.archive_path)),
44 | ('archive_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
45 | ],
46 | ),
47 | migrations.RunPython(move_archive_files),
48 | migrations.RemoveField(
49 | model_name='googletakeoutarchive',
50 | name='archive_file',
51 | ),
52 | migrations.RemoveField(
53 | model_name='gpxarchive',
54 | name='archive_file',
55 | ),
56 | migrations.RemoveField(
57 | model_name='jsonarchive',
58 | name='archive_file',
59 | ),
60 | migrations.RemoveField(
61 | model_name='n26csvarchive',
62 | name='archive_file',
63 | ),
64 | migrations.RemoveField(
65 | model_name='telegramarchive',
66 | name='archive_file',
67 | ),
68 | migrations.RemoveField(
69 | model_name='twitterarchive',
70 | name='archive_file',
71 | ),
72 | ]
73 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0013_icalendararchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-01-11 21:40
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0012_auto_20210716_1352'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='ICalendarArchive',
15 | fields=[
16 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
17 | ('description', models.TextField()),
18 | ('date_processed', models.DateTimeField(null=True)),
19 | ],
20 | options={
21 | 'abstract': False,
22 | },
23 | ),
24 | ]
25 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0014_auto_20220516_1313.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-05-16 13:13
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0013_icalendararchive'),
10 | ]
11 |
12 | operations = [
13 | migrations.AddField(
14 | model_name='facebookarchive',
15 | name='date_from',
16 | field=models.DateTimeField(null=True),
17 | ),
18 | migrations.AddField(
19 | model_name='facebookarchive',
20 | name='date_until',
21 | field=models.DateTimeField(null=True),
22 | ),
23 | migrations.AddField(
24 | model_name='googletakeoutarchive',
25 | name='date_from',
26 | field=models.DateTimeField(null=True),
27 | ),
28 | migrations.AddField(
29 | model_name='googletakeoutarchive',
30 | name='date_until',
31 | field=models.DateTimeField(null=True),
32 | ),
33 | migrations.AddField(
34 | model_name='gpxarchive',
35 | name='date_from',
36 | field=models.DateTimeField(null=True),
37 | ),
38 | migrations.AddField(
39 | model_name='gpxarchive',
40 | name='date_until',
41 | field=models.DateTimeField(null=True),
42 | ),
43 | migrations.AddField(
44 | model_name='icalendararchive',
45 | name='date_from',
46 | field=models.DateTimeField(null=True),
47 | ),
48 | migrations.AddField(
49 | model_name='icalendararchive',
50 | name='date_until',
51 | field=models.DateTimeField(null=True),
52 | ),
53 | migrations.AddField(
54 | model_name='jsonarchive',
55 | name='date_from',
56 | field=models.DateTimeField(null=True),
57 | ),
58 | migrations.AddField(
59 | model_name='jsonarchive',
60 | name='date_until',
61 | field=models.DateTimeField(null=True),
62 | ),
63 | migrations.AddField(
64 | model_name='n26csvarchive',
65 | name='date_from',
66 | field=models.DateTimeField(null=True),
67 | ),
68 | migrations.AddField(
69 | model_name='n26csvarchive',
70 | name='date_until',
71 | field=models.DateTimeField(null=True),
72 | ),
73 | migrations.AddField(
74 | model_name='telegramarchive',
75 | name='date_from',
76 | field=models.DateTimeField(null=True),
77 | ),
78 | migrations.AddField(
79 | model_name='telegramarchive',
80 | name='date_until',
81 | field=models.DateTimeField(null=True),
82 | ),
83 | migrations.AddField(
84 | model_name='twitterarchive',
85 | name='date_from',
86 | field=models.DateTimeField(null=True),
87 | ),
88 | migrations.AddField(
89 | model_name='twitterarchive',
90 | name='date_until',
91 | field=models.DateTimeField(null=True),
92 | ),
93 | ]
94 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/0015_redditarchive.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-05-16 15:01
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('archive', '0014_auto_20220516_1313'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='RedditArchive',
15 | fields=[
16 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
17 | ('date_from', models.DateTimeField(null=True)),
18 | ('date_until', models.DateTimeField(null=True)),
19 | ('description', models.TextField()),
20 | ('date_processed', models.DateTimeField(null=True)),
21 | ],
22 | options={
23 | 'abstract': False,
24 | },
25 | ),
26 | ]
27 |
--------------------------------------------------------------------------------
/backend/source/archive/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/archive/migrations/__init__.py
--------------------------------------------------------------------------------
/backend/source/archive/models/__init__.py:
--------------------------------------------------------------------------------
1 | from archive.models.base import archive_path, ArchiveFile
2 | from archive.models.facebook import FacebookArchive
3 | from archive.models.google_takeout import GoogleTakeoutArchive
4 | from archive.models.gpx import GpxArchive
5 | from archive.models.icalendar import ICalendarArchive
6 | from archive.models.json import JsonArchive
7 | from archive.models.n26 import N26CsvArchive
8 | from archive.models.reddit import RedditArchive
9 | from archive.models.telegram import TelegramArchive
10 | from archive.models.twitter import TwitterArchive
11 |
12 |
13 | __all__ = [
14 | 'archive_path',
15 | 'ArchiveFile',
16 | 'FacebookArchive',
17 | 'GoogleTakeoutArchive',
18 | 'GpxArchive',
19 | 'ICalendarArchive',
20 | 'JsonArchive',
21 | 'N26CsvArchive',
22 | 'RedditArchive',
23 | 'TelegramArchive',
24 | 'TwitterArchive',
25 | ]
26 |
--------------------------------------------------------------------------------
/backend/source/archive/models/gpx.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Generator
3 |
4 | import gpxpy as gpxpy
5 |
6 | from archive.models.base import FileArchive
7 | from source.utils.datetime import datetime_to_json
8 | from timeline.models import Entry
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
13 | class GpxArchive(FileArchive):
14 | """
15 | A single GPX file
16 | """
17 | def entry_from_point(self, point) -> Entry:
18 | return Entry(
19 | schema='activity.location',
20 | source=self.entry_source,
21 | title=getattr(point, 'name') or '',
22 | description=getattr(point, 'description') or getattr(point, 'comment') or '',
23 | extra_attributes={
24 | 'location': {
25 | 'latitude': point.latitude,
26 | 'longitude': point.longitude,
27 | 'altitude': point.elevation,
28 | },
29 | },
30 | date_on_timeline=datetime_to_json(point.time)
31 | )
32 |
33 | def extract_entries(self) -> Generator[Entry, None, None]:
34 | for gpx_file in self.get_archive_files():
35 | gpx = gpxpy.parse(gpx_file)
36 | for track in gpx.tracks:
37 | for segment in track.segments:
38 | for point in segment.points:
39 | yield self.entry_from_point(point)
40 |
41 | for route in gpx.routes:
42 | for point in route.points:
43 | yield self.entry_from_point(point)
44 |
45 | for point in gpx.waypoints:
46 | yield self.entry_from_point(point)
47 |
--------------------------------------------------------------------------------
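
A quick sketch of the gpxpy objects that extract_entries() iterates over, using a made-up one-waypoint GPX document:

import gpxpy

# gpxpy.parse() accepts a string or a file object; extract_entries() walks the
# resulting tracks, routes and waypoints. This GPX document is illustrative.
gpx = gpxpy.parse(
    '<gpx version="1.1" creator="example">'
    '<wpt lat="52.52" lon="13.405"><name>Berlin</name></wpt>'
    '</gpx>'
)
for point in gpx.waypoints:
    print(point.latitude, point.longitude, point.name)  # 52.52 13.405 Berlin
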
/backend/source/archive/models/icalendar.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from collections import defaultdict
3 | from datetime import datetime, date
4 | from typing import Generator
5 |
6 | import pytz
7 | from icalendar import Calendar
8 |
9 | from archive.models.base import FileArchive
10 | from source.utils.datetime import datetime_to_json
11 | from timeline.models import Entry
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | class ICalendarArchive(FileArchive):
17 | @staticmethod
18 | def normalize_date(date_obj: datetime):
19 | if type(date_obj) == date:
20 | # TODO: What time should an all-day event have?
21 | date_obj = pytz.utc.localize(datetime(year=date_obj.year, month=date_obj.month, day=date_obj.day, hour=12))
22 | return date_obj
23 |
24 | def extract_entries(self) -> Generator[Entry, None, None]:
25 | for ics_file in self.get_archive_files():
26 | with open(ics_file, 'r') as file:
27 | calendar = Calendar.from_ical(file.read())
28 | for event in calendar.walk('VEVENT'):
29 | event_metadata = defaultdict(dict)
30 | event_metadata['event']['start_date'] = datetime_to_json(self.normalize_date(event['DTSTART'].dt))
31 |
32 | if event.get('DTEND'):
33 | event_metadata['event']['end_date'] = datetime_to_json(self.normalize_date(event['DTEND'].dt))
34 |
35 | if event.get('DTSTAMP'):
36 | event_metadata['event']['creation_date'] = datetime_to_json(self.normalize_date(event['DTSTAMP'].dt))
37 |
38 | if event.get('LOCATION'):
39 | event_metadata['location']['name'] = event['LOCATION']
40 |
41 | yield Entry(
42 | source=self.entry_source,
43 | schema='event',
44 | title=str(event.get('SUMMARY', '')),
45 | description=str(event.get('DESCRIPTION', '')),
46 | date_on_timeline=self.normalize_date(event['DTSTART'].dt),
47 | extra_attributes=dict(event_metadata),
48 | )
--------------------------------------------------------------------------------
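
The normalize_date() helper exists because DTSTART decodes to a plain date for all-day events. A small sketch with a made-up event:

from icalendar import Calendar

# All-day events use DTSTART;VALUE=DATE, which icalendar decodes to a
# datetime.date rather than a datetime — hence normalize_date() above.
ics = (
    'BEGIN:VCALENDAR\r\n'
    'BEGIN:VEVENT\r\n'
    'SUMMARY:Dentist\r\n'
    'DTSTART;VALUE=DATE:20220111\r\n'
    'END:VEVENT\r\n'
    'END:VCALENDAR\r\n'
)
event = Calendar.from_ical(ics).walk('VEVENT')[0]
print(type(event['DTSTART'].dt))  # <class 'datetime.date'>
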
/backend/source/archive/models/json.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from typing import Generator
4 |
5 | from archive.models.base import FileArchive
6 | from timeline.models import Entry
7 | from timeline.serializers import EntrySerializer
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | class JsonArchive(FileArchive):
13 | """
14 | A list of JSON entries, as returned by the API
15 | """
16 | def extract_entries(self) -> Generator[Entry, None, None]:
17 | for json_file in self.get_archive_files():
18 | json_entries = json.load(json_file)
19 | for json_entry in json_entries:
20 | json_entry['source'] = self.entry_source
21 | json_entry.pop('id', None)
22 | serializer = EntrySerializer(data=json_entry)
23 | assert serializer.is_valid()
24 | yield Entry(**serializer.validated_data)
25 |
--------------------------------------------------------------------------------
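
For reference, one illustrative element of a JSON archive file. The field names follow timeline.models.Entry as used throughout this app; the values are made up, and 'id' and 'source' are discarded or overwritten on import anyway:

example_entry = {
    "schema": "journal",
    "title": "A day in Berlin",
    "description": "Free-form text",
    "extra_attributes": {},
    "date_on_timeline": "2021-06-01T12:00:00Z",
}
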
/backend/source/archive/models/n26.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | import csv
3 | import logging
4 | from typing import Generator
5 | from datetime import datetime
6 | from decimal import Decimal
7 |
8 | import pytz
9 |
10 | from archive.models.base import FileArchive
11 | from timeline.models import Entry
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | class N26CsvArchive(FileArchive):
17 | """
18 | Reads CSV transaction lists exported by N26
19 | """
20 | def extract_entries(self) -> Generator[Entry, None, None]:
21 | default_currency = 'EUR'
22 | default_timezone = 'Europe/Berlin' # TODO: If this thing gets a million users, that assumption could be wrong
23 | income_types = ('Income', 'Direct Debit Reversal')
24 |
25 | for csv_file in self.get_archive_files():
26 | account_iban = None
27 |
28 | # Loop once to find the account owner's IBAN
29 | for line in csv.DictReader(codecs.iterdecode(csv_file.open('rb'), 'utf-8'), delimiter=',', quotechar='"'):
30 | if line['Transaction type'] in income_types and line['Account number']:
31 | account_iban = line['Account number']
32 | break
33 |
34 | for line in csv.DictReader(codecs.iterdecode(csv_file.open('rb'), 'utf-8'), delimiter=',', quotechar='"'):
35 | schema = 'finance.income' if line['Transaction type'] in income_types else 'finance.expense'
36 |
37 | first_party = {
38 | 'name': None,
39 | 'amount': str(Decimal(line['Amount (EUR)']).copy_abs()),
40 | 'currency': default_currency,
41 | }
42 | if account_iban:
43 | first_party['iban'] = account_iban
44 |
45 | third_party = {
46 | 'name': line['Payee'],
47 | 'amount': str(Decimal(line['Amount (Foreign Currency)'] or line['Amount (EUR)']).copy_abs()),
48 | 'currency': line['Type Foreign Currency'] or default_currency,
49 | }
50 | if line['Account number']:
51 | third_party['iban'] = line['Account number']
52 |
53 | # The transactions don't have a time. Set it to noon, local timezone
54 | entry_date = pytz.timezone(default_timezone)\
55 | .localize(datetime.strptime(line['Date'], '%Y-%m-%d'))\
56 | .replace(hour=12)\
57 | .astimezone(pytz.UTC)
58 |
59 | yield Entry(
60 | schema=schema,
61 | source=self.entry_source,
62 | title=line['Transaction type'],
63 | description='' if line['Payment reference'] == '-' else line['Payment reference'],
64 | extra_attributes={
65 | 'bank': {'name': 'N26'},
66 | 'sender': first_party if schema == 'finance.expense' else third_party,
67 | 'recipient': third_party if schema == 'finance.expense' else first_party,
68 | },
69 | date_on_timeline=entry_date
70 | )
71 |
--------------------------------------------------------------------------------
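
To make the column handling above concrete, here is a made-up CSV in the shape this parser expects (the real N26 export may contain more columns, and the values here are invented):

import csv, io

# A fabricated transaction list with the columns extract_entries() reads.
sample = io.StringIO(
    '"Date","Payee","Account number","Transaction type","Payment reference",'
    '"Amount (EUR)","Amount (Foreign Currency)","Type Foreign Currency"\n'
    '"2021-03-01","ACME GmbH","DE00123456780000000000","Outgoing Transfer","Invoice 42","-19.99","",""\n'
)
for line in csv.DictReader(sample, delimiter=',', quotechar='"'):
    print(line['Transaction type'], line['Amount (EUR)'])  # Outgoing Transfer -19.99
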
/backend/source/archive/models/twitter.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import shutil
4 | from datetime import datetime
5 | from pathlib import Path
6 | from typing import Generator
7 |
8 | import pytz
9 |
10 | from archive.models.base import CompressedFileArchive
11 | from timeline.models import Entry
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | def remove_twitter_js(input_path: Path, output_path: Path):
17 | """
18 | The files are not JSON files, but JavaScript files. They're just a big JS array assigned to a variable. If we
19 | replace the first line, we get valid JSON.
20 | """
21 | with input_path.open('r') as input_file, output_path.open('w') as output_file:
22 | input_file.readline() # Discard the first line, "window.YTD.tweet.part0 = [ {"
23 | output_file.write('[ {')
24 | shutil.copyfileobj(input_file, output_file) # Write rest of file
25 |
26 |
27 | def twitter_date_to_datetime(twitter_date: str) -> datetime:
28 | return pytz.utc.localize(datetime.strptime(twitter_date, '%a %b %d %H:%M:%S +0000 %Y'))
29 |
30 |
31 | class TwitterArchive(CompressedFileArchive):
32 | """
33 | A Twitter data dump import
34 | """
35 | def get_account_info(self):
36 | js_file = self.extracted_files_path / 'data/account.js'
37 | json_file = self.extracted_files_path / 'data/account.json'
38 | remove_twitter_js(js_file, json_file)
39 |
40 | with json_file.open(encoding='utf-8') as json_file_handle:
41 | return json.load(json_file_handle)[0]['account']
42 |
43 | def extract_entries(self) -> Generator[Entry, None, None]:
44 | account_info = self.get_account_info()
45 |
46 | js_file_path = self.extracted_files_path / 'data/tweet.js'
47 | json_file_path = self.extracted_files_path / 'data/tweet.json'
48 | remove_twitter_js(js_file_path, json_file_path)
49 |
50 | with json_file_path.open('r', encoding='utf-8') as json_file:
51 | json_entries = [t['tweet'] for t in json.load(json_file)]
52 |
53 | logger.info(f"Adding tweets found in {str(json_file_path)}")
54 | for tweet in json_entries:
55 | entry = Entry(
56 | schema='social.twitter.tweet',
57 | title='',
58 | description=tweet['full_text'],
59 | date_on_timeline=twitter_date_to_datetime(tweet['created_at']),
60 | extra_attributes={
61 | "post_id": tweet['id'],
62 | "post_user": account_info['username'],
63 | "source": self.entry_source,
64 | },
65 | source=self.entry_source,
66 | )
67 |
68 | if tweet.get('in_reply_to_status_id'):
69 | entry.extra_attributes['post_parent_id'] = tweet['in_reply_to_status_id']
70 |
71 | yield entry
72 |
--------------------------------------------------------------------------------
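
A sketch of what remove_twitter_js() does, runnable from a Django shell inside the backend container; the file contents are made up:

import json
import tempfile
from pathlib import Path

from archive.models.twitter import remove_twitter_js

# Twitter ships tweet.js as 'window.YTD.tweet.part0 = [ {...'; dropping the
# variable assignment on the first line leaves plain JSON.
with tempfile.TemporaryDirectory() as tmp:
    js_path = Path(tmp) / 'tweet.js'
    js_path.write_text('window.YTD.tweet.part0 = [ {\n"tweet": {"id": "1", "full_text": "hi"}} ]')
    json_path = Path(tmp) / 'tweet.json'
    remove_twitter_js(js_path, json_path)
    print(json.loads(json_path.read_text())[0]['tweet']['full_text'])  # hi
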
/backend/source/archive/serializers.py:
--------------------------------------------------------------------------------
1 | from django.db import transaction
2 | from rest_framework import serializers
3 |
4 | from archive.models import JsonArchive, GpxArchive, N26CsvArchive, TelegramArchive, FacebookArchive, ICalendarArchive, \
5 | RedditArchive
6 | from archive.models.base import ArchiveFile
7 | from archive.models.google_takeout import GoogleTakeoutArchive
8 | from archive.models.twitter import TwitterArchive
9 | from source.serializers import BaseSourceSerializer
10 |
11 |
12 | class ArchiveFileSerializer(serializers.ModelSerializer):
13 | url = serializers.SerializerMethodField('get_file_url')
14 |
15 | def get_file_url(self, archive_file):
16 | return archive_file.archive_file.url
17 |
18 | class Meta:
19 | model = ArchiveFile
20 | fields = ('id', 'url')
21 |
22 |
23 | class ArchiveFileRelatedField(serializers.RelatedField):
24 | def to_representation(self, archive_file: ArchiveFile):
25 | return {
26 | 'url': archive_file.archive_file.url,
27 | 'id': archive_file.id,
28 | }
29 |
30 | def to_internal_value(self, data) -> ArchiveFile:
31 | return ArchiveFile(archive_file=data)
32 |
33 |
34 | class BaseArchiveSerializer(BaseSourceSerializer):
35 | archive_files = ArchiveFileRelatedField(many=True, queryset=ArchiveFile.objects.all())
36 | source_name = serializers.ReadOnlyField()
37 |
38 | def create(self, validated_data):
39 | with transaction.atomic():
40 | archive_files_uploads = validated_data.pop('archive_files')
41 | archive = self.Meta.model.objects.create(**validated_data)
42 | for archive_file in archive_files_uploads:
43 | archive_file.archive = archive
44 | archive_file.save()
45 | return archive
46 |
47 | def update(self, instance, validated_data):
48 | with transaction.atomic():
49 | archive_files_uploads = validated_data.pop('archive_files')
50 | for archive_file in archive_files_uploads:
51 | archive_file.archive = instance
52 | archive_file.save()
53 | return super().update(instance, validated_data)
54 |
55 |
56 | class GoogleTakeoutArchiveSerializer(BaseArchiveSerializer):
57 | class Meta:
58 | model = GoogleTakeoutArchive
59 | fields = '__all__'
60 |
61 |
62 | class TwitterArchiveSerializer(BaseArchiveSerializer):
63 | class Meta:
64 | model = TwitterArchive
65 | fields = '__all__'
66 |
67 |
68 | class JsonArchiveSerializer(BaseArchiveSerializer):
69 | class Meta:
70 | model = JsonArchive
71 | fields = '__all__'
72 |
73 |
74 | class GpxArchiveSerializer(BaseArchiveSerializer):
75 | class Meta:
76 | model = GpxArchive
77 | fields = '__all__'
78 |
79 |
80 | class N26CsvArchiveSerializer(BaseArchiveSerializer):
81 | class Meta:
82 | model = N26CsvArchive
83 | fields = '__all__'
84 |
85 |
86 | class TelegramArchiveSerializer(BaseArchiveSerializer):
87 | class Meta:
88 | model = TelegramArchive
89 | fields = '__all__'
90 |
91 |
92 | class FacebookArchiveSerializer(BaseArchiveSerializer):
93 | class Meta:
94 | model = FacebookArchive
95 | fields = '__all__'
96 |
97 |
98 | class ICalendarArchiveSerializer(BaseArchiveSerializer):
99 | class Meta:
100 | model = ICalendarArchive
101 | fields = '__all__'
102 |
103 |
104 | class RedditArchiveSerializer(BaseArchiveSerializer):
105 | class Meta:
106 | model = RedditArchive
107 | fields = '__all__'
108 |
--------------------------------------------------------------------------------
/backend/source/archive/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import include, path
2 | from rest_framework import routers
3 |
4 | from .views import GoogleTakeoutArchiveViewSet, TwitterArchiveViewSet, JsonArchiveViewSet, GpxArchiveViewSet, \
5 | N26CsvArchiveViewSet, TelegramArchiveViewSet, FacebookArchiveViewSet, ArchiveFileViewSet, ICalendarArchiveViewSet, \
6 | RedditArchiveViewSet
7 |
8 | router = routers.DefaultRouter()
9 | router.register(r'facebook', FacebookArchiveViewSet)
10 | router.register(r'googletakeout', GoogleTakeoutArchiveViewSet)
11 | router.register(r'gpx', GpxArchiveViewSet)
12 | router.register(r'json', JsonArchiveViewSet)
13 | router.register(r'n26csv', N26CsvArchiveViewSet)
14 | router.register(r'telegram', TelegramArchiveViewSet)
15 | router.register(r'twitter', TwitterArchiveViewSet)
16 | router.register(r'icalendar', ICalendarArchiveViewSet)
17 | router.register(r'reddit', RedditArchiveViewSet)
18 | router.register(r'archivefile', ArchiveFileViewSet)
19 |
20 | urlpatterns = [
21 | path('', include(router.urls)),
22 | ]
23 |
--------------------------------------------------------------------------------
/backend/source/archive/views.py:
--------------------------------------------------------------------------------
1 | from rest_framework import viewsets
2 |
3 | from archive.models import JsonArchive, GpxArchive, N26CsvArchive, TelegramArchive, FacebookArchive, ArchiveFile, \
4 | ICalendarArchive, RedditArchive
5 | from archive.models.google_takeout import GoogleTakeoutArchive
6 | from archive.models.twitter import TwitterArchive
7 | from archive.serializers import GoogleTakeoutArchiveSerializer, TwitterArchiveSerializer, JsonArchiveSerializer, \
8 | GpxArchiveSerializer, N26CsvArchiveSerializer, TelegramArchiveSerializer, FacebookArchiveSerializer, \
9 | ArchiveFileSerializer, ICalendarArchiveSerializer, RedditArchiveSerializer
10 |
11 |
12 | class ArchiveModelViewSet(viewsets.ModelViewSet):
13 | required_alternate_scopes = {
14 | "GET": [["archive:read"]],
15 | "POST": [["archive:write"]],
16 | "PUT": [["archive:write"]],
17 | "DELETE": [["archive:write"]],
18 | }
19 |
20 |
21 | class GoogleTakeoutArchiveViewSet(ArchiveModelViewSet):
22 | queryset = GoogleTakeoutArchive.objects.all()
23 | serializer_class = GoogleTakeoutArchiveSerializer
24 |
25 |
26 | class TwitterArchiveViewSet(ArchiveModelViewSet):
27 | queryset = TwitterArchive.objects.all()
28 | serializer_class = TwitterArchiveSerializer
29 |
30 |
31 | class JsonArchiveViewSet(ArchiveModelViewSet):
32 | queryset = JsonArchive.objects.all()
33 | serializer_class = JsonArchiveSerializer
34 |
35 |
36 | class GpxArchiveViewSet(ArchiveModelViewSet):
37 | queryset = GpxArchive.objects.all()
38 | serializer_class = GpxArchiveSerializer
39 |
40 |
41 | class N26CsvArchiveViewSet(ArchiveModelViewSet):
42 | queryset = N26CsvArchive.objects.all()
43 | serializer_class = N26CsvArchiveSerializer
44 |
45 |
46 | class TelegramArchiveViewSet(ArchiveModelViewSet):
47 | queryset = TelegramArchive.objects.all()
48 | serializer_class = TelegramArchiveSerializer
49 |
50 |
51 | class FacebookArchiveViewSet(ArchiveModelViewSet):
52 | queryset = FacebookArchive.objects.all()
53 | serializer_class = FacebookArchiveSerializer
54 |
55 |
56 | class ICalendarArchiveViewSet(ArchiveModelViewSet):
57 | queryset = ICalendarArchive.objects.all()
58 | serializer_class = ICalendarArchiveSerializer
59 |
60 |
61 | class RedditArchiveViewSet(ArchiveModelViewSet):
62 | queryset = RedditArchive.objects.all()
63 | serializer_class = RedditArchiveSerializer
64 |
65 |
66 | class ArchiveFileViewSet(ArchiveModelViewSet):
67 | queryset = ArchiveFile.objects.all()
68 | serializer_class = ArchiveFileSerializer
69 | http_method_names = ['get', 'list', 'delete']
--------------------------------------------------------------------------------
/backend/source/authentication/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/authentication/__init__.py
--------------------------------------------------------------------------------
/backend/source/authentication/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 | class AuthenticationConfig(AppConfig):
4 | name = 'authentication'
5 |
--------------------------------------------------------------------------------
/backend/source/authentication/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/authentication/management/__init__.py
--------------------------------------------------------------------------------
/backend/source/authentication/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/authentication/management/commands/__init__.py
--------------------------------------------------------------------------------
/backend/source/authentication/management/commands/assert_app_has_users.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 |
4 | from django.core.management import BaseCommand, CommandError
5 | from django.contrib.auth import get_user_model
6 |
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
11 | class Command(BaseCommand):
12 | help = 'Ensures that the application has at least one user'
13 |
14 | def handle(self, *args, **options):
15 | User = get_user_model()
16 | if User.objects.all().count() == 0:
17 | logger.error('Application has no users. You will not be able to log in. '
18 | 'Create a user with scripts/timeline-create-user.sh')
19 | sys.exit(1)
--------------------------------------------------------------------------------
/backend/source/authentication/management/commands/get_or_create_oauth_app.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import secrets
3 | import string
4 |
5 | from django.core.management import BaseCommand
6 | from oauth2_provider.models import Application
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
11 | def generate_random_string(length):
12 | alphabet = string.ascii_letters + string.digits
13 | return ''.join(secrets.choice(alphabet) for i in range(length))
14 |
15 |
16 | class Command(BaseCommand):
17 | help = 'Ensures that an OAuth application with the specified client_id exists. ' \
18 | 'If the app does not exist, it is created.'
19 |
20 | def handle(self, *args, **options):
21 | app, is_new = Application.objects.get_or_create(
22 | client_id=options['client_id'] or generate_random_string(20),
23 | defaults={
24 | 'name': options['name'],
25 | 'client_secret': options['client_secret'] or generate_random_string(20),
26 | 'client_type': options['client_type'],
27 | 'redirect_uris': options['redirect_uri'] or '',
28 | 'authorization_grant_type': options['authorization_grant'],
29 | })
30 | if is_new:
31 | logger.info(f"New OAuth application created for client_id {options['client_id']}")
32 | else:
33 | logger.info(f"There is already an OAuth application with client_id {options['client_id']}")
34 |
35 | def add_arguments(self, parser):
36 | parser.add_argument('--name', type=str)
37 | parser.add_argument('--client-id', type=str)
38 | parser.add_argument('--client-type', type=str)
39 | parser.add_argument('--client-secret', type=str)
40 | parser.add_argument('--redirect-uri', type=str)
41 | parser.add_argument('--authorization-grant', type=str)
42 |
--------------------------------------------------------------------------------
/backend/source/authentication/templates/registration/login.html:
--------------------------------------------------------------------------------
[HTML markup lost in extraction; only the page title "Timeline" is recoverable]
--------------------------------------------------------------------------------
/backend/source/authentication/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import include, path
2 |
3 | urlpatterns = [
4 | path('', include('django.contrib.auth.urls')),
5 | ]
6 |
--------------------------------------------------------------------------------
/backend/source/backend/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/backend/__init__.py
--------------------------------------------------------------------------------
/backend/source/backend/asgi.py:
--------------------------------------------------------------------------------
1 | """
2 | ASGI config for backend project.
3 |
4 | It exposes the ASGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.asgi import get_asgi_application
13 |
14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
15 |
16 | application = get_asgi_application()
17 |
--------------------------------------------------------------------------------
/backend/source/backend/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import path, include
2 | from django.contrib import admin
3 |
4 | urlpatterns = [
5 | path('archive/', include('archive.urls')),
6 | path('auth/', include('authentication.urls')),
7 | path('oauth/', include('oauth2_provider.urls', namespace='oauth2_provider')),
8 | path('timeline/', include('timeline.urls')),
9 | path('source/', include('source.urls')),
10 | path('destination/', include('destination.urls')),
11 | ]
12 |
--------------------------------------------------------------------------------
/backend/source/backend/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for backend project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/backend/source/cron-tasks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | function logline {
3 | echo -e -n "\x1b[32m`date +'%Y-%m-%d %H:%M:%S'` \x1b[90mINFO \x1b[93m[cron-tasks.sh]\x1b[0m "
4 | echo "$1"
5 | }
6 |
7 | logline "Running cron-tasks.sh..."
8 |
9 | # flock prevents multiple instances of this script from running at the same time
10 | (
11 | flock -n 200 || exit 111;
12 | source /etc/timeline-cronenv;
13 | /usr/local/bin/python /usr/src/app/manage.py import > /tmp/stdout 2>&1;
14 | /usr/local/bin/python /usr/src/app/manage.py export > /tmp/stdout 2>&1;
15 | ) 200>/etc/cronjobs.lock
16 |
17 | exit_code=$?
18 | if [ $exit_code -ne 0 ]; then
19 | if [ $exit_code -eq 111 ]; then
20 | logline "cron-tasks.sh did not run - another instance is already running"
21 | else
22 | logline "cron-tasks.sh failed - exit code was ${exit_code}"
23 | fi
24 | else
25 | logline "cron-tasks.sh finished without errors"
26 | fi
27 |
--------------------------------------------------------------------------------
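
Note: the script above serializes runs with flock(1): it takes a non-blocking exclusive lock on file descriptor 200 (backed by /etc/cronjobs.lock) and exits with code 111 when another instance already holds the lock. A minimal sketch of the same pattern in Python, assuming a Linux host where fcntl is available (this snippet is illustrative and not part of the repository):

    import fcntl
    import sys

    # Take a non-blocking exclusive lock; exit with 111 if another instance
    # already holds it, mirroring the convention used by cron-tasks.sh.
    with open('/etc/cronjobs.lock', 'w') as lock_file:
        try:
            fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except BlockingIOError:
            sys.exit(111)
        # ... run the import and export management commands while the lock is held ...
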
/backend/source/destination/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/destination/__init__.py
--------------------------------------------------------------------------------
/backend/source/destination/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class DestinationConfig(AppConfig):
5 | name = 'destination'
6 |
--------------------------------------------------------------------------------
/backend/source/destination/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/destination/management/__init__.py
--------------------------------------------------------------------------------
/backend/source/destination/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/destination/management/commands/__init__.py
--------------------------------------------------------------------------------
/backend/source/destination/management/commands/export.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from source.management.commands import ModelProcessingCommand
4 | from destination.models.destination import BaseDestination
5 |
6 | logger = logging.getLogger(__name__)
7 |
8 |
9 | class Command(ModelProcessingCommand):
10 | class_name = 'destination'
11 | default_class = BaseDestination
12 |
--------------------------------------------------------------------------------
/backend/source/destination/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-04-19 13:58
2 |
3 | import django.core.validators
4 | from django.db import migrations, models
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | initial = True
10 |
11 | dependencies = [
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='RsyncDestination',
17 | fields=[
18 | ('user', models.CharField(max_length=80)),
19 | ('host', models.CharField(max_length=255)),
20 | ('port', models.PositiveIntegerField(default=22, validators=[django.core.validators.MaxValueValidator(65535)])),
21 | ('path', models.TextField()),
22 | ('key_exchange_method', models.CharField(choices=[('hetzner', 'hetzner'), ('ssh-copy-id', 'ssh-copy-id')], default='ssh-copy-id', max_length=20)),
23 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
24 | ],
25 | options={
26 | 'abstract': False,
27 | },
28 | ),
29 | ]
30 |
--------------------------------------------------------------------------------
/backend/source/destination/migrations/0002_rename_backups_to_destination.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from django.db import migrations, ProgrammingError, connection
4 | from django.apps import apps
5 | from django.db.backends.utils import truncate_name
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 |
10 | def rename_app(app, schema_editor):
11 | old_app_name = 'backup'
12 | new_app_name = 'destination'
13 |
14 | schema_editor.execute(
15 | "SELECT * FROM django_content_type "
16 | f"WHERE app_label='{new_app_name}'"
17 | )
18 |
19 | schema_editor.execute(
20 | f"UPDATE django_content_type SET app_label='{new_app_name}' "
21 | f"WHERE app_label='{old_app_name}'"
22 | )
23 | schema_editor.execute(
24 | f"UPDATE django_migrations SET app='{new_app_name}' "
25 | f"WHERE app='{old_app_name}'"
26 | )
27 | models = apps.all_models[new_app_name]
28 | models.update(apps.all_models[old_app_name])
29 | with connection.cursor() as cursor:
30 | for model_name in models:
31 | old_table_name = truncate_name(f"{old_app_name}_{model_name}", connection.ops.max_name_length())
32 | new_table_name = truncate_name(f"{new_app_name}_{model_name}", connection.ops.max_name_length())
33 | cursor.execute(f"SELECT * FROM information_schema.tables "
34 | f"WHERE table_schema LIKE 'public' "
35 | f"AND table_type LIKE 'BASE TABLE' "
36 | f"AND table_name = '{old_table_name}'")
37 | old_table_exists = cursor.fetchone()
38 | if old_table_exists:
39 | logger.info(f"Moving old table {old_table_name} to {new_table_name}")
40 | delete_query = f"DROP TABLE {new_table_name}"
41 | try:
42 | schema_editor.execute(delete_query)
43 | except ProgrammingError:
44 | logger.error('Query failed: "%s"', delete_query, exc_info=True)
45 |
46 | rename_query = f"ALTER TABLE {old_table_name} RENAME TO {new_table_name}"
47 | try:
48 | schema_editor.execute(rename_query)
49 | except ProgrammingError:
50 | logger.error('Query failed: "%s"', rename_query, exc_info=True)
51 | else:
52 | logger.warning(f"Did not find old table {old_table_name}. "
53 | f"If you are starting this project for the first time, this is fine. "
54 | f"If you are updating the app, something went wrong when renaming tables.")
55 |
56 |
57 | class Migration(migrations.Migration):
58 | # Commits 620b36 and c83309 split the /backup app into /source and /destination apps. This migrates the old tables.
59 |
60 | dependencies = [
61 | ('destination', '0001_initial'),
62 | ]
63 |
64 | operations = [
65 | migrations.RunPython(rename_app),
66 | ]
67 |
--------------------------------------------------------------------------------
/backend/source/destination/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/destination/migrations/__init__.py
--------------------------------------------------------------------------------
/backend/source/destination/models/__init__.py:
--------------------------------------------------------------------------------
1 | from destination.models.rsync import RsyncDestination
2 |
3 | __all__ = [
4 | 'RsyncDestination',
5 | ]
--------------------------------------------------------------------------------
/backend/source/destination/models/destination.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Iterable
3 |
4 | from django.db import models
5 |
6 | logger = logging.getLogger(__name__)
7 |
8 |
9 | class BaseDestination(models.Model):
10 | key = models.SlugField(max_length=80, primary_key=True)
11 |
12 | class Meta:
13 | abstract = True
14 |
15 | @property
16 | def destination_name(self) -> str:
17 | return type(self).__name__
18 |
19 | def __str__(self) -> str:
20 | return f"{self.destination_name}/{self.key}"
21 |
22 | def get_preprocessing_tasks(self) -> Iterable:
23 | return []
24 |
25 | def get_postprocessing_tasks(self) -> Iterable:
26 | return []
27 |
28 | def process(self, force=False):
29 | raise NotImplementedError
--------------------------------------------------------------------------------
/backend/source/destination/models/rsync.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import subprocess
3 |
4 | from django.conf import settings
5 |
6 | from source.models.rsync import RsyncConnectionMixin, pathlib_to_rsync_path, str_to_rsync_path, remote_rsync_path
7 | from source.utils.preprocessing import dump_entries
8 | from destination.models.destination import BaseDestination
9 |
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | class RsyncDestination(RsyncConnectionMixin, BaseDestination):
15 | """
16 | Backs up the timeline using rsync
17 | """
18 | def get_preprocessing_tasks(self):
19 | return [
20 | dump_entries,
21 | ]
22 |
23 | def process(self, force=False):
24 | source_dir = pathlib_to_rsync_path(settings.DATA_ROOT)
25 | destination_dir = str_to_rsync_path(self.path)
26 | remote_destination = remote_rsync_path(self.user, self.host, destination_dir)
27 | logger.info(f"Exporting data with rsync to {remote_destination}")
28 | rsync_command = [
29 | "rsync",
30 | "-az",
31 | "-H", # Preserve hard links. Avoids retransferring hard-linked files in incremental backups
32 | "--delete",
33 | "-e", f"ssh -p{self.port}",
34 | "--timeout", "120",
35 | source_dir,
36 | remote_destination,
37 | ]
38 | subprocess.check_call(rsync_command)
39 |
--------------------------------------------------------------------------------
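
Note: with illustrative values (user 'backup', host 'example.com', the default port 22, destination path '/backups/timeline'), the argv assembled by process() above corresponds to running:

    rsync -az -H --delete -e "ssh -p22" --timeout 120 <DATA_ROOT as rsync path> backup@example.com:<destination as rsync path>

The exact source and destination strings depend on the rsync path helpers imported from source/models/rsync.py, which are not shown here.
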
/backend/source/destination/serializers.py:
--------------------------------------------------------------------------------
1 | from rest_framework import serializers
2 |
3 | from destination.models.rsync import RsyncDestination
4 |
5 |
6 | class BaseDestinationSerializer(serializers.HyperlinkedModelSerializer):
7 | key = serializers.CharField()
8 |
9 |
10 | class RsyncDestinationSerializer(BaseDestinationSerializer):
11 | password = serializers.CharField(write_only=True, style={'input_type': 'password'})
12 |
13 | class Meta:
14 | model = RsyncDestination
15 | fields = '__all__'
16 |
--------------------------------------------------------------------------------
/backend/source/destination/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import include, path
2 | from rest_framework import routers
3 |
4 | from destination.views import RsyncDestinationViewSet
5 |
6 | router = routers.DefaultRouter()
7 | router.register(r'rsync', RsyncDestinationViewSet)
8 |
9 | urlpatterns = [
10 | path('', include(router.urls)),
11 | ]
12 |
--------------------------------------------------------------------------------
/backend/source/destination/views.py:
--------------------------------------------------------------------------------
1 | from destination.models import RsyncDestination
2 | from destination.serializers import RsyncDestinationSerializer
3 | from source.views import RsyncSourceViewSet
4 |
5 |
6 | class RsyncDestinationViewSet(RsyncSourceViewSet):
7 | required_alternate_scopes = {
8 | "GET": [["destination:read"]],
9 | "POST": [["destination:write"]],
10 | "PUT": [["destination:write"]],
11 | "DELETE": [["destination:write"]],
12 | }
13 |
14 | queryset = RsyncDestination.objects.all().order_by('key')
15 | serializer_class = RsyncDestinationSerializer
16 |
--------------------------------------------------------------------------------
/backend/source/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Django's command-line utility for administrative tasks."""
3 | import os
4 | import sys
5 |
6 |
7 | def main():
8 | """Run administrative tasks."""
9 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
10 | try:
11 | from django.core.management import execute_from_command_line
12 | except ImportError as exc:
13 | raise ImportError(
14 | "Couldn't import Django. Are you sure it's installed and "
15 | "available on your PYTHONPATH environment variable? Did you "
16 | "forget to activate a virtual environment?"
17 | ) from exc
18 | execute_from_command_line(sys.argv)
19 |
20 |
21 | if __name__ == '__main__':
22 | main()
23 |
--------------------------------------------------------------------------------
/backend/source/source/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/source/__init__.py
--------------------------------------------------------------------------------
/backend/source/source/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class SourceConfig(AppConfig):
5 | name = 'source'
6 |
--------------------------------------------------------------------------------
/backend/source/source/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/source/management/__init__.py
--------------------------------------------------------------------------------
/backend/source/source/management/commands/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from itertools import chain
3 |
4 | from django.core.management import BaseCommand
5 |
6 | from source.utils.models import get_models_by_name
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
11 | class ModelProcessingCommand(BaseCommand):
12 | class_name = 'models'
13 | default_class = None
14 |
15 | def handle(self, *args, **options):
16 | class_names = options.get('classes_to_process') or [self.default_class.__name__]
17 | classes_to_process = get_models_by_name(class_names)
18 | if not classes_to_process:
19 | raise ValueError(f"No classes of types {class_names} found")
20 |
21 | logger.info(f"Processing {self.class_name} types: {[model.__name__ for model in classes_to_process]}")
22 | force_message = ' (with --force)' if options['force'] else ''
23 |
24 | instances_to_process = list(chain.from_iterable([c.objects.all() for c in classes_to_process]))
25 |
26 | preprocessing_tasks = set()
27 | postprocessing_tasks = set()
28 |
29 | for instance in instances_to_process:
30 | preprocessing_tasks.update(instance.get_preprocessing_tasks())
31 | postprocessing_tasks.update(instance.get_postprocessing_tasks())
32 |
33 | if len(preprocessing_tasks):
34 | logger.info(f"Running {len(preprocessing_tasks)} preprocessing tasks{force_message}")
35 | for task in preprocessing_tasks:
36 | task(force=options['force'])
37 |
38 | failure_count = 0
39 | for instance in instances_to_process:
40 | try:
41 | logger.info(f"Processing {instance}")
42 | self.process_instance(instance, force=options['force'])
43 | except KeyboardInterrupt:
44 | raise
45 | except Exception:
46 | logger.exception(f"Failed to process {str(instance)}")
47 | failure_count += 1
48 |
49 | logger.info(f"{len(instances_to_process)} {self.class_name} instances processed. "
50 | f"{len(instances_to_process) - failure_count} successful, {failure_count} failed.")
51 |
52 | if len(postprocessing_tasks):
53 | logger.info(f"Running {len(postprocessing_tasks)} postprocessing tasks{force_message}")
54 | for task in postprocessing_tasks:
55 | task(force=options['force'])
56 |
57 | logger.info(f"Finished processing all {self.class_name} instances")
58 |
59 | def process_instance(self, instance, force):
60 | return instance.process(force)
61 |
62 | def add_arguments(self, parser):
63 | parser.add_argument(
64 | 'classes_to_process',
65 | nargs='*',
66 | type=str,
67 | help=f'One or more {self.class_name} class names to process. By default, all {self.class_name} types are '
68 | 'processed.',
69 | )
70 | parser.add_argument(
71 | '--force',
72 | action='store_true',
73 | help=f'Process {self.class_name} instances even if they do not appear to need processing.',
74 | )
75 |
--------------------------------------------------------------------------------
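
Note: ModelProcessingCommand is the shared driver behind both the import and export commands; each subclass only sets class_name and default_class. Positional arguments are resolved through get_models_by_name (source/utils/models.py), so the BaseSource and BaseDestination defaults expand to every concrete subclass. Pre- and postprocessing tasks are collected into sets before running, so a task object shared by several instances (such as dump_entries) runs only once. For example, "manage.py import" processes every source, while "manage.py import RssSource --force" reprocesses only RSS sources.
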
/backend/source/source/management/commands/import.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from source.management.commands import ModelProcessingCommand
4 | from source.models.source import BaseSource
5 |
6 | logger = logging.getLogger(__name__)
7 |
8 |
9 | class Command(ModelProcessingCommand):
10 | class_name = 'source/archive'
11 | default_class = BaseSource
12 |
13 | def process_instance(self, instance, force):
14 | created_entries, updated_entries = super().process_instance(instance, force)
15 | logger.log(
16 | logging.INFO if (created_entries + updated_entries) > 0 else logging.DEBUG,
17 | f"Retrieved {created_entries + updated_entries} entries for {instance}. "
18 | f"{created_entries} created, {updated_entries} updated."
19 | )
20 | return created_entries, updated_entries
21 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-10-20 11:37
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | initial = True
9 |
10 | dependencies = [
11 | ]
12 |
13 | operations = [
14 | migrations.CreateModel(
15 | name='BackupSource',
16 | fields=[
17 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
18 | ('user', models.CharField(max_length=80)),
19 | ('host', models.CharField(max_length=255)),
20 | ('port', models.PositiveSmallIntegerField(default=22)),
21 | ('path', models.TextField()),
22 | ],
23 | ),
24 | ]
25 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0002_backupsource_key.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-10-20 13:44
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0001_initial'),
10 | ]
11 |
12 | operations = [
13 | migrations.AddField(
14 | model_name='backupsource',
15 | name='key',
16 | field=models.CharField(default='tmp', max_length=80),
17 | preserve_default=False,
18 | ),
19 | ]
20 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0003_auto_20201021_0811.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-10-21 08:11
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0002_backupsource_key'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterField(
14 | model_name='backupsource',
15 | name='key',
16 | field=models.CharField(max_length=80, unique=True),
17 | ),
18 | ]
19 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0004_twittersource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-11-03 12:09
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0003_auto_20201021_0811'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='TwitterSource',
15 | fields=[
16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 | ('consumer_key', models.CharField(max_length=50)),
18 | ('consumer_secret', models.CharField(max_length=50)),
19 | ('access_token', models.CharField(max_length=50)),
20 | ('access_token_secret', models.CharField(max_length=50)),
21 | ('twitter_username', models.CharField(max_length=50)),
22 | ],
23 | ),
24 | ]
25 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0005_redditsource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-11-03 15:57
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0004_twittersource'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='RedditSource',
15 | fields=[
16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 | ('client_id', models.CharField(max_length=50)),
18 | ('client_secret', models.CharField(max_length=50)),
19 | ('user_agent', models.CharField(blank=True, max_length=100)),
20 | ('reddit_username', models.CharField(max_length=20)),
21 | ],
22 | ),
23 | ]
24 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0006_hackernewssource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-12-26 20:47
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0005_redditsource'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='HackerNewsSource',
15 | fields=[
16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 | ('hackernews_username', models.CharField(max_length=20)),
18 | ],
19 | ),
20 | ]
21 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0007_rsssource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-12-27 12:37
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0006_hackernewssource'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='RssSource',
15 | fields=[
16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 | ('feed_url', models.URLField()),
18 | ],
19 | ),
20 | ]
21 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0008_auto_20210127_1215.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-27 12:15
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0007_rsssource'),
10 | ]
11 |
12 | operations = [
13 | migrations.RenameModel(
14 | old_name='BackupSource',
15 | new_name='RsyncSource',
16 | ),
17 | ]
18 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0009_auto_20210316_1316.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-03-16 13:16
2 |
3 | import django.core.validators
4 | from django.db import migrations, models
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | dependencies = [
10 | ('source', '0008_auto_20210127_1215'),
11 | ]
12 |
13 | operations = [
14 | migrations.AddField(
15 | model_name='rsyncsource',
16 | name='max_backups',
17 | field=models.PositiveSmallIntegerField(null=True, validators=[django.core.validators.MinValueValidator(1)]),
18 | ),
19 | migrations.AlterField(
20 | model_name='rsyncsource',
21 | name='port',
22 | field=models.PositiveIntegerField(default=22, validators=[django.core.validators.MaxValueValidator(65535)]),
23 | ),
24 | ]
25 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0010_filesystemsource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-03-16 21:14
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0009_auto_20210316_1316'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='FileSystemSource',
15 | fields=[
16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 | ('path', models.FilePathField(allow_files=False, allow_folders=True, path='/srv/mounts')),
18 | ],
19 | options={
20 | 'abstract': False,
21 | },
22 | ),
23 | ]
24 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0011_auto_20210408_1021.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-04-08 10:21
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0010_filesystemsource'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterField(
14 | model_name='filesystemsource',
15 | name='path',
16 | field=models.FilePathField(allow_files=False, allow_folders=True, path='/data/mounts'),
17 | ),
18 | ]
19 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0012_rsyncdestination.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-04-08 12:36
2 |
3 | import django.core.validators
4 | from django.db import migrations, models
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | dependencies = [
10 | ('source', '0011_auto_20210408_1021'),
11 | ]
12 |
13 | operations = [
14 | migrations.CreateModel(
15 | name='RsyncDestination',
16 | fields=[
17 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
18 | ('user', models.CharField(max_length=80)),
19 | ('host', models.CharField(max_length=255)),
20 | ('port', models.PositiveIntegerField(default=22, validators=[django.core.validators.MaxValueValidator(65535)])),
21 | ('path', models.TextField()),
22 | ('key', models.CharField(max_length=80, unique=True)),
23 | ],
24 | options={
25 | 'abstract': False,
26 | },
27 | ),
28 | ]
29 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0013_auto_20210419_1110.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-04-19 11:10
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0012_rsyncdestination'),
10 | ]
11 |
12 | operations = [
13 | migrations.AddField(
14 | model_name='rsyncdestination',
15 | name='key_exchange_method',
16 | field=models.CharField(choices=[('hetzner', 'hetzner'), ('ssh-copy-id', 'ssh-copy-id')], default='ssh-copy-id', max_length=20),
17 | ),
18 | migrations.AddField(
19 | model_name='rsyncsource',
20 | name='key_exchange_method',
21 | field=models.CharField(choices=[('hetzner', 'hetzner'), ('ssh-copy-id', 'ssh-copy-id')], default='ssh-copy-id', max_length=20),
22 | ),
23 | ]
24 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0014_auto_20210502_0933.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-05-02 09:33
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0013_auto_20210419_1110'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterModelOptions(
14 | name='filesystemsource',
15 | options={'ordering': ['key']},
16 | ),
17 | migrations.AlterModelOptions(
18 | name='hackernewssource',
19 | options={'ordering': ['key']},
20 | ),
21 | migrations.AlterModelOptions(
22 | name='redditsource',
23 | options={'ordering': ['key']},
24 | ),
25 | migrations.AlterModelOptions(
26 | name='rsssource',
27 | options={'ordering': ['key']},
28 | ),
29 | migrations.AlterModelOptions(
30 | name='twittersource',
31 | options={'ordering': ['key']},
32 | ),
33 | migrations.AddField(
34 | model_name='filesystemsource',
35 | name='key',
36 | field=models.SlugField(max_length=80, null=True),
37 | ),
38 | migrations.AddField(
39 | model_name='hackernewssource',
40 | name='key',
41 | field=models.SlugField(max_length=80, null=True),
42 | ),
43 | migrations.AddField(
44 | model_name='redditsource',
45 | name='key',
46 | field=models.SlugField(max_length=80, null=True),
47 | ),
48 | migrations.AddField(
49 | model_name='rsssource',
50 | name='key',
51 | field=models.SlugField(max_length=80, null=True),
52 | ),
53 | migrations.AddField(
54 | model_name='twittersource',
55 | name='key',
56 | field=models.SlugField(max_length=80, null=True),
57 | ),
58 | migrations.AlterField(
59 | model_name='rsyncdestination',
60 | name='key',
61 | field=models.SlugField(max_length=80, null=True),
62 | ),
63 | migrations.AlterField(
64 | model_name='rsyncsource',
65 | name='key',
66 | field=models.SlugField(max_length=80, null=True),
67 | ),
68 | ]
69 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0016_auto_20210502_1244.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-05-02 12:44
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0015_auto_20210502_1007'),
10 | ]
11 |
12 | operations = [
13 | migrations.RemoveField(
14 | model_name='filesystemsource',
15 | name='id',
16 | ),
17 | migrations.RemoveField(
18 | model_name='hackernewssource',
19 | name='id',
20 | ),
21 | migrations.RemoveField(
22 | model_name='redditsource',
23 | name='id',
24 | ),
25 | migrations.RemoveField(
26 | model_name='rsssource',
27 | name='id',
28 | ),
29 | migrations.RemoveField(
30 | model_name='rsyncdestination',
31 | name='id',
32 | ),
33 | migrations.RemoveField(
34 | model_name='rsyncsource',
35 | name='id',
36 | ),
37 | migrations.RemoveField(
38 | model_name='twittersource',
39 | name='id',
40 | ),
41 | migrations.AlterField(
42 | model_name='filesystemsource',
43 | name='key',
44 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
45 | ),
46 | migrations.AlterField(
47 | model_name='hackernewssource',
48 | name='key',
49 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
50 | ),
51 | migrations.AlterField(
52 | model_name='redditsource',
53 | name='key',
54 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
55 | ),
56 | migrations.AlterField(
57 | model_name='rsssource',
58 | name='key',
59 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
60 | ),
61 | migrations.AlterField(
62 | model_name='rsyncdestination',
63 | name='key',
64 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
65 | ),
66 | migrations.AlterField(
67 | model_name='rsyncsource',
68 | name='key',
69 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
70 | ),
71 | migrations.AlterField(
72 | model_name='twittersource',
73 | name='key',
74 | field=models.SlugField(max_length=80, primary_key=True, serialize=False),
75 | ),
76 | ]
77 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0017_gitsource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-01-06 15:48
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0016_auto_20210502_1244'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='GitSource',
15 | fields=[
16 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
17 | ('repo_url', models.URLField()),
18 | ('author_name', models.CharField(max_length=200, null=True)),
19 | ],
20 | options={
21 | 'ordering': ['key'],
22 | 'abstract': False,
23 | },
24 | ),
25 | ]
26 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0018_rename_backups_to_source.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from django.db import migrations, ProgrammingError, connection
4 | from django.apps import apps
5 | from django.db.backends.utils import truncate_name
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 |
10 | def rename_app(app, schema_editor):
11 | old_app_name = 'backup'
12 | new_app_name = 'source'
13 |
14 | schema_editor.execute(
15 | "SELECT * FROM django_content_type "
16 | f"WHERE app_label='{new_app_name}'"
17 | )
18 |
19 | schema_editor.execute(
20 | f"UPDATE django_content_type SET app_label='{new_app_name}' "
21 | f"WHERE app_label='{old_app_name}'"
22 | )
23 | schema_editor.execute(
24 | f"UPDATE django_migrations SET app='{new_app_name}' "
25 | f"WHERE app='{old_app_name}'"
26 | )
27 | models = apps.all_models[new_app_name]
28 | models.update(apps.all_models[old_app_name])
29 | with connection.cursor() as cursor:
30 | for model_name in models:
31 | old_table_name = truncate_name(f"{old_app_name}_{model_name}", connection.ops.max_name_length())
32 | new_table_name = truncate_name(f"{new_app_name}_{model_name}", connection.ops.max_name_length())
33 | cursor.execute(f"SELECT * FROM information_schema.tables "
34 | f"WHERE table_schema LIKE 'public' "
35 | f"AND table_type LIKE 'BASE TABLE' "
36 | f"AND table_name = '{old_table_name}'")
37 | old_table_exists = cursor.fetchone()
38 | if old_table_exists:
39 | logger.info(f"Moving old table {old_table_name} to {new_table_name}")
40 | delete_query = f"DROP TABLE {new_table_name}"
41 | try:
42 | schema_editor.execute(delete_query)
43 | except ProgrammingError:
44 | logger.error('Query failed: "%s"', delete_query, exc_info=True)
45 |
46 | rename_query = f"ALTER TABLE {old_table_name} RENAME TO {new_table_name}"
47 | try:
48 | schema_editor.execute(rename_query)
49 | except ProgrammingError:
50 | logger.error('Query failed: "%s"', rename_query, exc_info=True)
51 | else:
52 | logger.warning(f"Did not find old table {old_table_name}. "
53 | f"If you are starting this project for the first time, this is fine. "
54 | f"If you are updating the app, something went wrong when renaming tables.")
55 |
56 |
57 | class Migration(migrations.Migration):
58 | # Commits 620b36 and c83309 split the /backup app into /source and /destination apps. This migrates the old tables.
59 |
60 | dependencies = [
61 | ('source', '0017_gitsource'),
62 | ]
63 |
64 | operations = [
65 | migrations.RunPython(rename_app),
66 | ]
67 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0019_delete_rsyncdestination.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-04-19 15:34
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0018_rename_backups_to_source'),
10 | ]
11 |
12 | operations = [
13 | migrations.DeleteModel(
14 | name='RsyncDestination',
15 | ),
16 | ]
17 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0020_traktsource.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-05-14 15:01
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0019_delete_rsyncdestination'),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name='TraktSource',
15 | fields=[
16 | ('key', models.SlugField(max_length=80, primary_key=True, serialize=False)),
17 | ('consumer_key', models.CharField(max_length=100)),
18 | ('consumer_secret', models.CharField(max_length=100)),
19 | ('access_token', models.CharField(blank=True, max_length=100)),
20 | ('refresh_token', models.CharField(blank=True, max_length=100)),
21 | ('access_token_created', models.DateTimeField(null=True)),
22 | ('access_token_expires', models.DateTimeField(null=True)),
23 | ('client_id', models.IntegerField()),
24 | ],
25 | options={
26 | 'abstract': False,
27 | },
28 | ),
29 | ]
30 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/0021_auto_20220516_1313.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2022-05-16 13:13
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('source', '0020_traktsource'),
10 | ]
11 |
12 | operations = [
13 | migrations.AddField(
14 | model_name='filesystemsource',
15 | name='date_from',
16 | field=models.DateTimeField(null=True),
17 | ),
18 | migrations.AddField(
19 | model_name='filesystemsource',
20 | name='date_until',
21 | field=models.DateTimeField(null=True),
22 | ),
23 | migrations.AddField(
24 | model_name='gitsource',
25 | name='date_from',
26 | field=models.DateTimeField(null=True),
27 | ),
28 | migrations.AddField(
29 | model_name='gitsource',
30 | name='date_until',
31 | field=models.DateTimeField(null=True),
32 | ),
33 | migrations.AddField(
34 | model_name='hackernewssource',
35 | name='date_from',
36 | field=models.DateTimeField(null=True),
37 | ),
38 | migrations.AddField(
39 | model_name='hackernewssource',
40 | name='date_until',
41 | field=models.DateTimeField(null=True),
42 | ),
43 | migrations.AddField(
44 | model_name='redditsource',
45 | name='date_from',
46 | field=models.DateTimeField(null=True),
47 | ),
48 | migrations.AddField(
49 | model_name='redditsource',
50 | name='date_until',
51 | field=models.DateTimeField(null=True),
52 | ),
53 | migrations.AddField(
54 | model_name='rsssource',
55 | name='date_from',
56 | field=models.DateTimeField(null=True),
57 | ),
58 | migrations.AddField(
59 | model_name='rsssource',
60 | name='date_until',
61 | field=models.DateTimeField(null=True),
62 | ),
63 | migrations.AddField(
64 | model_name='rsyncsource',
65 | name='date_from',
66 | field=models.DateTimeField(null=True),
67 | ),
68 | migrations.AddField(
69 | model_name='rsyncsource',
70 | name='date_until',
71 | field=models.DateTimeField(null=True),
72 | ),
73 | migrations.AddField(
74 | model_name='traktsource',
75 | name='date_from',
76 | field=models.DateTimeField(null=True),
77 | ),
78 | migrations.AddField(
79 | model_name='traktsource',
80 | name='date_until',
81 | field=models.DateTimeField(null=True),
82 | ),
83 | migrations.AddField(
84 | model_name='twittersource',
85 | name='date_from',
86 | field=models.DateTimeField(null=True),
87 | ),
88 | migrations.AddField(
89 | model_name='twittersource',
90 | name='date_until',
91 | field=models.DateTimeField(null=True),
92 | ),
93 | ]
94 |
--------------------------------------------------------------------------------
/backend/source/source/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/source/migrations/__init__.py
--------------------------------------------------------------------------------
/backend/source/source/models/__init__.py:
--------------------------------------------------------------------------------
1 | from source.models.filesystem import FileSystemSource
2 | from source.models.git import GitSource
3 | from source.models.hackernews import HackerNewsSource
4 | from source.models.reddit import RedditSource
5 | from source.models.rss import RssSource
6 | from source.models.rsync import RsyncSource
7 | from source.models.trakt import TraktSource
8 | from source.models.twitter import TwitterSource
9 |
10 | __all__ = [
11 | 'FileSystemSource',
12 | 'GitSource',
13 | 'HackerNewsSource',
14 | 'RedditSource',
15 | 'RssSource',
16 | 'RsyncSource',
17 | 'TraktSource',
18 | 'TwitterSource',
19 | ]
20 |
--------------------------------------------------------------------------------
/backend/source/source/models/filesystem.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 | from functools import partial
4 | from pathlib import Path
5 | from typing import Tuple
6 |
7 | from django.db import models, transaction
8 |
9 | from backend.settings import MOUNTS_ROOT
10 | from source.models.source import BaseSource
11 | from source.utils.files import create_entries_from_directory
12 | from timeline.utils.postprocessing import generate_previews
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
17 | class FileSystemSource(BaseSource):
18 | path = models.FilePathField(blank=False, path=str(MOUNTS_ROOT.resolve()), allow_folders=True, allow_files=False)
19 |
20 | def process(self, force=False) -> Tuple[int, int]:
21 | return self.create_file_entries(use_cache=(not force)), 0
22 |
23 | @transaction.atomic
24 | def create_file_entries(self, use_cache=True) -> int:
25 | logger.info(f"Creating entries for {self.entry_source}")
26 | return len(
27 | create_entries_from_directory(Path(self.path), source=self, backup_date=datetime.now(), use_cache=use_cache)
28 | )
29 |
30 | def get_postprocessing_tasks(self):
31 | return super().get_postprocessing_tasks() + [
32 | partial(generate_previews, source=self),
33 | ]
34 |
--------------------------------------------------------------------------------
/backend/source/source/models/git.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | from typing import Tuple
3 | from urllib.parse import urlparse, urlunparse
4 |
5 | import pytz
6 | from django.db import models, transaction
7 | from pydriller import Repository
8 |
9 | from source.models.source import BaseSource
10 | from timeline.models import Entry
11 |
12 |
13 | class GitSource(BaseSource):
14 | repo_url = models.URLField(blank=False)
15 | author_name = models.CharField(max_length=200, null=True)
16 |
17 | def get_repo_url(self, strip_credentials=False):
18 | parsed_url = urlparse(self.repo_url)
19 | if strip_credentials:
20 | # Remove username and password if they are included in the URL
21 | parsed_url = parsed_url._replace(netloc=parsed_url.hostname)
22 |
23 | return urlunparse(parsed_url)
24 |
25 | def get_repo_name(self):
26 | parsed_url = urlparse(self.repo_url)
27 | if parsed_url.netloc == 'github.com':
28 | return (parsed_url.path[:-4] if parsed_url.path.endswith('.git') else parsed_url.path).strip('/') # e.g. "nicbou/timeline"
29 |
30 | def get_commit_url(self, commit):
31 | if urlparse(self.repo_url).netloc == 'github.com':
32 | return f"{self.repo_url[:-4] if self.repo_url.endswith('.git') else self.repo_url}/commit/{commit.hash}"
33 |
34 | @transaction.atomic
35 | def process(self, force=False) -> Tuple[int, int]:
36 | filters = {}
37 | if self.author_name:
38 | filters['only_authors'] = [self.author_name, ]
39 |
40 | if self.date_from:
41 | filters['since'] = self.date_from - timedelta(seconds=1)
42 | if self.date_until:
43 | filters['to'] = self.date_until + timedelta(seconds=1)
44 |
45 | commits = Repository(self.repo_url, **filters).traverse_commits()
46 |
47 | self.get_entries().delete()
48 |
49 | entries_to_create = []
50 | for commit in commits:
51 | entries_to_create.append(Entry(
52 | title=commit.msg,
53 | description=commit.hash,
54 | date_on_timeline=commit.committer_date.astimezone(pytz.UTC),
55 | schema='commit',
56 | source=self.entry_source,
57 | extra_attributes={
58 | 'hash': commit.hash,
59 | 'url': self.get_commit_url(commit),
60 | 'author': {
61 | 'email': commit.author.email,
62 | 'name': commit.author.name,
63 | },
64 | 'changes': {
65 | 'files': commit.files,
66 | 'insertions': commit.insertions,
67 | 'deletions': commit.deletions,
68 | },
69 | 'repo': {
70 | 'name': self.get_repo_name() or commit.project_name,
71 | 'url': self.get_repo_url(strip_credentials=True),
72 | },
73 | }
74 | ))
75 | Entry.objects.bulk_create(entries_to_create)
76 | return len(entries_to_create), 0
77 |
--------------------------------------------------------------------------------
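
Note: get_repo_url(strip_credentials=True) leans on urllib.parse: replacing netloc with hostname drops any user:password@ prefix (and any port, since hostname excludes it; it also lowercases the host). A quick illustration with a throwaway URL:

    from urllib.parse import urlparse, urlunparse

    url = 'https://nicolas:hunter2@github.com/nicbou/timeline.git'
    parsed = urlparse(url)
    print(urlunparse(parsed._replace(netloc=parsed.hostname)))
    # https://github.com/nicbou/timeline.git
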
/backend/source/source/models/hackernews.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime, timedelta
3 | from typing import Tuple
4 |
5 | import pytz
6 | import requests
7 | from django.db import models, transaction
8 |
9 | from source.models.source import BaseSource
10 | from timeline.models import Entry
11 |
12 | logger = logging.getLogger(__name__)
13 |
14 |
15 | class HackerNewsSource(BaseSource):
16 | hackernews_username = models.CharField(max_length=20, blank=False)
17 |
18 | def process(self, force=False) -> Tuple[int, int]:
19 | base_schema = 'social.hackernews'
20 |
21 | # Hacker News items can't be edited or deleted after 2 hours, so only items newer than the latest known entry (minus that window) need fetching.
22 | latest_entry = self.get_entries().order_by('-extra_attributes__post_id').first()
23 | latest_entry_date = latest_entry.date_on_timeline if latest_entry else self.date_from
24 | two_hours_ago = datetime.now(pytz.UTC) - timedelta(hours=2)
25 | after_date = min([latest_entry_date, two_hours_ago]) if latest_entry_date else None
26 |
27 | if latest_entry_date:
28 | if not self.is_date_in_date_range(latest_entry_date):
29 | return 0, 0
30 | logger.info(f'Retrieving {str(self)} entries after {after_date}')
31 |
32 | updated_entries = []
33 | created_entries = []
34 | with transaction.atomic():
35 | api_url = "https://hacker-news.firebaseio.com/v0/"
36 | item_ids_for_user = requests.get(f"{api_url}user/{self.hackernews_username}.json").json()['submitted']
37 | for item_id in item_ids_for_user:
38 | item = requests.get(f"{api_url}item/{item_id}.json").json()
39 | item_date = datetime.fromtimestamp(item['time'], pytz.UTC)
40 | if not self.is_date_in_date_range(item_date):
41 | continue
42 | if after_date and item_date <= after_date:
43 | break
44 | if item.get('deleted'):
45 | continue
46 |
47 | entry_values = {
48 | 'title': item.get('title', ''),
49 | 'description': item.get('text', ''),
50 | 'date_on_timeline': item_date,
51 | 'extra_attributes': {
52 | 'post_id': item['id'],
53 | 'post_user': self.hackernews_username,
54 | 'post_score': item.get('score'),
55 | }
56 | }
57 |
58 | if 'text' in item:
59 | entry_values['extra_attributes']['post_body_html'] = item['text']
60 | if 'url' in item:
61 | entry_values['extra_attributes']['post_url'] = item['url']
62 | if 'parent' in item:
63 | entry_values['extra_attributes']['post_parent_id'] = item['parent']
64 | if 'score' in item:
65 | entry_values['extra_attributes']['post_score'] = item['score']
66 |
67 | entry, created = Entry.objects.update_or_create(
68 | schema=f"{base_schema}.{item['type']}",
69 | source=self.entry_source,
70 | extra_attributes__post_id=item['id'],
71 | defaults=entry_values
72 | )
73 |
74 | if created:
75 | created_entries.append(entry)
76 | else:
77 | updated_entries.append(entry)
78 |
79 | return len(created_entries), len(updated_entries)
80 |
81 | def __str__(self):
82 | return f"{self.source_name}/{self.hackernews_username}"
83 |
--------------------------------------------------------------------------------
/backend/source/source/models/oauth.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 | from source.models.source import BaseSource
4 |
5 |
6 | class OAuthSource(BaseSource):
7 | """
8 | Data source that requires OAuth capability to access information
9 | """
10 | consumer_key = models.CharField(max_length=100, blank=False)
11 | consumer_secret = models.CharField(max_length=100, blank=False)
12 | access_token = models.CharField(max_length=100, blank=True)
13 | refresh_token = models.CharField(max_length=100, blank=True)
14 | access_token_created = models.DateTimeField(null=True)
15 | access_token_expires = models.DateTimeField(null=True)
16 |
17 | class Meta:
18 | abstract = True
19 |
--------------------------------------------------------------------------------
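
Note: OAuthSource is abstract; concrete sources add their own fields on top of this token bookkeeping. Judging from migration 0020_traktsource (whose fields match the ones above, plus client_id), TraktSource is such a subclass. A sketch of what that looks like, inferred from the migration rather than copied from models/trakt.py, which is not shown here:

    from django.db import models

    from source.models.oauth import OAuthSource


    class TraktSource(OAuthSource):
        # consumer_key/secret and the token fields are inherited from OAuthSource.
        client_id = models.IntegerField()
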
/backend/source/source/models/rss.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from time import mktime
3 | from typing import Tuple
4 |
5 | import feedparser
6 | import pytz
7 | from django.db import models, transaction
8 |
9 | from source.models.source import BaseSource
10 | from timeline.models import Entry
11 |
12 |
13 | class RssSource(BaseSource):
14 | feed_url = models.URLField(blank=False)
15 |
16 | def process(self, force=False) -> Tuple[int, int]:
17 | rss_feed = feedparser.parse(self.feed_url)
18 |
19 | updated_entries = []
20 | created_entries = []
21 | with transaction.atomic():
22 | for rss_entry in rss_feed.entries:
23 | entry, created = Entry.objects.update_or_create(
24 | schema='social.blog.article',
25 | source=self.entry_source,
26 | extra_attributes__post_id=rss_entry.id,
27 | extra_attributes__post_url=rss_entry.link,
28 | defaults={
29 | 'title': rss_entry.title,
30 | 'description': rss_entry.summary,
31 | 'date_on_timeline': datetime.fromtimestamp(mktime(rss_entry.published_parsed), pytz.UTC),
32 | 'extra_attributes': {
33 | 'post_id': rss_entry.id,
34 | 'post_url': rss_entry.link,
35 | 'post_user': rss_entry.author,
36 | 'post_body_html': rss_entry.description or rss_entry.summary,
37 | }
38 | }
39 | )
40 | if created:
41 | created_entries.append(entry)
42 | else:
43 | updated_entries.append(entry)
44 |
45 | return len(created_entries), len(updated_entries)
46 |
47 |
--------------------------------------------------------------------------------
/backend/source/source/models/twitter.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Tuple
3 |
4 | import pytz
5 | import tweepy
6 | from django.db import models, transaction
7 |
8 | from source.models.source import BaseSource
9 | from timeline.models import Entry
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | class TwitterSource(BaseSource):
15 | consumer_key = models.CharField(max_length=50, blank=False)
16 | consumer_secret = models.CharField(max_length=50, blank=False)
17 | access_token = models.CharField(max_length=50, blank=False)
18 | access_token_secret = models.CharField(max_length=50, blank=False)
19 | twitter_username = models.CharField(max_length=50, blank=False)
20 |
21 | def process(self, force=False) -> Tuple[int, int]:
22 | auth = tweepy.OAuthHandler(self.consumer_key, self.consumer_secret)
23 | auth.set_access_token(self.access_token, self.access_token_secret)
24 | api = tweepy.API(auth)
25 |
26 | schema = 'social.twitter.tweet'
27 | latest_entry = Entry.objects \
28 | .filter(source=self.entry_source)\
29 | .order_by('-extra_attributes__post_id')\
30 | .first()
31 | latest_entry_date = latest_entry.date_on_timeline if latest_entry else self.date_from
32 | latest_entry_id = latest_entry.extra_attributes.get('post_id') if latest_entry else None
33 |
34 | if latest_entry_date:
35 | if not self.is_date_in_date_range(latest_entry_date):
36 | return 0, 0
37 | logger.info(f'Retrieving all {self} tweets after {latest_entry_date}')
38 | else:
39 | logger.info(f'Retrieving all {self} tweets')
40 |
41 | cursor = tweepy.Cursor(
42 | api.user_timeline,
43 | screen_name=f'@{self.twitter_username}',
44 | tweet_mode='extended',
45 | since=self.date_from,
46 | until=self.date_until,
47 | since_id=latest_entry_id,
48 | ).items()
49 |
50 | updated_entries = []
51 | created_entries = []
52 | with transaction.atomic():
53 | for tweet in cursor:
54 | defaults = {
55 | 'title': '',
56 | 'description': tweet.full_text,
57 | 'date_on_timeline': pytz.utc.localize(tweet.created_at),
58 | 'extra_attributes': {
59 | 'post_id': tweet.id,
60 | 'post_user': self.twitter_username,
61 | }
62 | }
63 |
64 | if tweet.in_reply_to_status_id:
65 | defaults['extra_attributes']['post_parent_id'] = tweet.in_reply_to_status_id
66 |
67 | entry, created = Entry.objects.update_or_create(
68 | schema=schema,
69 | source=self.entry_source,
70 | extra_attributes__post_id=tweet.id,
71 | defaults=defaults,
72 | )
73 |
74 | if created:
75 | created_entries.append(entry)
76 | else:
77 | updated_entries.append(entry)
78 |
79 | return len(created_entries), len(updated_entries)
80 |
--------------------------------------------------------------------------------
/backend/source/source/parsers.py:
--------------------------------------------------------------------------------
1 | from formencode.variabledecode import variable_decode
2 | from rest_framework import parsers
3 |
4 |
5 | class MultipartFormencodeParser(parsers.MultiPartParser):
6 | def parse(self, stream, media_type=None, parser_context=None):
7 | result = super().parse(
8 | stream,
9 | media_type=media_type,
10 | parser_context=parser_context
11 | )
12 | data = variable_decode(result.data)
13 | return parsers.DataAndFiles(data, result.files)
--------------------------------------------------------------------------------
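
Note: formencode's variable_decode is what lets the API accept nested payloads over multipart/form-data: dotted keys become nested dicts and numbered keys ('-0', '-1', ...) become lists. Roughly, with illustrative keys:

    from formencode.variabledecode import variable_decode

    flat = {'source.key': 'my-source', 'files-0': 'a.gpx', 'files-1': 'b.gpx'}
    print(variable_decode(flat))
    # {'source': {'key': 'my-source'}, 'files': ['a.gpx', 'b.gpx']}
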
/backend/source/source/serializers.py:
--------------------------------------------------------------------------------
1 | from rest_framework import serializers
2 |
3 | from .models import FileSystemSource
4 | from .models.git import GitSource
5 | from .models.hackernews import HackerNewsSource
6 | from .models.reddit import RedditSource
7 | from .models.rss import RssSource
8 | from .models.rsync import RsyncSource
9 | from .models.trakt import TraktSource
10 | from .models.twitter import TwitterSource
11 |
12 |
13 | class BaseSourceSerializer(serializers.HyperlinkedModelSerializer):
14 | key = serializers.CharField()
15 |
16 |
17 | class RsyncSourceSerializer(BaseSourceSerializer):
18 | password = serializers.CharField(write_only=True, style={'input_type': 'password'})
19 |
20 | class Meta:
21 | model = RsyncSource
22 | fields = '__all__'
23 |
24 |
25 | class TwitterSourceSerializer(BaseSourceSerializer):
26 | class Meta:
27 | model = TwitterSource
28 | fields = '__all__'
29 |
30 |
31 | class RedditSourceSerializer(BaseSourceSerializer):
32 | class Meta:
33 | model = RedditSource
34 | fields = '__all__'
35 |
36 |
37 | class HackerNewsSourceSerializer(BaseSourceSerializer):
38 | class Meta:
39 | model = HackerNewsSource
40 | fields = '__all__'
41 |
42 |
43 | class RssSourceSerializer(BaseSourceSerializer):
44 | class Meta:
45 | model = RssSource
46 | fields = '__all__'
47 |
48 |
49 | class FileSystemSourceSerializer(BaseSourceSerializer):
50 | class Meta:
51 | model = FileSystemSource
52 | fields = '__all__'
53 |
54 | class GitSourceSerializer(BaseSourceSerializer):
55 | class Meta:
56 | model = GitSource
57 | fields = '__all__'
58 |
59 | class TraktSourceSerializer(BaseSourceSerializer):
60 | pin = serializers.CharField(write_only=True, allow_blank=True)
61 |
62 | def create(self, validated_data):
63 | validated_data.pop('pin', None)
64 | return super().create(validated_data)
65 |
66 | class Meta:
67 | model = TraktSource
68 | fields = '__all__'
69 |
--------------------------------------------------------------------------------
/backend/source/source/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import include, path
2 | from rest_framework import routers
3 |
4 | from source.views import FileSystemSourceViewSet, HackerNewsSourceViewSet, RedditSourceViewSet, RssSourceViewSet, \
5 | RsyncSourceViewSet, TraktSourceViewSet, TwitterSourceViewSet, GitSourceViewSet
6 |
7 | router = routers.DefaultRouter()
8 | router.register(r'filesystem', FileSystemSourceViewSet)
9 | router.register(r'git', GitSourceViewSet)
10 | router.register(r'hackernews', HackerNewsSourceViewSet)
11 | router.register(r'reddit', RedditSourceViewSet)
12 | router.register(r'rss', RssSourceViewSet)
13 | router.register(r'rsync', RsyncSourceViewSet)
14 | router.register(r'trakt', TraktSourceViewSet)
15 | router.register(r'twitter', TwitterSourceViewSet)
16 |
17 | urlpatterns = [
18 | path('', include(router.urls)),
19 | ]
20 |
--------------------------------------------------------------------------------
/backend/source/source/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/source/utils/__init__.py
--------------------------------------------------------------------------------
/backend/source/source/utils/datetime.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | import pytz
4 |
5 |
6 | def parse_exif_date(date_str: str) -> datetime:
7 | # Official format: YYYY:MM:DD HH:MM:SS
8 | # Also seen: YYYY-MM-DD HH:MM:SS and YYYY-MM-DDTHH:MM:SS+ZZZZ
9 | return datetime.strptime(
10 | date_str.replace('\x00', '').replace('-', ':').replace('T', ' ')[:19],
11 | '%Y:%m:%d %H:%M:%S'
12 | )
13 |
14 |
15 | def datetime_to_json(date: datetime) -> str:
16 | return date.strftime('%Y-%m-%dT%H:%M:%SZ')
17 |
18 |
19 | def json_to_datetime(date_str: str) -> datetime:
20 | try:
21 | parsed_date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ")
22 | except ValueError:
23 | # Date with milliseconds
24 | parsed_date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ")
25 | return pytz.utc.localize(parsed_date)
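26 |
27 | # Illustrative examples (added as comments; not part of the original module).
28 | # All three EXIF variants above normalize to the same value, because '-' is
29 | # replaced with ':', 'T' with ' ', and the string is cut to 19 characters,
30 | # discarding any timezone suffix:
31 | #   >>> parse_exif_date('2021:05:02 12:44:00')
32 | #   datetime.datetime(2021, 5, 2, 12, 44)
33 | #   >>> parse_exif_date('2021-05-02T12:44:00+0200')
34 | #   datetime.datetime(2021, 5, 2, 12, 44)
35 | # datetime_to_json and json_to_datetime round-trip, with json_to_datetime
36 | # returning a timezone-aware UTC datetime:
37 | #   >>> json_to_datetime(datetime_to_json(datetime(2021, 5, 2, 12, 44)))
38 | #   datetime.datetime(2021, 5, 2, 12, 44, tzinfo=<UTC>)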
--------------------------------------------------------------------------------
/backend/source/source/utils/geo.py:
--------------------------------------------------------------------------------
1 | import math
2 |
3 |
4 | def dms_to_decimal(dms, ref):
5 | degrees = dms[0]
6 | minutes = dms[1] / 60.0
7 | seconds = dms[2] / 3600.0
8 | if ref in ['S', 'W']:
9 | degrees = -degrees
10 | minutes = -minutes
11 | seconds = -seconds
12 |
13 | decimal = round(degrees + minutes + seconds, 5)
14 | if math.isnan(decimal):
15 | raise ValueError
16 | return decimal
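17 |
18 | # Example (added for illustration): EXIF-style GPS coordinates arrive as
19 | # (degrees, minutes, seconds) plus a hemisphere reference:
20 | #   dms_to_decimal((52, 31, 12.3), 'N')  ->  52.52008
21 | #   dms_to_decimal((13, 24, 36.0), 'W')  -> -13.41
22 | # The math.isnan() check guards against corrupt EXIF data yielding NaN.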
--------------------------------------------------------------------------------
/backend/source/source/utils/models.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Iterable, List, Type
3 |
4 | from django.apps import apps
5 | from django.db import models
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 |
10 | def get_models_by_name(class_names: Iterable[str]) -> List[Type[models.Model]]:
11 |     """
12 |     Return all Django model classes whose name, or any ancestor's name, matches one of the given class names.
13 | """
14 | matching_models = []
15 | for model in apps.get_models():
16 | parent_class_names = set([parent.__name__ for parent in model.mro()])
17 | if parent_class_names.intersection(class_names):
18 | matching_models.append(model)
19 | return matching_models
20 |
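21 | # Usage sketch (illustrative; not part of the original module): because mro()
22 | # includes every ancestor, passing a base class name also matches its
23 | # subclasses. For example, get_models_by_name(['RsyncSource']) returns the
24 | # RsyncSource model plus any model that inherits from it.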
--------------------------------------------------------------------------------
/backend/source/source/utils/preprocessing.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from django.conf import settings
4 | from rest_framework.renderers import JSONRenderer
5 |
6 | from timeline.models import Entry
7 | from timeline.serializers import serialize_entry
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | def dump_entries(force=False):
13 | logger.info(f"Dumping all entries in {settings.ENTRIES_DUMP_PATH}")
14 | with settings.ENTRIES_DUMP_PATH.open('w+') as entry_dump:
15 | entry_dump.write('[')
16 | entries = Entry.objects.iterator(chunk_size=5000)
17 | for index, entry in enumerate(entries):
18 | if index > 0:
19 | entry_dump.write(',')
20 | entry_dump.write(JSONRenderer().render(serialize_entry(entry)).decode("utf-8"))
21 | entry_dump.write(']')
22 |
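23 | # Design note (added as a comment): entries are streamed one at a time with
24 | # iterator(chunk_size=5000) and hand-written '[', ',' and ']' delimiters
25 | # instead of json.dumps() over a full list, so memory use stays flat no
26 | # matter how many entries exist. The `force` argument is accepted but unused
27 | # here; it appears to be reserved for callers that want conditional dumps.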
--------------------------------------------------------------------------------
/backend/source/source/utils/ssh.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import subprocess
3 | from pathlib import Path
4 |
5 | from django.conf import settings
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 |
10 | class SSHCredentialsError(ConnectionError):
11 | pass
12 |
13 |
14 | class SSHTimeoutError(ConnectionError):
15 | pass
16 |
17 |
18 | KEY_EXCHANGE_SSH_COPY_ID = 'ssh-copy-id'
19 | KEY_EXCHANGE_HETZNER = 'hetzner'
20 | KEY_EXCHANGE_METHODS = (
21 | KEY_EXCHANGE_HETZNER,
22 | KEY_EXCHANGE_SSH_COPY_ID,
23 | )
24 |
25 |
26 | def copy_ssh_keys(host, port, user, password, key_exchange_method=KEY_EXCHANGE_SSH_COPY_ID):
27 | """
28 | Copies SSH keys to a remote host without user input
29 | """
30 | try:
31 | ssh_private_key_path: Path = settings.SSH_DIR / 'id_rsa'
32 | ssh_public_key_path: Path = settings.SSH_DIR / 'id_rsa.pub'
33 | if not ssh_private_key_path.exists():
34 | # Don't regenerate an existing key, or it will break every connection that relies on it
35 | subprocess.check_call(['ssh-keygen', '-b', '2048', '-t', 'rsa', '-f', ssh_private_key_path, '-q', '-N', '',])
36 |
37 | if key_exchange_method == KEY_EXCHANGE_SSH_COPY_ID:
38 | subprocess.check_call([
39 | 'sshpass', '-p', password,
40 | 'ssh-copy-id',
41 | '-o', 'StrictHostKeyChecking=no',
42 | '-p', str(port),
43 | '-i', ssh_private_key_path,
44 | f'{user}@{host}',
45 | ], timeout=10)
46 | elif key_exchange_method == KEY_EXCHANGE_HETZNER:
47 | # Hetzner storage boxes don't support ssh-copy-id or shell commands, so we upload an authorized_keys file.
48 |
49 |         # Create the remote .ssh dir. Fails silently (for example if the directory already exists).
50 | mk_dir_command = subprocess.Popen(['echo', 'mkdir', '.ssh'], stdout=subprocess.PIPE)
51 | subprocess.check_call(['sshpass', '-p', password, 'sftp', f'{user}@{host}'], stdin=mk_dir_command.stdout)
52 |
53 | # Upload authorized_keys file
54 | subprocess.check_call([
55 | 'sshpass', '-p', password,
56 | 'scp', str(ssh_public_key_path), f'{user}@{host}:.ssh/authorized_keys'
57 | ])
58 | else:
59 | raise Exception(f"Unexpected key exchange method: {key_exchange_method}")
60 | except subprocess.TimeoutExpired:
61 | raise SSHTimeoutError(f"Failed to copy keys to {host}. Request timed out.")
62 | except subprocess.CalledProcessError as exc:
63 | raise SSHCredentialsError(f"Failed to copy keys to {host}. Command returned exit code {exc.returncode}")
64 | except KeyboardInterrupt:
65 | raise
66 |     except Exception:
67 | raise Exception(f"Failed to copy keys to {host}")
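68 |
69 | # Usage sketch (illustrative only; host, port and credentials are made up):
70 | #   copy_ssh_keys('storage.example.com', 22, 'backup', 's3cret')
71 | #   copy_ssh_keys('u123.your-storagebox.de', 23, 'u123', 's3cret',
72 | #                 key_exchange_method=KEY_EXCHANGE_HETZNER)
73 | # Once the public key is installed, rsync-based backups can authenticate
74 | # with the generated key pair instead of the password.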
--------------------------------------------------------------------------------
/backend/source/timeline/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/timeline/__init__.py
--------------------------------------------------------------------------------
/backend/source/timeline/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class TimelineConfig(AppConfig):
5 | name = 'timeline'
6 |
--------------------------------------------------------------------------------
/backend/source/timeline/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/timeline/management/__init__.py
--------------------------------------------------------------------------------
/backend/source/timeline/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/timeline/management/commands/__init__.py
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-10-20 11:37
2 |
3 | import django.utils.timezone
4 | from django.db import migrations, models
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | initial = True
10 |
11 | dependencies = [
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='Entry',
17 | fields=[
18 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
19 | ('date_on_timeline', models.DateTimeField(default=django.utils.timezone.now)),
20 | ('date_modified', models.DateTimeField(default=django.utils.timezone.now)),
21 | ('schema', models.CharField(max_length=100)),
22 | ('title', models.TextField()),
23 | ('description', models.TextField()),
24 | ('extra_attributes', models.JSONField()),
25 | ],
26 | ),
27 | migrations.CreateModel(
28 | name='File',
29 | fields=[
30 | ],
31 | options={
32 | 'proxy': True,
33 | 'indexes': [],
34 | 'constraints': [],
35 | },
36 | bases=('timeline.entry',),
37 | ),
38 | migrations.CreateModel(
39 | name='Location',
40 | fields=[
41 | ],
42 | options={
43 | 'proxy': True,
44 | 'indexes': [],
45 | 'constraints': [],
46 | },
47 | bases=('timeline.entry',),
48 | ),
49 | migrations.CreateModel(
50 | name='Text',
51 | fields=[
52 | ],
53 | options={
54 | 'proxy': True,
55 | 'indexes': [],
56 | 'constraints': [],
57 | },
58 | bases=('timeline.entry',),
59 | ),
60 | migrations.CreateModel(
61 | name='Image',
62 | fields=[
63 | ],
64 | options={
65 | 'proxy': True,
66 | 'indexes': [],
67 | 'constraints': [],
68 | },
69 | bases=('timeline.file',),
70 | ),
71 | migrations.CreateModel(
72 | name='Markdown',
73 | fields=[
74 | ],
75 | options={
76 | 'proxy': True,
77 | 'indexes': [],
78 | 'constraints': [],
79 | },
80 | bases=('timeline.text',),
81 | ),
82 | migrations.CreateModel(
83 | name='Video',
84 | fields=[
85 | ],
86 | options={
87 | 'proxy': True,
88 | 'indexes': [],
89 | 'constraints': [],
90 | },
91 | bases=('timeline.file',),
92 | ),
93 | ]
94 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0002_auto_20201020_1203.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-10-20 12:03
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('timeline', '0001_initial'),
10 | ]
11 |
12 | operations = [
13 | migrations.DeleteModel(
14 | name='File',
15 | ),
16 | migrations.DeleteModel(
17 | name='Image',
18 | ),
19 | migrations.DeleteModel(
20 | name='Location',
21 | ),
22 | migrations.DeleteModel(
23 | name='Markdown',
24 | ),
25 | migrations.DeleteModel(
26 | name='Text',
27 | ),
28 | migrations.DeleteModel(
29 | name='Video',
30 | ),
31 | migrations.AlterField(
32 | model_name='entry',
33 | name='description',
34 | field=models.TextField(blank=True),
35 | ),
36 | migrations.AlterField(
37 | model_name='entry',
38 | name='extra_attributes',
39 | field=models.JSONField(blank=True, default={}),
40 | ),
41 | migrations.AlterField(
42 | model_name='entry',
43 | name='title',
44 | field=models.TextField(blank=True),
45 | ),
46 | ]
47 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0003_auto_20201020_1203.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-10-20 12:03
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('timeline', '0002_auto_20201020_1203'),
10 | ]
11 |
12 | operations = [
13 | migrations.AlterField(
14 | model_name='entry',
15 | name='extra_attributes',
16 | field=models.JSONField(blank=True, default=dict),
17 | ),
18 | ]
19 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0004_remove_entry_date_modified.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-11-01 09:21
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('timeline', '0003_auto_20201020_1203'),
10 | ]
11 |
12 | operations = [
13 | migrations.RemoveField(
14 | model_name='entry',
15 | name='date_modified',
16 | ),
17 | ]
18 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0005_auto_20210110_1817.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-10 18:17
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('timeline', '0004_remove_entry_date_modified'),
10 | ]
11 |
12 | operations = [
13 | migrations.AddIndex(
14 | model_name='entry',
15 | index=models.Index(fields=['schema'], name='timeline_en_schema_e31fe6_idx'),
16 | ),
17 | migrations.AddIndex(
18 | model_name='entry',
19 | index=models.Index(fields=['-date_on_timeline'], name='timeline_en_date_on_861bc0_idx'),
20 | ),
21 | ]
22 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0006_auto_20210127_1256.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-27 12:56
2 | import logging
3 | from pathlib import Path
4 |
5 | from django.db import migrations, models
6 | from django.db.transaction import atomic
7 |
8 | from backend import settings
9 | from source.models.hackernews import HackerNewsSource
10 | from source.models.reddit import RedditSource
11 | from source.models.rsync import RsyncSource
12 | from source.models.twitter import TwitterSource
13 | from timeline.models import Entry
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 |
18 | @atomic
19 | def set_source(app, schema_editor):
20 | """
21 | - Set the "source" attribute
22 | - Replace 'geo.location.current' schema with 'activity.location'
23 | """
24 | rsync_sources_by_key = {
25 | s.key: s.id for s in RsyncSource.objects.all()
26 | }
27 | reddit_sources_by_username = {
28 | s.reddit_username: s.id for s in RedditSource.objects.all()
29 | }
30 | twitter_sources_by_username = {
31 | s.twitter_username: s.id for s in TwitterSource.objects.all()
32 | }
33 | hackernews_sources_by_username = {
34 | s.hackernews_username: s.id for s in HackerNewsSource.objects.all()
35 | }
36 |
37 | entries = Entry.objects.all()
38 |
39 | logger.info(f"Processing {entries.count()} entries")
40 |
41 | for entry in entries:
42 | if entry.schema.startswith('file'):
43 | key = Path(entry.extra_attributes['file']['path']).relative_to(settings.BACKUPS_ROOT).parts[0]
44 | if key in rsync_sources_by_key:
45 | entry.source = f"{RsyncSource.source_name}/{rsync_sources_by_key[key]}"
46 | elif entry.schema.startswith('social.twitter'):
47 | if entry.extra_attributes['post_user'] in twitter_sources_by_username:
48 | entry.source = f"{TwitterSource.source_name}/{twitter_sources_by_username[entry.extra_attributes['post_user']]}"
49 | elif entry.schema.startswith('social.hackernews'):
50 | if entry.extra_attributes['post_user'] in hackernews_sources_by_username:
51 | entry.source = f"{HackerNewsSource.source_name}/{hackernews_sources_by_username[entry.extra_attributes['post_user']]}"
52 | elif entry.schema.startswith('social.reddit'):
53 | if entry.extra_attributes['post_user'] in reddit_sources_by_username:
54 | entry.source = f"{RedditSource.source_name}/{reddit_sources_by_username[entry.extra_attributes['post_user']]}"
55 | elif entry.schema == 'social.blog.article':
56 | # RSS entries update themselves, so the source should be updated in the next run
57 | pass
58 | elif entry.schema == 'journal':
59 | entry.source = 'frontend/web'
60 | elif entry.schema == 'geo.point.current':
61 | # The source was already saved under extra_attributes
62 | entry.source = entry.extra_attributes['source']
63 | entry.schema = 'activity.location'
64 | del entry.extra_attributes['source']
65 | elif entry.schema == 'message.text.sms':
66 | entry.source = 'archive/sms-dump-2013-2015'
67 |
68 | entry.save()
69 |
70 | logger.info("Done")
71 |
72 |
73 | class Migration(migrations.Migration):
74 |
75 | dependencies = [
76 | ('timeline', '0005_auto_20210110_1817'),
77 | ]
78 |
79 | operations = [
80 | migrations.AddField(
81 | model_name='entry',
82 | name='source',
83 | field=models.CharField(default='', max_length=100),
84 | preserve_default=False,
85 | ),
86 | migrations.AddIndex(
87 | model_name='entry',
88 | index=models.Index(fields=['source'], name='timeline_en_source_daa269_idx'),
89 | ),
90 | migrations.RunPython(set_source),
91 | ]
92 |
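93 | # Note (added for clarity): RunPython is given no reverse_code, so this data
94 | # migration cannot be unapplied. It also imports the live model classes
95 | # directly instead of using apps.get_model(), which is why the `app` argument
96 | # is unused; that only works while the models still match the schema at this
97 | # point in the migration history.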
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0007_source_name_fix.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-27 12:56
2 | import logging
3 |
4 | from django.db import migrations
5 | from django.db.transaction import atomic
6 |
7 | from source.models.hackernews import HackerNewsSource
8 | from source.models.reddit import RedditSource
9 | from timeline.models import Entry
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | @atomic
15 | def fix_source(app, schema_editor):
16 | """
17 | - Fix the source attribute (RedditSource/1 -> reddit/1)
18 | - Fix the source attribute (HackerNewsSource/1 -> hackernews/1)
19 | """
20 | entries = Entry.objects.filter(source__startswith='RedditSource/')
21 | logger.info(f"Fixing source on {entries.count()} reddit entries")
22 | for entry in entries:
23 | entry.source = entry.source.replace('RedditSource', RedditSource.source_name)
24 | entry.save()
25 |
26 | entries = Entry.objects.filter(source__startswith='HackerNewsSource/')
27 | logger.info(f"Fixing source on {entries.count()} hacker news entries")
28 | for entry in entries:
29 | entry.source = entry.source.replace('HackerNewsSource', HackerNewsSource.source_name)
30 | entry.save()
31 |
32 | logger.info("Done")
33 |
34 |
35 | class Migration(migrations.Migration):
36 |
37 | dependencies = [
38 | ('timeline', '0006_auto_20210127_1256'),
39 | ]
40 |
41 | operations = [
42 | migrations.RunPython(fix_source),
43 | ]
44 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/0008_auto_20210603_1102.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-06-03 11:02
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('timeline', '0007_source_name_fix'),
10 | ]
11 |
12 | operations = [
13 | migrations.RemoveIndex(
14 | model_name='entry',
15 | name='timeline_en_date_on_861bc0_idx',
16 | ),
17 | migrations.AddIndex(
18 | model_name='entry',
19 | index=models.Index(fields=['date_on_timeline'], name='timeline_en_date_on_d4f91d_idx'),
20 | ),
21 | ]
22 |
--------------------------------------------------------------------------------
/backend/source/timeline/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/timeline/migrations/__init__.py
--------------------------------------------------------------------------------
/backend/source/timeline/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 | from django.utils import timezone
3 |
4 |
5 | class Entry(models.Model):
6 | date_on_timeline = models.DateTimeField(default=timezone.now)
7 |
8 | source = models.CharField(max_length=100)
9 | schema = models.CharField(max_length=100)
10 | title = models.TextField(blank=True)
11 | description = models.TextField(blank=True)
12 | extra_attributes = models.JSONField(blank=True, default=dict)
13 |
14 | class Meta:
15 | indexes = [
16 | models.Index(fields=['schema']),
17 | models.Index(fields=['source']),
18 | models.Index(fields=['date_on_timeline']),
19 | ]
20 |
--------------------------------------------------------------------------------
/backend/source/timeline/permissions.py:
--------------------------------------------------------------------------------
1 | from oauth2_provider.contrib.rest_framework import TokenMatchesOASRequirements
2 | from rest_framework.permissions import IsAdminUser
3 |
4 | AdminOrTokenMatchesOASRequirements = IsAdminUser | TokenMatchesOASRequirements
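5 |
6 | # Note (added for clarity): DRF permission classes compose with the | operator,
7 | # so this permission passes when the request comes from an admin user OR
8 | # carries an OAuth2 token whose scopes satisfy the view's
9 | # required_alternate_scopes (see timeline.views.EntryViewSet).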
--------------------------------------------------------------------------------
/backend/source/timeline/renderers.py:
--------------------------------------------------------------------------------
1 | from gpxpy.gpx import GPXTrackPoint as Point
2 | import gpxpy
3 | from rest_framework.renderers import BaseRenderer
4 |
5 | from source.utils.datetime import json_to_datetime
6 |
7 |
8 | class GpxRenderer(BaseRenderer):
9 | """
10 | Render GPX file from entries
11 | """
12 | media_type = "application/gpx+xml"
13 | format = "gpx"
14 | charset = "utf-8"
15 |
16 | def render(self, data, accepted_media_type=None, renderer_context=None):
17 | gpx = gpxpy.gpx.GPX()
18 | gpx_track = gpxpy.gpx.GPXTrack()
19 | gpx.tracks.append(gpx_track)
20 | gpx_segment = gpxpy.gpx.GPXTrackSegment()
21 | gpx_track.segments.append(gpx_segment)
22 |
23 | for entry in data:
24 | location = entry['extra_attributes'].get('location')
25 | if location and location.get('latitude') is not None and location.get('longitude') is not None:
26 | gpx_segment.points.append(Point(
27 | location['latitude'],
28 | location['longitude'],
29 | time=json_to_datetime(entry['date_on_timeline']),
30 | elevation=location.get('elevation'),
31 | name=entry['title'] or None,
32 | comment=entry['description'] or None,
33 | ))
34 |
35 | return gpx.to_xml()
36 |
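37 | # Expected input shape (noted for clarity): `data` is a list of serialized
38 | # entries, e.g.
39 | #   [{'date_on_timeline': '2021-05-02T12:44:00Z', 'title': '', 'description': '',
40 | #     'extra_attributes': {'location': {'latitude': 52.52008, 'longitude': 13.41}}}]
41 | # Entries without both a latitude and a longitude are skipped, so the
42 | # renderer can safely be pointed at a mixed queryset.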
--------------------------------------------------------------------------------
/backend/source/timeline/serializers.py:
--------------------------------------------------------------------------------
1 | from rest_framework import serializers
2 |
3 | from .models import Entry
4 |
5 |
6 | class EntrySerializer(serializers.ModelSerializer):
7 | class Meta:
8 | model = Entry
9 | fields = '__all__'
10 |
11 |
12 | _entry_fields_cache = None
13 |
14 |
15 | def get_entry_fields():
16 | global _entry_fields_cache
17 | if not _entry_fields_cache:
18 | _entry_fields_cache = EntrySerializer().fields.keys()
19 | return _entry_fields_cache
20 |
21 |
22 | def serialize_entry(entry: Entry):
23 | """
24 | Much faster serializer for entry dumps
25 | """
26 | return {
27 | field: getattr(entry, field)
28 | for field in get_entry_fields()
29 | }
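30 |
31 | # Note (added for clarity): serialize_entry skips DRF field serialization and
32 | # reads model attributes directly, so values such as datetimes come back as
33 | # Python objects rather than strings. That is fine for dump_entries, which
34 | # passes the result through JSONRenderer (whose encoder handles datetimes),
35 | # but callers that need EntrySerializer's output should use EntrySerializer.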
--------------------------------------------------------------------------------
/backend/source/timeline/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import include, path
2 | from rest_framework import routers
3 |
4 | from .views import EntryViewSet
5 |
6 | router = routers.DefaultRouter()
7 | router.register(r'entries', EntryViewSet)
8 |
9 | urlpatterns = [
10 | path('', include(router.urls)),
11 | ]
12 |
--------------------------------------------------------------------------------
/backend/source/timeline/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/backend/source/timeline/utils/__init__.py
--------------------------------------------------------------------------------
/backend/source/timeline/views.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from django.http import HttpResponse
4 | from django_filters.rest_framework import DjangoFilterBackend
5 | from rest_framework import viewsets
6 | from rest_framework.filters import OrderingFilter
7 |
8 | from .models import Entry
9 | from .serializers import EntrySerializer
10 |
11 |
12 | class EntryViewSet(viewsets.ModelViewSet):
13 | required_alternate_scopes = {
14 | "GET": [["entry:read"]],
15 | "POST": [["entry:write"]],
16 | "PUT": [["entry:write"]],
17 | "DELETE": [["entry:write"]],
18 | }
19 |
20 | queryset = Entry.objects.all().order_by('date_on_timeline')
21 | serializer_class = EntrySerializer
22 | filter_backends = [DjangoFilterBackend, OrderingFilter]
23 | filterset_fields = {
24 | 'date_on_timeline': ['gte', 'lte', 'exact', 'gt', 'lt'],
25 | 'schema': ['exact', 'contains'],
26 | 'source': ['exact', 'contains'],
27 | }
28 |
29 |     def create(self, request, *args, **kwargs):
30 |         # Accepts a single entry or a list of entries (bulk creation)
31 |         serializer = self.get_serializer(data=request.data, many=isinstance(request.data, list))
32 |         serializer.is_valid(raise_exception=True)
33 |         self.perform_create(serializer)
34 |         headers = self.get_success_headers(serializer.data)
35 |         response = HttpResponse(json.dumps(serializer.data, ensure_ascii=False), content_type="application/json", status=201)
36 |         for header, value in headers.items():
37 |             response[header] = value
38 |         return response
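39 |
40 | # Usage note (illustrative; the exact URL prefix comes from backend/urls.py
41 | # and is assumed here): POSTing a JSON list creates entries in bulk, e.g.
42 | #   POST /timeline/entries/
43 | #   [{"schema": "journal", "source": "frontend/web", "title": "",
44 | #     "description": "Hello", "extra_attributes": {},
45 | #     "date_on_timeline": "2021-05-02T12:44:00Z"}]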
--------------------------------------------------------------------------------
/backend/ssh_config:
--------------------------------------------------------------------------------
1 | Host *
2 | SendEnv LANG LC_*
3 | HashKnownHosts yes
4 | GSSAPIAuthentication yes
5 | StrictHostKeyChecking no
--------------------------------------------------------------------------------
/docker-compose.homeserver.yml:
--------------------------------------------------------------------------------
1 | version: "3.5"
2 | services:
3 | timeline-backend:
4 | volumes:
5 | - /var/foldersync/photos:/data/mounts/photos
6 | - /var/foldersync/screenshots:/data/mounts/screenshots
7 | - /var/foldersync/telegram:/data/mounts/telegram
8 | proxy:
9 | volumes:
10 | - /var/foldersync/photos:/data/mounts/photos
11 | - /var/foldersync/screenshots:/data/mounts/screenshots
12 | - /var/foldersync/telegram:/data/mounts/telegram
13 | networks:
14 | default:
15 | homeserver:
16 | aliases:
17 | - timeline
18 | networks:
19 | homeserver:
20 | external:
21 | name: homeserver
22 |
--------------------------------------------------------------------------------
/docker-compose.override.yml:
--------------------------------------------------------------------------------
1 | version: "3.5"
2 | services:
3 | proxy:
4 | ports:
5 | - "80:80"
6 | - "443:443"
7 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.5"
2 | services:
3 | timeline-db:
4 | image: postgres:14
5 | environment:
6 | - POSTGRES_USER=postgres
7 | - POSTGRES_DB=timeline
8 | - POSTGRES_PASSWORD=postgres
9 | volumes:
10 | - db_persistence:/var/lib/postgresql/data
11 | restart: unless-stopped
12 | timeline-backend:
13 | build: backend
14 | environment:
15 | - BACKEND_DEBUG
16 | - BACKEND_SECRET_KEY
17 | - FRONTEND_CLIENT_ID
18 | - FRONTEND_DOMAIN
19 | - GEOLOCATION_CLIENT_ID
20 | - GEOLOCATION_CLIENT_SECRET
21 | depends_on:
22 | - timeline-db
23 | volumes:
24 | - ./backend/source:/usr/src/app
25 | - archives:/data/archives
26 | - backups:/data/backups
27 | - previews:/assets/previews
28 | - backend_staticfiles:/assets/static
29 | - ssh_keys:/root/.ssh
30 | logging:
31 | options:
32 | max-size: "200k"
33 | max-file: "10"
34 | restart: unless-stopped
35 | timeline-frontend:
36 | build:
37 | context: frontend
38 | args:
39 | - FRONTEND_CLIENT_ID
40 | - FRONTEND_DOMAIN
41 | - GOOGLE_MAPS_API_KEY
42 | volumes:
43 | - ./frontend/source:/usr/share/nginx/html:ro
44 | depends_on:
45 | - timeline-backend
46 | logging:
47 | driver: "none"
48 | restart: unless-stopped
49 | mqtt-broker:
50 | build: mqtt-broker
51 | environment:
52 | - MQTT_USERNAME
53 | - MQTT_PASSWORD
54 | volumes:
55 | - mqtt_persistence:/mosquitto/data
56 | ports:
57 | - 1883:1883
58 | restart: unless-stopped
59 | geolocation-client:
60 | build: geolocation-client
61 | environment:
62 | - MQTT_USERNAME
63 | - MQTT_PASSWORD
64 | - GEOLOCATION_CLIENT_ID
65 | - GEOLOCATION_CLIENT_SECRET
66 | depends_on:
67 | - mqtt-broker
68 | - timeline-backend
69 | volumes:
70 | - ./geolocation-client/source:/usr/src/app
71 | logging:
72 | options:
73 | max-size: "200k"
74 | max-file: "10"
75 | restart: unless-stopped
76 | proxy:
77 | build: proxy
78 | depends_on:
79 | - timeline-frontend
80 | - timeline-backend
81 | volumes:
82 | - archives:/data/archives
83 | - backups:/data/backups
84 | - previews:/assets/previews
85 | - backend_staticfiles:/assets/static
86 | logging:
87 | options:
88 | max-size: "200k"
89 | max-file: "10"
90 | restart: unless-stopped
91 | volumes:
92 | db_persistence:
93 | mqtt_persistence:
94 | backups:
95 | archives:
96 | previews:
97 | ssh_keys:
98 | backend_staticfiles:
--------------------------------------------------------------------------------
/frontend/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nginx:latest
2 |
3 | COPY nginx.conf /etc/nginx/conf.d/default.conf
4 |
5 | # Create config.js file with contents replaced with environment variables
6 | ARG FRONTEND_CLIENT_ID
7 | ARG FRONTEND_DOMAIN
8 | ARG GOOGLE_MAPS_API_KEY
9 | COPY ./source/js/config.js /usr/share/nginx/html/js/config.js
10 | RUN mkdir -p /usr/share/nginx/generated \
11 | && envsubst < /usr/share/nginx/html/js/config.js > /usr/share/nginx/generated/config.js
--------------------------------------------------------------------------------
/frontend/nginx.conf:
--------------------------------------------------------------------------------
1 | server {
2 | listen 80;
3 | listen [::]:80;
4 | server_name localhost;
5 |
6 | location = /js/config.js {
7 | # This is the config.js file with the environment variables inserted
8 | alias /usr/share/nginx/generated/config.js;
9 | }
10 |
11 | location / {
12 | root /usr/share/nginx/html;
13 | try_files $uri $uri/ /index.html;
14 | }
15 |
16 | error_page 500 502 503 504 /50x.html;
17 | location = /50x.html {
18 | root /usr/share/nginx/html;
19 | }
20 | }
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-brands-400.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-brands-400.eot
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-brands-400.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-brands-400.ttf
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-brands-400.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-brands-400.woff
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-brands-400.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-brands-400.woff2
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-regular-400.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-regular-400.eot
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-regular-400.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-regular-400.ttf
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-regular-400.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-regular-400.woff
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-regular-400.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-regular-400.woff2
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-solid-900.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-solid-900.eot
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-solid-900.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-solid-900.ttf
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-solid-900.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-solid-900.woff
--------------------------------------------------------------------------------
/frontend/source/fonts/fa-solid-900.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/fonts/fa-solid-900.woff2
--------------------------------------------------------------------------------
/frontend/source/images/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nicbou/timeline-old/3a97fa1c01a882070c547dc72030596025e3ff59/frontend/source/images/favicon.png
--------------------------------------------------------------------------------
/frontend/source/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Timeline
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/frontend/source/js/app.js:
--------------------------------------------------------------------------------
1 | import sync from './libs/vuex-router-sync.js';
2 | import router from './routes.js';
3 | import store from './store/store.js';
4 |
5 | sync(store, router, { moduleName: 'route' } );
6 |
7 | export const app = new Vue({
8 | el: '#page',
9 | router,
10 | store,
11 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/activity.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('activity-entry', {
4 | props: ['entry'],
5 | computed: {
6 | metaText: function() {
7 | if(this.entry.schema === 'activity.browsing.search') {
8 | if(this.url.startsWith('https://maps.google.com')) {
9 | return 'Google Maps search';
10 | }
11 | else if(this.url.startsWith('https://www.google.com/search?tbm=isch&q=')) {
12 | return 'Google Images search';
13 | }
14 | else if(this.url.startsWith('https://translate.google.com/')) {
15 | return 'Google Translate';
16 | }
17 | else if(this.url.startsWith('https://www.google.com')) {
18 | return 'Google search';
19 | }
20 | else if(this.url.startsWith('https://youtube.com')) {
21 | return 'YouTube search';
22 | }
23 | else if(this.url.startsWith('https://twitter.com')) {
24 | return 'Twitter search';
25 | }
26 | else if(this.url.startsWith('https://www.urbandictionary.com/')) {
27 | return 'Urban Dictionary search';
28 | }
29 | else if(this.url.startsWith('https://www.wikipedia.org/')) {
30 | return 'Wikipedia search';
31 | }
32 | else if(this.url.startsWith('https://www.dict.cc')) {
33 | return 'dict.cc search';
34 | }
35 | return 'Other search';
36 | }
37 | else if(this.entry.schema === 'activity.browsing.watch') {
38 | return 'YouTube video';
39 | }
40 | return 'Page view';
41 | },
42 | icon: function() {
43 | return this.entry.schema === 'activity.browsing.search' ? `"${this.entry.title}"` : this.entry.title;
44 | },
45 | url: function() {
46 | return this.entry.extra_attributes.url;
47 | },
48 | hostname: function() {
49 | let hostname = null;
50 | try {
51 | hostname = (new URL(this.url)).hostname.replace(/^(www\.)/,"");
52 | }
53 | catch {}
54 | return hostname;
55 | },
56 | entryClass: function() {
57 | if (this.hostname && this.hostname.startsWith('youtube.')) {
58 | return 'watch youtube';
59 | }
60 | else if(this.entry.schema === 'activity.browsing.search') {
61 | return 'search';
62 | }
63 | return 'browse';
64 | },
65 | iconClass: function() {
66 | if (this.hostname && this.hostname.startsWith('youtube.')) {
67 | return 'fab fa-youtube';
68 | }
69 | else if(this.entry.schema === 'activity.browsing.search') {
70 | return 'fas fa-search';
71 | }
72 | return 'fas fa-globe-americas';
73 | },
74 | },
75 | template: `
76 |
84 | `
85 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/commit.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('commit-entry', {
4 | props: ['entry'],
5 | computed: {
6 | filesChangedString: function() {
7 | if(this.entry.extra_attributes.changes.files === 1) {
8 | return `${this.entry.extra_attributes.changes.files} file changed`;
9 | }
10 | return `${this.entry.extra_attributes.changes.files} files changed`;
11 | }
12 | },
13 | template: `
14 |
15 |
16 |
19 |
20 | {{ entry.title }}
21 |
22 | {{ filesChangedString }}
23 | (+{{ entry.extra_attributes.changes.insertions }}, -{{ entry.extra_attributes.changes.deletions }})
24 |
25 |
26 |
27 | `
28 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/entry-icon.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('entry-icon', {
2 | props: ['entry', 'iconClass'],
3 | template: `
4 |
7 |
8 | `
9 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/gallery.js:
--------------------------------------------------------------------------------
1 | import ImageThumbnailComponent from './../thumbnails/image.js';
2 | import VideoThumbnailComponent from './../thumbnails/video.js';
3 | import { hasGeolocation } from './../../utils/entries.js';
4 | import TimelineEntryIcon from './entry-icon.js';
5 |
6 | export default Vue.component('gallery', {
7 | props: ['entry'],
8 | methods: {
9 | thumbnailType: function(entry) {
10 | if(entry.schema.startsWith('file.video')) {
11 | return 'video-thumbnail';
12 | }
13 | return 'image-thumbnail';
14 | },
15 | hasGeolocation,
16 | },
17 | template: `
18 |
19 |
20 |
Gallery
21 |
22 |
23 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 | `
37 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/journal.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('journal-entry', {
4 | props: ['entry'],
5 | data: function() {
6 | return {
7 | unsavedDescription: null,
8 | isEditing: false,
9 | isSaving: false,
10 | };
11 | },
12 | computed: {
13 | markdownDescription: function() {
14 | return this.entry.description ? marked(this.entry.description) : '<p>No text here</p>';
15 | },
16 | },
17 | methods: {
18 | edit: function() {
19 | this.unsavedDescription = this.entry.description;
20 | this.isEditing = true;
21 | this.$nextTick(() => {
22 | this.$refs.editor.focus();
23 | });
24 | },
25 | saveChanges: function(){
26 | this.isSaving = true;
27 | if(this.unsavedDescription.length) {
28 | this.entry.description = this.unsavedDescription;
29 | this.$store.dispatch('timeline/updateEntry', this.entry).then(e => {
30 | this.unsavedDescription = null;
31 | this.isEditing = false;
32 | this.isSaving = false;
33 | });
34 | }
35 | else {
36 | this.deleteEntry();
37 | }
38 | },
39 | deleteEntry: function() {
40 | this.$store.dispatch('timeline/deleteEntry', this.entry);
41 | },
42 | cancelChanges: function() {
43 | this.isEditing = false;
44 | this.unsavedDescription = null;
45 | },
46 | },
47 | template: `
48 |
49 |
50 |
Journal entry
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 | `
62 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/message.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('message-entry', {
4 | props: ['entry'],
5 | computed: {
6 | iconClass: function() {
7 | if (this.entry.schema.startsWith('message.text.sms')) {
8 | return 'fas fa-sms';
9 | }
10 | else if (this.entry.schema.startsWith('message.telegram')){
11 | return 'fab fa-telegram-plane';
12 | }
13 | else if (this.entry.schema.startsWith('message.facebook')){
14 | return 'fab fa-facebook-messenger';
15 | }
16 | else if (this.entry.schema.startsWith('message.reddit')){
17 | return 'fab fa-reddit';
18 | }
19 | },
20 | entryClass: function() {
21 | if (this.entry.schema.startsWith('message.text.sms')) {
22 | return 'sms';
23 | }
24 | else if (this.entry.schema.startsWith('message.telegram')){
25 | return 'telegram';
26 | }
27 | else if (this.entry.schema.startsWith('message.facebook')){
28 | return 'facebook-messenger';
29 | }
30 | else if (this.entry.schema.startsWith('message.reddit')){
31 | return 'reddit';
32 | }
33 | },
34 | senderName: function() {
35 | return this.entry.extra_attributes.sender_name || this.entry.extra_attributes.sender_id;
36 | },
37 | recipientName: function() {
38 | return this.entry.extra_attributes.recipient_name || this.entry.extra_attributes.recipient_id;
39 | },
40 | },
41 | template: `
42 |
43 |
44 |
45 | {{ senderName }}
46 | ▸
47 | {{ recipientName }}
48 |
49 |
50 |
54 |
58 |
60 | {{ entry.description }}
61 |
62 |
63 | `
64 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/motion.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('motion-entry', {
4 | props: ['entry'],
5 | computed: {
6 | period: function() {
7 | let hours = new Date(this.entry.date_on_timeline).getHours();
8 | if (hours < 12) {
9 | return 'morning';
10 | }
11 | else if (hours <= 18) {
12 | return 'afternoon';
13 | }
14 | else if (hours <= 24) {
15 | return 'evening';
16 | }
17 | },
18 | description: function() {
19 | return `${this.period} ${this.entry.title}`.toLowerCase()
20 | .split(' ')
21 | .map((s) => s.charAt(0).toUpperCase() + s.substring(1))
22 | .join(' ');
23 | },
24 | timeAndDistance: function() {
25 | let description = '';
26 | if (this.entry.extra_attributes.distance) {
27 | description += `${(parseFloat(this.entry.extra_attributes.distance)/1000).toFixed(1)} km`;
28 | }
29 | if (this.entry.extra_attributes.duration) {
30 | description += ` in ${(parseFloat(this.entry.extra_attributes.duration)/60).toFixed(0)} min`;
31 | }
32 | return description;
33 | },
34 | iconClass: function() {
35 | if (this.entry.title == 'walking') {
36 | return 'fas fa-walking';
37 | }
38 | else if (this.entry.title == 'biking') {
39 | return 'fas fa-biking';
40 | }
41 | else if (this.entry.title == 'running') {
42 | return 'fas fa-running';
43 | }
44 | else if (this.entry.title == 'skiing') {
45 | return 'fas fa-skiing';
46 | }
47 | else if (this.entry.title == 'rowing') {
48 | return 'fas fa-rowing';
49 | }
50 | return 'fas fa-heart';
51 | },
52 | },
53 | template: `
54 |
55 |
56 |
Activity
57 |
58 | {{ description }}
59 | {{ timeAndDistance }}
60 |
61 |
62 | `
63 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/text.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('text-entry', {
4 | props: ['entry'],
5 | computed: {
6 | fileName: function() {
7 | const pathParts = this.entry.extra_attributes.file.path.split('/');
8 | return pathParts[pathParts.length - 1];
9 | },
10 | richDescription: function() {
11 | if (this.entry.extra_attributes.file.mimetype === 'text/markdown'){
12 | return marked(this.entry.description);
13 | }
14 | return '<p>' + this.entry.description.replaceAll('\n', '<br>') + '</p>';
15 | },
16 | },
17 | template: `
18 |
19 |
20 |
{{ fileName }}
21 |
22 |
23 | `
24 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/trakt.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('watch-entry', {
4 | props: ['entry'],
5 | computed: {
6 | metaText: function() {
7 | if (this.entry.schema == 'activity.watching.show') {
8 | return 'Show';
9 | }
10 | else if (this.entry.schema == 'activity.watching.movie') {
11 | return 'Film';
12 | }
13 | },
14 | iconClass: function() {
15 | if (this.entry.schema == 'activity.watching.movie') {
16 | return 'fas fa-film';
17 | }
18 | else if (this.entry.schema == 'activity.watching.show') {
19 | return 'fas fa-tv';
20 | }
21 | },
22 | url: function() {
23 | return this.entry.extra_attributes.url;
24 | }
25 | },
26 | template: `
27 |
35 | `
36 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/entries/transaction.js:
--------------------------------------------------------------------------------
1 | import TimelineEntryIcon from './entry-icon.js';
2 |
3 | export default Vue.component('transaction-entry', {
4 | props: ['entry'],
5 | computed: {
6 | isExpense: function(){
7 | return this.entry.schema === 'finance.expense';
8 | },
9 | transactionType: function(){
10 | return this.isExpense ? 'expense' : 'income';
11 | },
12 | amount: function(){
13 | let amount = this.entry.extra_attributes.recipient.amount;
14 | if(this.isExpense) {
15 | amount = this.entry.extra_attributes.sender.amount;
16 | }
17 | return Number(amount).toFixed(2);
18 | },
19 | otherCurrencyAmount: function(){
20 | let amount = this.entry.extra_attributes.sender.amount;
21 | if(this.isExpense) {
22 | amount = this.entry.extra_attributes.recipient.amount;
23 | }
24 | return Number(amount).toFixed(2);
25 | },
26 | currency: function(){
27 | let currency = this.entry.extra_attributes.recipient.currency;
28 | if(this.isExpense) {
29 | currency = this.entry.extra_attributes.sender.currency;
30 | }
31 | return currency === 'EUR' ? '€' : currency;
32 | },
33 | otherCurrency: function(){
34 | let currency = this.entry.extra_attributes.sender.currency;
35 | if(this.isExpense) {
36 | currency = this.entry.extra_attributes.recipient.currency;
37 | }
38 | return currency === 'EUR' ? '€' : currency;
39 | },
40 | otherPartyName: function(){
41 | if(this.isExpense) {
42 | return this.entry.extra_attributes.recipient.name;
43 | }
44 | return this.entry.extra_attributes.sender.name;
45 | },
46 | },
47 | template: `
48 |
49 |
50 |
{{ otherPartyName }}
51 |
52 | {{ amount }}{{ currency }} {{ transactionType }}
53 | ({{ otherCurrencyAmount }}{{ otherCurrency }})
54 | {{ entry.description }}
55 |
56 |
57 | `
58 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/filter.js:
--------------------------------------------------------------------------------
1 | import { filters } from './../models/filters.js';
2 |
3 | export default Vue.component('entry-filter', {
4 | props: ['name'],
5 | computed: {
6 | filter: function() {
7 | return filters[this.name];
8 | },
9 | entries: function() {
10 | return this.$store.state.timeline.entries.filter(this.filter.filterFunction);
11 | },
12 | filterName: function() {
13 | if(this.entries.length === 1) {
14 | return `1 ${this.filter.displayName}`;
15 | }
16 | return `${this.entries.length} ${this.filter.displayNamePlural}`;
17 | },
18 | isEnabled: function() {
19 | return this.$store.state.timeline.enabledFilters.includes(this.name);
20 | },
21 | isDisabled: function() {
22 | return (
23 | this.$store.state.timeline.enabledFilters.length > 0
24 | && !this.$store.state.timeline.enabledFilters.includes(this.name)
25 | );
26 | }
27 | },
28 | methods: {
29 | toggleFilter: function() {
30 | this.$store.dispatch('timeline/toggleFilter', this.name);
31 | },
32 | },
33 | template: `
34 |
35 |
36 | {{ filterName }}
37 |
38 | `
39 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/googleMap.js:
--------------------------------------------------------------------------------
1 | "use strict";
2 |
3 | import { initGoogleMaps } from './../services/googlemaps.js';
4 | import config from './../config.js';
5 |
6 | export default Vue.component('google-map', {
7 | props: ['markers'],
8 | data: function() {
9 | return {
10 | map: null,
11 | polyline: null,
12 | currentMapFeatures: [],
13 | };
14 | },
15 | watch: {
16 | markers: function() { this.updateFeaturesOnMap() },
17 | map: function() { this.updateFeaturesOnMap() },
18 | },
19 | methods: {
20 | updateFeaturesOnMap: function() {
21 | if (!this.map) { return }
22 | if (this.polyline) {
23 | this.polyline.setMap(null);
24 | }
25 | this.polyline = new google.maps.Polyline({
26 | path: this.markers,
27 | geodesic: true,
28 | strokeOpacity: 1.0,
29 | strokeWeight: 2,
30 | map: this.map,
31 | });
32 |
33 | const mapBounds = new google.maps.LatLngBounds();
34 | this.markers.forEach(marker => {
35 | mapBounds.extend(new google.maps.LatLng(marker.lat, marker.lng));
36 | });
37 |
38 | // Prevent excessive zoom when all the markers are really close
39 | google.maps.event.addListenerOnce(
40 | this.map, 'bounds_changed', () => this.map.setZoom(Math.min(15, this.map.getZoom()))
41 | );
42 | this.map.fitBounds(mapBounds);
43 | },
44 | },
45 | async mounted() {
46 | try {
47 | const google = await initGoogleMaps();
48 | this.map = new google.maps.Map(this.$el, {
49 | disableDefaultUI: true,
50 | mapTypeId: 'terrain',
51 | });
52 | this.updateFeaturesOnMap();
53 | } catch (error) {
54 | console.error(error);
55 | }
56 | },
57 | template: ``,
58 | })
59 |
--------------------------------------------------------------------------------
/frontend/source/js/components/journalEditor.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('journal-editor', {
2 | props: ['entry'],
3 | data: function() {
4 | return {
5 | unsavedDescription: '',
6 | isSaving: false,
7 | };
8 | },
9 | mounted: function() {
10 | this.$nextTick(() => {
11 | this.$refs.editor.focus();
12 | });
13 | },
14 | methods: {
15 | close: function(event) {
16 | this.$emit('close');
17 | },
18 | saveChanges: function(){
19 | this.isSaving = true;
20 | const now = moment();
21 | const lastMinuteOfTheDay = moment(this.$store.state.route.query.date, 'YYYY-MM-DD', true).add(1, 'd').subtract(1, 'm');
22 | let dateOnTimeline = now.isAfter(lastMinuteOfTheDay) ? lastMinuteOfTheDay : now;
23 | this.$store.dispatch('timeline/addEntry', {
24 | 'schema': 'journal',
25 | 'source': 'frontend/web',
26 | 'title': '',
27 | 'description': this.unsavedDescription,
28 | 'extra_attributes': {},
29 | 'date_on_timeline': dateOnTimeline.format(),
30 | }).then(e => {
31 | this.unsavedDescription = null;
32 | this.isSaving = false;
33 | this.close();
34 | });
35 | },
36 | },
37 | template: `
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | `
46 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/preview.js:
--------------------------------------------------------------------------------
1 | import ImagePreview from './previews/image.js';
2 | import PdfPreview from './previews/pdf.js';
3 | import VideoPreview from './previews/video.js';
4 | import { hasGeolocation } from './../utils/entries.js';
5 |
6 | export default Vue.component('preview', {
7 | props: ['entry'],
8 | computed: {
9 | mimetype: function(){
10 | if (this.entry.extra_attributes && this.entry.extra_attributes.file) {
11 | return this.entry.extra_attributes.file.mimetype;
12 | }
13 | return undefined;
14 | },
15 | previewType: function() {
16 | if (this.mimetype.startsWith('image/')) {
17 | return 'image-preview';
18 | }
19 | else if(this.mimetype.startsWith('video/')) {
20 | return 'video-preview';
21 | }
22 | else if(this.mimetype === 'application/pdf') {
23 | return 'pdf-preview';
24 | }
25 | },
26 | },
27 | methods: {
28 | hasGeolocation,
29 | close: function(event) {
30 | this.$emit('close');
31 | }
32 | },
33 | template: `
34 |
91 | `
92 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/previews/geolocation.js:
--------------------------------------------------------------------------------
1 | import config from './../googleMap.js';
2 | import { hasGeolocation } from './../../utils/entries.js';
3 |
4 | export default Vue.component('entry-map', {
5 | props: ['entries',],
6 | computed: {
7 | geolocationEntries: function() {
8 | return this.entries.filter(hasGeolocation);
9 | },
10 | markers: function() {
11 | return this.geolocationEntries.map(e => {
12 | return {
13 | lat: e.extra_attributes.location.latitude,
14 | lng: e.extra_attributes.location.longitude,
15 | };
16 | });
17 | },
18 | },
19 | template: `
20 |
21 | `
22 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/previews/image.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('image-preview', {
2 | props: ['entry'],
3 | computed: {
4 | imageSrcSet: function() {
5 | return `${this.entry.extra_attributes.previews.preview} 1x, ${this.entry.extra_attributes.previews.preview2x} 2x`;
6 | },
7 | },
8 | template: `
9 |
14 | `
15 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/previews/pdf.js:
--------------------------------------------------------------------------------
1 | import ImagePreview from './image.js';
2 |
3 | export default Vue.component('pdf-preview', {
4 | props: ['entry'],
5 | computed: {
6 | },
7 | template: `
8 |
11 | `
12 | });
13 |
--------------------------------------------------------------------------------
/frontend/source/js/components/previews/video.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('video-preview', {
2 | props: ['entry'],
3 | template: `
4 |
7 | `
8 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/settings/archives.js:
--------------------------------------------------------------------------------
1 | import { RequestStatus } from './../../models/requests.js';
2 | import ArchiveComponent from './archive.js';
3 |
4 | export default Vue.component('archives', {
5 | data: function() {
6 | return {
7 | isSaving: false,
8 | uploadedFiles: [],
9 | newArchive: null,
10 | }
11 | },
12 | created: function() {
13 | this.$store.dispatch('archives/getArchives').catch(response => {
14 | if([401, 403].includes(response.status)) {
15 | this.$router.push({name: 'login'});
16 | }
17 | });
18 | this.$store.dispatch('archives/getArchiveEndpoints');
19 | },
20 | computed: {
21 | archives: function() {
22 | return this.$store.state.archives.archives;
23 | },
24 | archiveEndpoints: function() {
25 | return this.$store.state.archives.archiveEndpoints;
26 | },
27 | },
28 | methods: {
29 | createArchive: function() {
30 | this.newArchive = {
31 | 'key': '',
32 | 'type': 'facebook',
33 | 'description': '',
34 | 'date_processed': null,
35 | };
36 | }
37 | },
38 | template: `
39 |
40 |
41 | Archives
42 |
43 |
44 |
45 |
46 |
47 | -
48 |
49 |
50 | -
51 |
52 |
53 |
54 |
55 | `
56 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/settings/settings.js:
--------------------------------------------------------------------------------
1 | import { RequestStatus } from './../../models/requests.js';
2 |
3 | export default Vue.component('settings', {
4 | data: function() {
5 | return {}
6 | },
7 | created: function() {
8 | },
9 | computed: {
10 | },
11 | methods: {
12 | },
13 | template: `
14 |
15 |
19 |
31 |
32 | `
33 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/settings/source.js:
--------------------------------------------------------------------------------
1 | import { sourceTypes } from './../../services/source-service.js';
2 | import sourceTrakt from './sourceTrakt.js'
3 |
4 | export default Vue.component('source-data', {
5 | props: ['source', 'isNew'],
6 | data: function() {
7 | return {
8 | sourceTypes: sourceTypes,
9 | }
10 | },
11 | template: `
12 |
13 |
14 |
15 | {{ sourceTypes[this.source.type] }} source - {{ this.source.key }}
16 |
17 |
18 |
19 |
20 | `
21 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/settings/sourceTrakt.js:
--------------------------------------------------------------------------------
1 | import { sourceTypes } from './../../services/source-service.js';
2 | import SpinnerComponent from '../spinner.js'
3 |
4 | export default Vue.component('source-trakt', {
5 | props: ['source', 'isNew'],
6 | data: function() {
7 | return {
8 | isLoading: false,
9 | authInfo: null,
10 | authStatus: false,
11 | authPin: null,
12 | }
13 | },
14 | methods: {
15 | startPoll: function() {
16 | // GET request to backend to start polling
17 | // Receive a code to visit in response
18 | this.isLoading = true
19 | fetch(this.source.url + 'start_poll/')
20 | .then(response => response.json())
21 | .then(data => this.authInfo = { url: data.verification_url, code: data.user_code })
22 | .finally(() => {
23 | this.isLoading = false;
24 | });
25 | },
26 | checkAuthStatus: function() {
27 | // Queries the OAuth endpoint to see if the details are ok
28 | this.isLoading = true
29 | fetch(this.source.url + 'status/')
30 | .then(response => response.json())
31 | .then(data => this.authStatus = data.status)
32 | .finally(() => {
33 | this.isLoading = false;
34 | });
35 | },
36 | getURL: function() {
37 | // Obtain URL for obtaining pin. 1st stage of OAuth process
38 | this.isLoading = true
39 | fetch(this.source.url + 'get_url/')
40 | .then(response => response.json())
41 | .then(data => this.authInfo = { url: data.url })
42 | .finally(() => {
43 | this.isLoading = false;
44 | });
45 | },
46 | submitPin: function() {
47 | this.isLoading = true
48 | fetch(this.source.url + 'put_pin/',{
49 | method: 'PUT',
50 | headers:{
51 | 'Content-Type':'application/json'
52 | },
53 | body: JSON.stringify({'pin': this.authPin})
54 | })
55 | .then(() => {this.authStatus = true})
56 | .finally(() => {this.checkAuthStatus()});
57 | },
58 | },
59 | computed: {
60 | display_status: function() {
61 | if (this.isLoading){
62 | return 'Connecting ---'
63 | }
64 | // Prettify the authentication status message
65 | if (this.authStatus){
66 | return 'Connected \u{2705}';
67 | }
68 | else {
69 | return 'Not connected \u{274C}';
70 | }
71 | },
72 | },
73 | mounted: function() {
74 | this.checkAuthStatus()
75 | },
76 | template: `
77 |
78 | Authentication Status: {{ display_status }}
79 |
80 |
81 |
82 |
87 |
88 |
89 | `
90 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/settings/sources.js:
--------------------------------------------------------------------------------
1 | import { RequestStatus } from './../../models/requests.js';
2 | import SourceComponent from './source.js';
3 |
4 | export default Vue.component('sources', {
5 | data: function() {
6 | return {
7 | isSaving: false,
8 | uploadedFiles: [],
9 | newSource: null,
10 | }
11 | },
12 | created: function() {
13 | this.$store.dispatch('sources/getSources').catch(response => {
14 | if([401, 403].includes(response.status)) {
15 | this.$router.push({name: 'login'});
16 | }
17 | });
18 | this.$store.dispatch('sources/getSourceEndpoints');
19 | },
20 | computed: {
21 | sources: function() {
22 | return this.$store.state.sources.sources;
23 | },
24 | sourceEndpoints: function() {
25 | return this.$store.state.sources.sourceEndpoints;
26 | },
27 | },
28 | methods: {
29 | createSource: function() {
30 | this.newSource = {
31 | 'key': '',
32 | 'type': 'trakt',
33 | 'description': '',
34 | 'date_processed': null,
35 | };
36 | }
37 | },
38 | template: `
39 |
40 |
41 | Sources
42 |
45 |
46 |
47 | -
48 |
49 |
50 | -
51 |
52 |
53 |
54 |
55 | `
56 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/spinner.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('spinner', {
2 | template: `
3 | Loading entries...
4 | `
5 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/thumbnails/image.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('image-thumbnail', {
2 | props: ['entry', 'height'],
3 | computed: {
4 | src: function() {
5 | return this.entry.extra_attributes.previews.thumbnail;
6 | },
7 | srcset: function() {
8 | return `${this.entry.extra_attributes.previews.thumbnail} 1x, ${this.entry.extra_attributes.previews.thumbnail2x} 2x`;
9 | },
10 | width: function() {
11 | // Some entries can have a preview but no width. For example, a PDF has no width.
12 | if(this.entry.extra_attributes.media && this.entry.extra_attributes.media.width && this.entry.extra_attributes.media.height){
13 | return Math.floor(this.entry.extra_attributes.media.width / this.entry.extra_attributes.media.height * this.height)
14 | }
15 | }
16 | },
17 | template: `
18 |
27 | `
28 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/thumbnails/video.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('video-thumbnail', {
2 | props: ['entry', 'height'],
3 | methods: {
4 | videoHoverStart: function() {
5 | this.$refs.videoElement.play()
6 | },
7 | videoHoverEnd: function() {
8 | this.$refs.videoElement.pause()
9 | this.$refs.videoElement.currentTime = 0;
10 | },
11 | },
12 | template: `
13 |
22 | `
23 | });
--------------------------------------------------------------------------------
/frontend/source/js/components/timeline-nav.js:
--------------------------------------------------------------------------------
1 | export default Vue.component('timeline-nav', {
2 | computed: {
3 | timelineDate: {
4 | get() {
5 | return moment(this.$store.state.route.query.date, 'YYYY-MM-DD', true);
6 | },
7 | set(newDate) {
8 | const queryParams = { ...this.$store.state.route.query };
9 | queryParams.date = moment.min(newDate, this.today).format('YYYY-MM-DD');
10 | return this.$router.push({ name: 'timeline', query: queryParams });
11 | }
12 | },
13 | timelineDateIso: {
14 | get() {
15 | return this.$store.state.route.query.date;
16 | },
17 | set(newDate) {
18 | return this.timelineDate = moment(newDate, 'YYYY-MM-DD', true);
19 | }
20 | },
21 | today: function(){
22 | return moment().startOf('day');
23 | },
24 | showTomorrow: function() {
25 | return moment(this.timelineDate).add(1, 'days').diff(this.today) <= 0
26 | },
27 | showNextWeek: function() {
28 | return moment(this.timelineDate).add(1, 'weeks').diff(this.today) <= 0
29 | },
30 | showNextMonth: function() {
31 | return moment(this.timelineDate).add(1, 'months').diff(this.today) <= 0
32 | },
33 | showNextYear: function() {
34 | return moment(this.timelineDate).add(1, 'years').diff(this.today) <= 0
35 | },
36 | },
37 | methods: {
38 | pickTimelineDate: function(date) {
39 | this.timelineDate = moment(date);
40 | },
41 | moveTimelineDate: function(quantity, unit) {
42 | this.timelineDate = moment(this.timelineDate).add(quantity, unit);
43 | },
44 | },
45 | template: `
46 |
58 | `
59 | });
--------------------------------------------------------------------------------
/frontend/source/js/config.js:
--------------------------------------------------------------------------------
1 | // The variables in this file are replaced by environment variables at build time.
2 | // The new file is saved in a different place, but nginx serves it at the same address.
3 | // If you change this file, you must rebuild the docker image for the changes to apply.
4 | export default {
5 | googleMapsApiKey: "${GOOGLE_MAPS_API_KEY}",
6 | clientId: "${FRONTEND_CLIENT_ID}",
7 | domain: "${FRONTEND_DOMAIN}",
8 | };
9 |
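
The comment above describes build-time substitution of the `${...}` placeholders, but the build step itself does not appear in this section (a common approach is `envsubst` during the image build). A minimal Node sketch of the same idea, with both file paths being assumptions:

    // Sketch only: render ${VAR} placeholders from the environment.
    // The real build step is not shown here; input/output paths are assumptions.
    const fs = require('fs');

    const template = fs.readFileSync('frontend/source/js/config.js', 'utf8');
    const rendered = template.replace(/\$\{(\w+)\}/g, (match, name) =>
      name in process.env ? process.env[name] : match);
    fs.writeFileSync('/usr/share/nginx/html/js/config.js', rendered);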
--------------------------------------------------------------------------------
/frontend/source/js/libs/vuex-router-sync.js:
--------------------------------------------------------------------------------
1 | export default function (store, router, options) {
2 | var moduleName = (options || {}).moduleName || 'route'
3 |
4 | store.registerModule(moduleName, {
5 | namespaced: true,
6 | state: cloneRoute(router.currentRoute),
7 | mutations: {
8 | 'ROUTE_CHANGED': function ROUTE_CHANGED (state, transition) {
9 | store.state[moduleName] = cloneRoute(transition.to, transition.from)
10 | }
11 | }
12 | })
13 |
14 | var isTimeTraveling = false
15 | var currentPath
16 |
17 | // sync router on store change
18 | var storeUnwatch = store.watch(
19 | function (state) { return state[moduleName]; },
20 | function (route) {
21 | var fullPath = route.fullPath;
22 | if (fullPath === currentPath) {
23 | return
24 | }
25 | if (currentPath != null) {
26 | isTimeTraveling = true
27 | router.push(route)
28 | }
29 | currentPath = fullPath
30 | },
31 | { sync: true }
32 | )
33 |
34 | // sync store on router navigation
35 | var afterEachUnHook = router.afterEach(function (to, from) {
36 | if (isTimeTraveling) {
37 | isTimeTraveling = false
38 | return
39 | }
40 | currentPath = to.fullPath
41 | store.commit(moduleName + '/ROUTE_CHANGED', { to: to, from: from })
42 | })
43 |
44 | return function unsync () {
45 | // On unsync, remove router hook
46 | if (afterEachUnHook != null) {
47 | afterEachUnHook()
48 | }
49 |
50 | // On unsync, remove store watch
51 | if (storeUnwatch != null) {
52 | storeUnwatch()
53 | }
54 |
55 | // On unsync, unregister Module with store
56 | store.unregisterModule(moduleName)
57 | }
58 | }
59 |
60 | function cloneRoute (to, from) {
61 | var clone = {
62 | name: to.name,
63 | path: to.path,
64 | hash: to.hash,
65 | query: to.query,
66 | params: to.params,
67 | fullPath: to.fullPath,
68 | meta: to.meta
69 | }
70 | if (from) {
71 | clone.from = cloneRoute(from)
72 | }
73 | return Object.freeze(clone)
74 | }
75 |
76 |
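
Usage mirrors the upstream vuex-router-sync package this file is based on: call the exported function once with the store and the router; it registers a `route` store module (the name is configurable via `options.moduleName`) and returns an unsync callback. A sketch:

    import sync from './libs/vuex-router-sync.js';
    import store from './store/store.js';
    import router from './routes.js';

    // Registers the 'route' module and keeps it in sync with navigation.
    const unsync = sync(store, router);
    // Later, e.g. when tearing the app down:
    // unsync();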
--------------------------------------------------------------------------------
/frontend/source/js/models/requests.js:
--------------------------------------------------------------------------------
1 | export const RequestStatus = {
2 | NONE: 'NONE',
3 | PENDING: 'PENDING',
4 | SUCCESS: 'SUCCESS',
5 | FAILURE: 'FAILURE',
6 | }
--------------------------------------------------------------------------------
/frontend/source/js/routes.js:
--------------------------------------------------------------------------------
1 | import TimelineComponent from './components/timeline.js';
2 | import ArchivesComponent from './components/settings/archives.js';
3 | import SettingsComponent from './components/settings/settings.js';
4 | import SourcesComponent from './components/settings/sources.js';
5 | import store from './store/store.js';
6 |
7 |
8 | const router = new VueRouter({
9 | mode: 'history',
10 | routes: [
11 | {
12 | path: '/settings',
13 | name: 'settings',
14 | component: SettingsComponent,
15 | children: [
16 | { path: 'archives', name: 'archives', component: ArchivesComponent },
17 | { path: 'sources', name: 'sources', component: SourcesComponent },
18 | ],
19 | meta: { requiresAuth: true },
20 | },
21 | {
22 | path: '/timeline',
23 | name: 'timeline',
24 | component: TimelineComponent,
25 | meta: { requiresAuth: true },
26 | },
27 | {
28 | name: 'login',
29 | path: '/login',
30 | async beforeEnter(to, from, next) {
31 | window.location.replace(await store.dispatch('auth/getAuthorizationCodeUrl'));
32 | }
33 | },
34 | {
35 | path: '/',
36 | redirect: { name: 'timeline' }
37 | },
38 | {
39 | name: 'oauth-redirect',
40 | path: '/oauth-redirect',
41 | beforeEnter: async (to, from, next) => {
42 | if(to.query.code) {
43 | store.dispatch('auth/getToken', to.query.code).then(_ => next({ name: 'timeline' }));
44 | }
45 | else {
46 | store.dispatch('auth/clearToken').then(_ => next({ name: 'login' }));
47 | }
48 | },
49 | }
50 | ]
51 | });
52 |
53 | router.beforeEach((to, from, next) => {
54 | if (to.meta.requiresAuth) {
55 | if(store.state.auth.accessToken === null) {
56 | next({ name: 'login' }); // TODO: remember path
57 | }
58 | else {
59 | next();
60 | }
61 | }
62 | else {
63 | next();
64 | }
65 | });
66 |
67 | export default router;
68 |
--------------------------------------------------------------------------------
/frontend/source/js/services/api-service.js:
--------------------------------------------------------------------------------
1 | import config from './../config.js';
2 |
3 | export default class {
4 | static getApiBase() {
5 | return `https://${config.domain}/api`;
6 | }
7 |
8 | static fetchJsonWithToken(absoluteUrl, options, accessToken) {
9 | if(!accessToken){
10 | throw new Error("fetchJsonWithToken called without an access token");
11 | }
12 | options.headers = options.headers || {};
13 | options.headers.Authorization = `Bearer ${accessToken}`;
14 | return fetch(absoluteUrl, options)
15 | .then(response => response.ok ? response.json() : Promise.reject(response));
16 | }
17 | }
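
Because fetchJsonWithToken rejects with the raw Response whenever response.ok is false, callers can branch on response.status, which is exactly what the settings components do. A short sketch (accessToken is assumed to be in scope):

    import ApiService from './services/api-service.js';

    ApiService.fetchJsonWithToken(ApiService.getApiBase() + '/timeline/entries/', {}, accessToken)
      .then(entries => console.log(entries))
      .catch(response => {
        // `response` is the failed Response object, not an Error.
        if ([401, 403].includes(response.status)) {
          // e.g. redirect to the login route
        }
      });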
--------------------------------------------------------------------------------
/frontend/source/js/services/archive-service.js:
--------------------------------------------------------------------------------
1 | import ApiObjectService from './object-service.js';
2 |
3 | export const archiveTypes = {
4 | facebook: 'Facebook',
5 | googletakeout: 'Google Takeout',
6 | gpx: 'GPX track',
7 | icalendar: 'iCalendar file',
8 | json: 'JSON entries',
9 | n26csv: 'N26 CSV export',
10 | reddit: 'Reddit',
11 | telegram: 'Telegram',
12 | twitter: 'Twitter',
13 | };
14 |
15 | export default class extends ApiObjectService{
16 | static getApiBase() {
17 | return super.getApiBase() + '/archive';
18 | }
19 |
20 | static objectToFormData(archive, attachedFiles) {
21 | const formData = new FormData();
22 | formData.append('key', archive.key);
23 | formData.append('description', archive.description);
24 | formData.append('date_from', archive.date_from || '');
25 | formData.append('date_until', archive.date_until || '');
26 | formData.append('date_processed', archive.date_processed || '');
27 | for (var i = 0; i < attachedFiles.length; i++) {
28 | formData.append('archive_files', attachedFiles[i], attachedFiles[i].name);
29 | }
30 | return formData;
31 | }
32 |
33 | static async getEndpoints(accessToken) {
34 | const archiveEndpointsByType = await super.getEndpoints(accessToken);
35 | delete archiveEndpointsByType.archivefile;
36 | return archiveEndpointsByType;
37 | }
38 |
39 | static deleteFile(fileId, accessToken) {
40 | return this.fetchJsonWithToken(this.getApiBase() + `/archivefile/${fileId}/`, { method: 'DELETE' }, accessToken);
41 | }
42 | }
--------------------------------------------------------------------------------
/frontend/source/js/services/auth-service.js:
--------------------------------------------------------------------------------
1 | import ApiService from './api-service.js';
2 | import config from './../config.js';
3 |
4 | export function generateRandomString() {
5 | const array = new Uint32Array(28);
6 | window.crypto.getRandomValues(array);
7 | return Array.from(array, dec => ('0' + dec.toString(16)).substr(-2)).join('');
8 | }
9 |
10 | function base64UrlEncode(string) {
11 | return btoa(String.fromCharCode.apply(null, new Uint8Array(string)))
12 | .replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
13 | }
14 |
15 | export default class AuthService extends ApiService {
16 | static async getAuthorizationCodeUrl(codeVerifier) {
17 | const codeChallenge = await AuthService.pkceChallengeFromVerifier(codeVerifier);
18 |
19 | const authorizationCodeUrl = new URL(AuthService.getApiBase() + '/oauth/authorize/');
20 | authorizationCodeUrl.search = new URLSearchParams({
21 | approval_prompt: 'auto',
22 | response_type: 'code',
23 | redirect_uri: `https://${config.domain}/oauth-redirect`,
24 | client_id: config.clientId,
25 | code_challenge: codeChallenge,
26 | code_challenge_method: 'S256',
27 | scope: [
28 | 'entry:read',
29 | 'entry:write',
30 | 'source:read',
31 | 'source:write',
32 | 'destination:read',
33 | 'destination:write',
34 | 'archive:read',
35 | 'archive:write',
36 | ].join(' '),
37 | });
38 |
39 | return authorizationCodeUrl;
40 | }
41 |
42 | static async getToken(authorizationCode, codeVerifier) {
43 | const tokenUrl = AuthService.getApiBase() + '/oauth/token/';
44 | const bodyParams = new URLSearchParams({
45 | grant_type: 'authorization_code',
46 | code: authorizationCode,
47 | client_id: config.clientId,
48 | code_verifier: codeVerifier,
49 | });
50 |
51 | return fetch(tokenUrl, {
52 | method: 'POST',
53 | headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
54 | body: bodyParams.toString(),
55 | }).then(response => response.ok ? response.json() : Promise.reject(response));
56 | }
57 |
58 | static async pkceChallengeFromVerifier(codeVerifier) {
59 | const encoder = new TextEncoder();
60 | const sha256Verifier = await window.crypto.subtle.digest('SHA-256', encoder.encode(codeVerifier));
61 | const encodedVerifier = btoa(String.fromCharCode.apply(null, new Uint8Array(sha256Verifier)))
62 | .replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
63 | return encodedVerifier;
64 | }
65 | }
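
Taken together with the auth store and the /oauth-redirect route, the PKCE flow is: generate a verifier, derive its S256 challenge, send the user to the authorization URL, then exchange the returned code plus the original verifier for a token. A condensed sketch of that sequence (assume an async context; the store handles the sessionStorage bookkeeping in the real app):

    import AuthService, { generateRandomString } from './services/auth-service.js';

    // Step 1: create and persist a verifier, then redirect to the provider.
    const codeVerifier = generateRandomString();
    sessionStorage.setItem('codeVerifier', codeVerifier);
    window.location.replace(await AuthService.getAuthorizationCodeUrl(codeVerifier));

    // Step 2 (on /oauth-redirect): trade the code for an access token.
    const { access_token } = await AuthService.getToken(
      new URLSearchParams(window.location.search).get('code'),
      sessionStorage.getItem('codeVerifier'),
    );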
--------------------------------------------------------------------------------
/frontend/source/js/services/googlemaps.js:
--------------------------------------------------------------------------------
1 | import config from './../config.js';
2 |
3 | const GMAPS_CALLBACK_NAME = 'gmapsCallback';
4 |
5 | let googleMapsIsInitialised = !!window.google;
6 | let resolveGMapsInitPromise;
7 | let rejectGMapsInitPromise;
8 |
9 | const initGMapsPromise = new Promise((resolve, reject) => {
10 | resolveGMapsInitPromise = resolve;
11 | rejectGMapsInitPromise = reject;
12 | });
13 |
14 | export function initGoogleMaps() {
15 | if (googleMapsIsInitialised) return initGMapsPromise;
16 |
17 | googleMapsIsInitialised = true;
18 | // The callback function is called by
19 | // the Google Maps script if it is
20 | // successfully loaded.
21 | window[GMAPS_CALLBACK_NAME] = () => resolveGMapsInitPromise(window.google);
22 |
23 | // We inject a new script tag into
24 | // the `` of our HTML to load
25 | // the Google Maps script.
26 | const script = document.createElement('script');
27 | script.async = true;
28 | script.defer = true;
29 | script.src = `https://maps.googleapis.com/maps/api/js?key=${config.googleMapsApiKey}&libraries=places&callback=${GMAPS_CALLBACK_NAME}`;
30 | script.onerror = rejectGMapsInitPromise;
31 | document.querySelector('head').appendChild(script);
32 |
33 | return initGMapsPromise;
34 | }
35 |
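
Consumers await initGoogleMaps() and receive window.google once the injected script has invoked the callback; repeated calls share the same promise, so the script is only injected once. A sketch (element id and map options are placeholders):

    import { initGoogleMaps } from './services/googlemaps.js';

    initGoogleMaps().then(google => {
      // Safe to use the Maps API here; the script has finished loading.
      const map = new google.maps.Map(document.getElementById('map'), {
        center: { lat: 52.52, lng: 13.405 }, // placeholder coordinates
        zoom: 12,
      });
    });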
--------------------------------------------------------------------------------
/frontend/source/js/services/object-service.js:
--------------------------------------------------------------------------------
1 | import ApiService from './api-service.js';
2 |
3 | export default class ApiObjectService extends ApiService {
4 | static objectToFormData(object, attachedFiles) {
5 | throw new Error('objectToFormData is not implemented');
6 | }
7 |
8 | static async get(accessToken) {
9 | // Fetch objects from multiple endpoints. Return a list of objects of all types.
10 | const objectEndpointsByType = await this.getEndpoints(accessToken);
11 | const allObjects = [];
12 | for (const [objectType, objectsOfTypeUrl] of Object.entries(objectEndpointsByType)) {
13 | const objectsOfType = await this.fetchJsonWithToken(objectsOfTypeUrl, {}, accessToken);
14 | objectsOfType.forEach(object => object.type = objectType);
15 | allObjects.push(...objectsOfType);
16 | }
17 |
18 | return allObjects;
19 | }
20 |
21 | static getEndpoints(accessToken) {
22 | return this.fetchJsonWithToken(this.getApiBase() + '/', {}, accessToken);
23 | }
24 |
25 | static create(object, fileAttachments, accessToken) {
26 | return this.fetchJsonWithToken(
27 | this.getApiBase() + `/${object.type}/`,
28 | {
29 | method: 'POST',
30 | body: this.objectToFormData(object, fileAttachments),
31 | },
32 | accessToken
33 | ).then(createdObject => {
34 | createdObject.type = object.type;
35 | return createdObject;
36 | });
37 | }
38 |
39 | static update(object, newFileAttachments, accessToken) {
40 | return this.fetchJsonWithToken(
41 | object.url,
42 | {
43 | method: 'PUT',
44 | body: this.objectToFormData(object, newFileAttachments),
45 | },
46 | accessToken
47 | ).then(updatedObject => {
48 | updatedObject.type = object.type;
49 | return updatedObject;
50 | });
51 | }
52 |
53 | static delete(object, accessToken) {
54 | return this.fetchJsonWithToken(
55 | object.url,
56 | { method: 'DELETE' },
57 | accessToken
58 | );
59 | }
60 | }
--------------------------------------------------------------------------------
/frontend/source/js/services/source-service.js:
--------------------------------------------------------------------------------
1 | import ApiObjectService from './object-service.js';
2 |
3 | export const sourceTypes = {
4 | filesystem: 'Filesystem',
5 | git: 'Git',
6 | hackernews: 'Hacker News',
7 | reddit: 'Reddit',
8 | rss: 'RSS',
9 | rsync: 'Rsync',
10 | trakt: 'Trakt',
11 | twitter: 'Twitter',
12 | };
13 |
14 | export default class extends ApiObjectService {
15 | static getApiBase() {
16 | return super.getApiBase() + '/source';
17 | }
18 |
19 | static objectToFormData(source, attachedFiles) {
20 | const formData = new FormData();
21 | formData.append('key', source.key);
22 | formData.append('description', source.description);
23 | return formData;
24 | }
25 | }
--------------------------------------------------------------------------------
/frontend/source/js/services/timeline-service.js:
--------------------------------------------------------------------------------
1 | import ApiService from './api-service.js';
2 |
3 | export default class extends ApiService {
4 | static getApiBase(){
5 | return super.getApiBase() + '/timeline';
6 | }
7 |
8 | static getEntries(date, filters, accessToken) {
9 | const requestUrl = new URL(this.getApiBase() + '/entries/');
10 | requestUrl.search = new URLSearchParams({
11 | date_on_timeline__gte: moment(date).startOf('day').toJSON(),
12 | date_on_timeline__lt: moment(date).startOf('day').add(1, 'day').toJSON(),
13 | ...filters
14 | });
15 | return this.fetchJsonWithToken(requestUrl, {}, accessToken);
16 | }
17 |
18 | static saveEntry(entry, accessToken){
19 | const isNewEntry = entry.id === null || entry.id === undefined;
20 | const relativeUrl = isNewEntry ? '/entries/' : `/entries/${entry.id}/`
21 | return this.fetchJsonWithToken(
22 | this.getApiBase() + relativeUrl,
23 | {
24 | method: isNewEntry ? 'POST' : 'PUT',
25 | headers: { 'Content-Type': 'application/json', },
26 | body: JSON.stringify(entry),
27 | },
28 | accessToken
29 | );
30 | }
31 |
32 | static deleteEntry(entry, accessToken) {
33 | return this.fetchJsonWithToken(
34 | this.getApiBase() + `/entries/${entry.id}/`,
35 | { method: 'DELETE' },
36 | accessToken
37 | );
38 | }
39 | }
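
getEntries brackets a single day with date_on_timeline__gte and date_on_timeline__lt, and any extra filters are merged into the same query string. For example (the access token and the filter key are assumptions for illustration):

    import TimelineService from './services/timeline-service.js';

    // Fetch all entries dated 14 May 2021, narrowed by a hypothetical filter.
    TimelineService.getEntries('2021-05-14', { schema: 'activity.location' }, accessToken)
      .then(entries => console.log(`${entries.length} entries on the timeline`));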
--------------------------------------------------------------------------------
/frontend/source/js/store/archives.js:
--------------------------------------------------------------------------------
1 | import { RequestStatus } from './../models/requests.js';
2 | import ArchiveService from './../services/archive-service.js';
3 |
4 | export default {
5 | namespaced: true,
6 | state: {
7 | archives: [],
8 | archiveEndpoints: [],
9 | archivesRequestStatus: RequestStatus.NONE,
10 | archivesRequestPromise: null,
11 | },
12 | mutations: {
13 | SET_ARCHIVE_ENDPOINTS(state, archiveEndpoints) {
14 | state.archiveEndpoints = archiveEndpoints;
15 | },
16 | SET_ARCHIVES(state, archives) {
17 | state.archives = archives;
18 | },
19 | ADD_ARCHIVE(state, archive) {
20 | state.archives.push(archive);
21 | },
22 | DELETE_ARCHIVE(state, archive) {
23 | state.archives = state.archives.filter(a => a !== archive);
24 | },
25 | UPDATE_ARCHIVE(state, archive) {
26 | Object.assign(state.archives.find(a => a.key === archive.key), archive);
27 | },
28 | SET_ARCHIVES_REQUEST_PROMISE(state, promise) {
29 | state.archivesRequestPromise = promise;
30 | },
31 | ARCHIVES_REQUEST_SUCCESS(state) {
32 | state.archivesRequestStatus = RequestStatus.SUCCESS;
33 | },
34 | ARCHIVES_REQUEST_PENDING(state) {
35 | state.archivesRequestStatus = RequestStatus.PENDING;
36 | },
37 | ARCHIVES_REQUEST_FAILURE(state) {
38 | state.archivesRequestStatus = RequestStatus.FAILURE;
39 | },
40 | },
41 | actions: {
42 | async getArchiveEndpoints(context) {
43 | return ArchiveService.getEndpoints(context.rootState.auth.accessToken)
44 | .then(archiveEndpoints => {
45 | context.commit('SET_ARCHIVE_ENDPOINTS', archiveEndpoints);
46 | return archiveEndpoints;
47 | });
48 | },
49 | async getArchives(context, forceRefresh=false) {
50 | if (context.state.archivesRequestStatus === RequestStatus.NONE || forceRefresh) {
51 | context.commit('ARCHIVES_REQUEST_PENDING');
52 | const archivesRequestPromise = ArchiveService.get(context.rootState.auth.accessToken)
53 | .then(archives => {
54 | context.commit('SET_ARCHIVES', archives);
55 | context.commit('ARCHIVES_REQUEST_SUCCESS');
56 | return context.state.archives;
57 | })
58 | .catch(async response => {
59 | context.commit('SET_ARCHIVES', []);
60 | context.commit('ARCHIVES_REQUEST_FAILURE');
61 | return Promise.reject(response);
62 | });
63 | context.commit('SET_ARCHIVES_REQUEST_PROMISE', archivesRequestPromise);
64 | return archivesRequestPromise;
65 | }
66 | return context.state.archivesRequestPromise;
67 | },
68 | async createArchive(context, {archive, files}) {
69 | return ArchiveService.create(archive, files, context.rootState.auth.accessToken)
70 | .then((updatedArchive) => {
71 | context.commit('ADD_ARCHIVE', updatedArchive);
72 | return context.state.archives;
73 | });
74 | },
75 | async updateArchive(context, {archive, newFiles}) {
76 | return ArchiveService.update(archive, newFiles, context.rootState.auth.accessToken)
77 | .then((updatedArchive) => {
78 | context.commit('UPDATE_ARCHIVE', updatedArchive);
79 | return context.state.archives;
80 | });
81 | },
82 | async deleteArchive(context, archive) {
83 | return ArchiveService.delete(archive, context.rootState.auth.accessToken)
84 | .then(() => {
85 | context.commit('DELETE_ARCHIVE', archive);
86 | return context.state.archives;
87 | });
88 | },
89 | async deleteArchiveFile(context, fileId) {
90 | return ArchiveService.deleteFile(fileId, context.rootState.auth.accessToken);
91 | }
92 | }
93 | };
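
Note that getArchives memoizes its request promise: repeated dispatches reuse the first result unless forceRefresh is passed as the payload.

    // First call triggers the network request...
    this.$store.dispatch('archives/getArchives');
    // ...later calls reuse the cached promise...
    this.$store.dispatch('archives/getArchives');
    // ...unless a refresh is forced:
    this.$store.dispatch('archives/getArchives', true);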
--------------------------------------------------------------------------------
/frontend/source/js/store/auth.js:
--------------------------------------------------------------------------------
1 | import { RequestStatus } from './../models/requests.js';
2 | import { filters } from './../models/filters.js';
3 | import AuthService, { generateRandomString } from './../services/auth-service.js';
4 |
5 | export default {
6 | namespaced: true,
7 | state: {
8 | accessToken: sessionStorage.getItem('accessToken'),
9 | codeVerifier: sessionStorage.getItem('codeVerifier'),
10 | },
11 | mutations: {
12 | setAccessToken: function (state, accessToken) {
13 | state.accessToken = accessToken;
14 | if (accessToken === null) sessionStorage.removeItem('accessToken'); else sessionStorage.setItem('accessToken', accessToken);
15 | },
16 | setCodeVerifier: function (state, codeVerifier) {
17 | state.codeVerifier = codeVerifier;
18 | if (codeVerifier === null) sessionStorage.removeItem('codeVerifier'); else sessionStorage.setItem('codeVerifier', codeVerifier);
19 | },
20 | },
21 | actions: {
22 | async getAuthorizationCodeUrl(context) {
23 | const codeVerifier = generateRandomString()
24 | context.commit('setCodeVerifier', codeVerifier);
25 | return await AuthService.getAuthorizationCodeUrl(codeVerifier);
26 | },
27 | async getToken(context, authorizationCode) {
28 | const { access_token } = await AuthService.getToken(authorizationCode, context.state.codeVerifier);
29 | context.commit('setAccessToken', access_token);
30 | context.commit('setCodeVerifier', null);
31 | },
32 | async clearToken(context, authorizationCode) {
33 | context.commit('setAccessToken', null);
34 | context.commit('setCodeVerifier', null);
35 | },
36 | }
37 | };
--------------------------------------------------------------------------------
/frontend/source/js/store/store.js:
--------------------------------------------------------------------------------
1 | import archivesStore from './archives.js';
2 | import authStore from './auth.js';
3 | import sourcesStore from './sources.js';
4 | import timelineStore from './timeline.js';
5 |
6 | export default new Vuex.Store({
7 | modules: {
8 | archives: archivesStore,
9 | auth: authStore,
10 | sources: sourcesStore,
11 | timeline: timelineStore,
12 | },
13 | });
--------------------------------------------------------------------------------
/frontend/source/js/utils/entries.js:
--------------------------------------------------------------------------------
1 | export function hasGeolocation(entry) {
2 | return (
3 | entry.extra_attributes.location
4 | && entry.extra_attributes.location.latitude
5 | && entry.extra_attributes.location.longitude
6 | );
7 | }
--------------------------------------------------------------------------------
/geolocation-client/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:slim
2 |
3 | COPY requirements.txt ./
4 | RUN pip install -r requirements.txt
5 | # Note: main.py is not copied into this image; it must be provided at /usr/src/app (e.g. via a bind mount).
6 | CMD ["python", "/usr/src/app/main.py"]
7 |
--------------------------------------------------------------------------------
/geolocation-client/requirements.txt:
--------------------------------------------------------------------------------
1 | paho-mqtt==1.5.1
2 | requests==2.25.0
3 | coloredlogs==14.0
4 | pytz
--------------------------------------------------------------------------------
/geolocation-client/source/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import json
3 | import logging
4 | import os
5 | from datetime import datetime
6 |
7 | import coloredlogs
8 | import paho.mqtt.client as mqtt
9 | import pytz
10 | import requests
11 |
12 | logger = logging.getLogger(__name__)
13 | coloredlogs.install(level='DEBUG', logger=logger)
14 |
15 |
16 | def on_connect(client, userdata, flags, rc):
17 | client.subscribe("+/+/#")
18 | logger.info(f'MQTT client is connected and listening for messages. Return code was {rc}')
19 |
20 |
21 | def on_message(client, userdata, msg):
22 | try:
23 | data = json.loads(msg.payload)
24 | if data['_type'] == 'location':
25 | add_entry({
26 | 'schema': 'activity.location',
27 | 'source': msg.topic,
28 | 'title': '',
29 | 'description': '',
30 | 'extra_attributes': {
31 | 'location': {
32 | 'latitude': data['lat'],
33 | 'longitude': data['lon'],
34 | 'altitude': data.get('alt'),
35 | 'accuracy': data.get('acc'),
36 | },
37 | },
38 | 'date_on_timeline': datetime.fromtimestamp(data['tst'], pytz.UTC).strftime('%Y-%m-%dT%H:%M:%SZ')
39 | })
40 | logger.info(f"Geolocation message processed ({data['lat']}, {data['lon']})")
41 | except Exception:
42 | logger.exception(f"Cannot process message: {msg.payload}")
43 |
44 |
45 | def add_entry(entry: dict):
46 | try:
47 | access_token_response = requests.post('http://timeline-backend/oauth/token/', data={
48 | "client_id": os.environ['GEOLOCATION_CLIENT_ID'],
49 | "client_secret": os.environ['GEOLOCATION_CLIENT_SECRET'],
50 | "scope": "entry:write",
51 | "grant_type": "client_credentials",
52 | }).json()
53 | timeline_response = requests.post(
54 | 'http://timeline-backend/timeline/entries/',
55 | json=entry,
56 | headers={
57 | "Authorization": f"Bearer {access_token_response['access_token']}",
58 | }
59 | )
60 | timeline_response.raise_for_status()
61 | except KeyError:
62 | logger.exception(f"Could not post geolocation on timeline. Unexpected token response: {access_token_response}")
63 | except Exception:
64 | logger.exception("Could not post geolocation on timeline")
65 |
66 |
67 | mqtt_client = mqtt.Client()
68 | mqtt_client.on_connect = on_connect
69 | mqtt_client.on_message = on_message
70 |
71 | mqtt_client.username_pw_set(os.environ['MQTT_USERNAME'], os.environ['MQTT_PASSWORD'])
72 | mqtt_client.connect("mqtt-broker", 1883, 30)
73 | mqtt_client.loop_forever()
74 |
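
For reference, the on_message handler above expects OwnTracks-style location messages; a payload carrying exactly the fields the code reads might look like this (values are illustrative, and `tst` is a Unix timestamp in seconds, as implied by datetime.fromtimestamp):

    {
      "_type": "location",
      "lat": 52.52,
      "lon": 13.405,
      "alt": 34,
      "acc": 10,
      "tst": 1620990000
    }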
--------------------------------------------------------------------------------
/mqtt-broker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM eclipse-mosquitto:1.6.13
2 |
3 | COPY docker-entrypoint.sh /
--------------------------------------------------------------------------------
/mqtt-broker/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/ash
2 | set -e
3 |
4 | # Set permissions
5 | user="$(id -u)"
6 | if [ "$user" = '0' ]; then
7 | [ -d "/mosquitto" ] && chown -R mosquitto:mosquitto /mosquitto || true
8 | fi
9 |
10 | # Set password
11 | touch /mosquitto/config/mosquitto.passwd
12 | mosquitto_passwd -b /mosquitto/config/mosquitto.passwd "${MQTT_USERNAME}" "${MQTT_PASSWORD}"
13 |
14 | exec "$@"
--------------------------------------------------------------------------------
/proxy/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nginx:latest
2 | COPY ssl-certs/* /etc/ssl/certs/
3 | COPY nginx.conf /etc/nginx/conf.d/default.conf
4 |
--------------------------------------------------------------------------------
/proxy/nginx.conf:
--------------------------------------------------------------------------------
1 | server {
2 | listen 80;
3 | return 301 https://$host$request_uri;
4 | }
5 |
6 | server {
7 | listen 443 ssl http2;
8 |
9 | client_max_body_size 10G;
10 |
11 | # Allow large uploads to go through
12 | proxy_read_timeout 30m;
13 | proxy_send_timeout 30m;
14 | client_body_timeout 30m;
15 | keepalive_timeout 30m;
16 | send_timeout 30m;
17 |
18 | gzip on;
19 | gzip_types
20 | text/plain
21 | text/css
22 | text/js
23 | text/xml
24 | text/javascript
25 | application/javascript
26 | application/json
27 | application/xml
28 | application/gpx+xml
29 | application/rss+xml
30 | image/svg+xml;
31 | gzip_min_length 1000;
32 |
33 | ssl_certificate /etc/ssl/certs/cert-chain.crt;
34 | ssl_certificate_key /etc/ssl/certs/server.key;
35 |
36 | location = /favicon.ico {
37 | auth_request off;
38 | return 204;
39 | access_log off;
40 | log_not_found off;
41 | }
42 |
43 | location /api/static/ {
44 | alias /assets/static/;
45 | }
46 |
47 | location /api/ {
48 | proxy_pass http://timeline-backend:80;
49 | proxy_pass_header Authorization;
50 | proxy_set_header SCRIPT_NAME /api;
51 | proxy_set_header Host $host;
52 | proxy_set_header X-Forwarded-Host $host;
53 | proxy_set_header X-Forwarded-Proto $scheme;
54 | proxy_redirect off;
55 | }
56 |
57 | location /data/archives/ {
58 | alias /data/archives/;
59 | }
60 |
61 | location /data/backups/ {
62 | alias /data/backups/;
63 | }
64 |
65 | location /assets/previews/ {
66 | alias /assets/previews/;
67 | }
68 |
69 | location /data/mounts/ {
70 | alias /data/mounts/;
71 | }
72 |
73 | location / {
74 | access_log off;
75 | proxy_pass http://timeline-frontend:80;
76 | proxy_redirect off;
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/scripts/copy-production-db.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 | set -x
4 |
5 | scripts_dir=$(dirname "$0")
6 | today=$(date "+%Y-%m-%d")
7 |
8 | scp -P 2200 root@home.nicolasbouliane.com:/var/timeline/db-backups/backup-${today}.sql backup-${today}.sql
9 | bash ${scripts_dir}/db-restore.sh backup-${today}.sql
10 |
--------------------------------------------------------------------------------
/scripts/db-backup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | echo -e "\n\033[1mBacking up the database to ${1}\033[0m"
4 | docker exec -t $(docker-compose ps -q timeline-db) pg_dump --if-exists --clean --create -U postgres -f /tmp/db-dump.sql timeline
5 | docker cp $(docker-compose ps -q timeline-db):/tmp/db-dump.sql "$1"
6 | docker exec -t $(docker-compose ps -q timeline-db) rm /tmp/db-dump.sql
7 | echo "Done."
8 |
--------------------------------------------------------------------------------
/scripts/db-migrate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | docker-compose exec timeline-backend python manage.py makemigrations && docker-compose exec timeline-backend python manage.py migrate
3 |
--------------------------------------------------------------------------------
/scripts/db-restore.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | echo -e "\n\033[1mRestoring database from ${1}\033[0m"
4 | cat "$1" | docker-compose exec -T timeline-db psql -U postgres
5 | echo "Done."
6 |
--------------------------------------------------------------------------------
/scripts/dev-env.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | open -a 'PyCharm CE'
--------------------------------------------------------------------------------
/scripts/timeline-create-user.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | docker-compose exec timeline-backend python manage.py createsuperuser
3 |
--------------------------------------------------------------------------------
/scripts/timeline-export.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | docker-compose exec timeline-backend python manage.py export "$@"
3 |
--------------------------------------------------------------------------------
/scripts/timeline-import.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | docker-compose exec timeline-backend python manage.py import "$@"
3 |
--------------------------------------------------------------------------------
/scripts/timeline-shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | docker-compose exec timeline-backend python manage.py shell
3 |
--------------------------------------------------------------------------------