├── server ├── tests │ ├── __init__.py │ ├── x.png │ ├── db-wipe.sql │ ├── .svarog.ini.github │ ├── test_repository.py │ ├── utils.py │ ├── db-data.psql │ ├── dbtest.py │ ├── test_pagination.py │ ├── test_web_interface.py │ └── test_repository_postgres.py ├── .gitignore ├── .flaskenv ├── app │ ├── static │ │ ├── favicon.png │ │ ├── bootstrap-custom.css │ │ └── svarog.css │ ├── controllers │ │ ├── __init__.py │ │ ├── stations.py │ │ ├── station.py │ │ ├── obslist.py │ │ ├── login.py │ │ ├── obs.py │ │ └── receive.py │ ├── routes.py │ ├── template_globals.py │ ├── __init__.py │ ├── utils.py │ ├── authorize_station.py │ ├── tle_diagrams.py │ ├── hmac_token.py │ └── pagination.py ├── db │ ├── svarog-03.psql │ ├── svarog-04.psql │ ├── svarog-09.psql │ ├── svarog-06.psql │ ├── svarog-08.psql │ ├── svarog-10.psql │ ├── svarog-07.psql │ ├── svarog-01.psql │ ├── svarog-05.psql │ ├── svarog-02.psql │ └── svarog-11.psql ├── svarog-web.py ├── .svarog.ini.travis ├── svarog.ini.template ├── templates │ ├── obs_delete.html │ ├── macros.jinja │ ├── index.html │ ├── stations.html │ ├── login.html │ ├── pagination.html │ ├── station.html │ ├── base.html │ ├── obs.html │ └── obslist.html ├── examples │ └── requests.http ├── scripts │ └── create_charts.py ├── update.sh ├── setup.py └── migrate_db.py ├── logo.png ├── doc ├── logo.png ├── automation-server-arch.png ├── nginx │ ├── unit.json │ └── nginx ├── arch.md ├── gunicorn │ └── gunicorn-svarog.service ├── devel.md ├── users.md ├── submit_obs.md ├── apache2 │ ├── svarog.wsgi │ └── svarog.conf ├── automation-server-arch.drawio └── install.md ├── requirements.in ├── .github └── workflows │ ├── pytest-3.11.yml │ ├── pytest-3.12.yml │ ├── pylint.yml │ ├── flake8.yml │ └── pytest-main.yml ├── .gitignore ├── .flake8.ini ├── arch.md ├── SECURITY.md ├── .travis.yml ├── LICENSE ├── README.md ├── requirements.txt └── .pylint /server/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | build/ 2 | .env/ 3 | -------------------------------------------------------------------------------- /server/.flaskenv: -------------------------------------------------------------------------------- 1 | FLASK_APP=svarog-web.py 2 | -------------------------------------------------------------------------------- /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gut-space/svarog-server/HEAD/logo.png -------------------------------------------------------------------------------- /doc/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gut-space/svarog-server/HEAD/doc/logo.png -------------------------------------------------------------------------------- /server/tests/x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gut-space/svarog-server/HEAD/server/tests/x.png -------------------------------------------------------------------------------- /server/app/static/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gut-space/svarog-server/HEAD/server/app/static/favicon.png -------------------------------------------------------------------------------- 
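The db/ directory shown in the tree above holds the database schema as a sequence of numbered migrations (svarog-01.psql through svarog-11.psql). Each file ends by bumping the version stored in the single-row schema table, and the CI setup runs migrate_db.py to bring a fresh database up to date. The sketch below only illustrates that pattern; it is not the contents of server/migrate_db.py (which is not reproduced here), and the function name and connection handling are assumptions.

```python
# Illustrative sketch only -- NOT the real server/migrate_db.py.
# Names and connection parameters below are hypothetical.
import glob
import os
import re

import psycopg2


def apply_pending_migrations(conn, db_dir="db"):
    """Apply db/svarog-NN.psql files newer than the current schema version."""
    with conn.cursor() as cur:
        # The single-row "schema" table is created in svarog-02.psql; a brand new
        # database would need svarog-01/02 applied before this query can succeed.
        cur.execute("SELECT version FROM schema")
        current_version = cur.fetchone()[0]

    # Zero-padded names (svarog-01 ... svarog-11) sort correctly as strings.
    for path in sorted(glob.glob(os.path.join(db_dir, "svarog-*.psql"))):
        number = int(re.search(r"svarog-(\d+)\.psql$", path).group(1))
        if number <= current_version:
            continue
        with open(path, encoding="utf-8") as migration, conn.cursor() as cur:
            # Each migration ends with "UPDATE schema SET version = NN;",
            # so the recorded version advances together with the schema change.
            cur.execute(migration.read())
        conn.commit()


if __name__ == "__main__":
    connection = psycopg2.connect(host="localhost", dbname="svarog", user="postgres")
    apply_pending_migrations(connection)
    connection.close()
```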
/server/db/svarog-03.psql: -------------------------------------------------------------------------------- 1 | ALTER TABLE stations ADD COLUMN secret char(16); 2 | 3 | UPDATE schema SET version = 3; 4 | -------------------------------------------------------------------------------- /doc/automation-server-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gut-space/svarog-server/HEAD/doc/automation-server-arch.png -------------------------------------------------------------------------------- /server/db/svarog-04.psql: -------------------------------------------------------------------------------- 1 | ALTER TABLE stations ALTER COLUMN secret TYPE bytea USING secret::bytea; 2 | 3 | UPDATE schema SET version = 4; -------------------------------------------------------------------------------- /server/svarog-web.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from app import app 3 | 4 | app.config['SECRET_KEY'] = 'the earth is flat' 5 | 6 | if __name__ == '__main__': 7 | app.run(debug=True, port=8080) 8 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | astropy 2 | Flask 3 | flask_wtf 4 | flask_login 5 | psycopg2-binary 6 | webargs 7 | typing_extensions 8 | python-dateutil 9 | python-crontab 10 | orbit-predictor 11 | matplotlib 12 | pyorbital 13 | -------------------------------------------------------------------------------- /server/.svarog.ini.travis: -------------------------------------------------------------------------------- 1 | [database] 2 | host = localhost 3 | database = svarog 4 | user = postgres 5 | password = "" 6 | 7 | [storage] 8 | image_root = ~/tmp 9 | 10 | [security] 11 | ignore_hmac_validation_errors = False 12 | 13 | [view] 14 | items_per_page = 100 15 | -------------------------------------------------------------------------------- /.github/workflows/pytest-3.11.yml: -------------------------------------------------------------------------------- 1 | name: pytest-3.11 2 | on: push 3 | 4 | jobs: 5 | ubuntu-latest-build: 6 | uses: ./.github/workflows/pytest-main.yml 7 | with: 8 | python-version: 3.11 9 | 10 | permissions: 11 | contents: read 12 | pull-requests: write 13 | -------------------------------------------------------------------------------- /.github/workflows/pytest-3.12.yml: -------------------------------------------------------------------------------- 1 | name: pytest-3.12 2 | on: push 3 | 4 | jobs: 5 | ubuntu-latest-build: 6 | uses: ./.github/workflows/pytest-main.yml 7 | with: 8 | python-version: 3.12 9 | 10 | permissions: 11 | contents: read 12 | pull-requests: write 13 | -------------------------------------------------------------------------------- /server/app/static/bootstrap-custom.css: -------------------------------------------------------------------------------- 1 | .tooltip-inner { 2 | min-width: 480px; /* the minimum width */ 3 | } 4 | 5 | .text-danger { 6 | color: red 7 | } 8 | 9 | .text-success { 10 | color: rgb(0,180, 0) 11 | } 12 | 13 | .text-warning { 14 | color: orange 15 | } 16 | 17 | -------------------------------------------------------------------------------- /server/svarog.ini.template: -------------------------------------------------------------------------------- 1 | [database] 2 | host = localhost 3 | database = svarog 4 | user = postgres 5 | 
password = postgres 6 | 7 | [storage] 8 | image_root = /home/svarog/data 9 | 10 | [security] 11 | ignore_hmac_validation_errors = false 12 | 13 | [view] 14 | items_per_page = 100 15 | 16 | -------------------------------------------------------------------------------- /server/tests/db-wipe.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE station_owners; 2 | DROP TABLE station_photos; 3 | DROP VIEW observation_ratings; 4 | DROP TABLE observation_files; 5 | DROP TABLE observations; 6 | DROP TABLE stations; 7 | DROP TABLE satellites; 8 | DROP TABLE users; 9 | DROP TABLE schema; 10 | DROP TYPE user_type; 11 | -------------------------------------------------------------------------------- /server/tests/.svarog.ini.github: -------------------------------------------------------------------------------- 1 | [database] 2 | host = localhost 3 | user = postgres 4 | password = postgres 5 | database = svarog 6 | maintenance_database = postgres 7 | 8 | [storage] 9 | image_root = ~/tmp 10 | 11 | [security] 12 | ignore_hmac_validation_errors = False 13 | 14 | [view] 15 | items_per_page = 100 16 | -------------------------------------------------------------------------------- /server/templates/obs_delete.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |

Observation Deletion {{ obs_id }}

5 | 6 |

STATUS:

7 | 12 |

Now move to the list of observations.

13 | {% endblock %} 14 | -------------------------------------------------------------------------------- /server/app/controllers/__init__.py: -------------------------------------------------------------------------------- 1 | import glob 2 | from os.path import dirname, basename, isfile, join 3 | 4 | # Default export of all controllers in directory 5 | # for one time import in routes.py. 6 | # In this directory should be placed only Flask controllers. 7 | modules = glob.glob(join(dirname(__file__), "*.py")) 8 | 9 | __all__ = [basename(f)[:-3] for f in modules if isfile(f)] 10 | -------------------------------------------------------------------------------- /server/templates/macros.jinja: -------------------------------------------------------------------------------- 1 | {% macro rating_icon(value, border=True) %} 2 | 7 | {{ value | percentage }} 8 | 9 | {% endmacro %} 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /station/config/config.yml 2 | /station/config/http* 3 | __pycache__ 4 | venv 5 | .DS_Store 6 | .vscode/ 7 | /server/svarog.ini 8 | /server/app/data 9 | /server/.env/bin/ 10 | /server/.env/ 11 | /server/data/ 12 | /server/build/ 13 | /server/cache_tmp/ 14 | /server/backups/ 15 | /server/svarog_server.egg-info/ 16 | /server/dist/ 17 | 18 | *.pyc 19 | 20 | 21 | # Local observations should be ignored. 22 | /observations/ 23 | /data/ 24 | -------------------------------------------------------------------------------- /server/db/svarog-09.psql: -------------------------------------------------------------------------------- 1 | ALTER TABLE observation_files ADD COLUMN rating real DEFAULT NULL; 2 | 3 | -- General puropose ratings for observations 4 | 5 | CREATE VIEW observation_ratings AS 6 | SELECT observation_files.obs_id as obs_id, 7 | AVG(observation_files.rating) as rating 8 | FROM observation_files 9 | WHERE observation_files.rating IS NOT NULL 10 | GROUP BY observation_files.obs_id; 11 | 12 | UPDATE schema SET version = 9; 13 | -------------------------------------------------------------------------------- /server/tests/test_repository.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | 4 | from app.repository import Repository 5 | 6 | 7 | class RepositoryTests(unittest.TestCase): 8 | 9 | @mock.patch("psycopg2.connect") 10 | def test_db_version(self, mock_connect): 11 | repostory = Repository({}) 12 | mock_cursor = mock_connect.return_value.cursor.return_value 13 | mock_cursor.fetchone.side_effect = [{"count": 1}, (3,), {"version": 15}] 14 | 15 | version = repostory.get_database_version() 16 | self.assertEqual(version, 15) 17 | -------------------------------------------------------------------------------- /.flake8.ini: -------------------------------------------------------------------------------- 1 | [flake8] 2 | 3 | # List of files and dirs to ignore 4 | exclude = 5 | .git, 6 | __pycache__, 7 | venv 8 | 9 | # Print the total number of errors 10 | count = True 11 | 12 | # Print the source code generating the error/warning in question 13 | show-source = True 14 | 15 | # Count the number of occurrences of each error/warning code and print a report. 16 | statistics = True 17 | 18 | # Max line lenght 19 | max-line-length = 160 20 | 21 | # The following errors and warnings will be ignored. 
22 | extend-ignore = 23 | # E4, 24 | # E51, 25 | # W234 26 | -------------------------------------------------------------------------------- /doc/nginx/unit.json: -------------------------------------------------------------------------------- 1 | { 2 | "listeners": { 3 | "127.0.0.1:1234": { 4 | "pass": "applications/flask" 5 | } 6 | }, 7 | 8 | "applications": { 9 | "flask": { 10 | "type": "python 3.9", 11 | "path": "/home/svarog/devel/svarog/server/", 12 | "home": "/home/svarog/devel/svarog/server/venv/", 13 | "working_directory": "/home/svarog", 14 | "module": "wsgi", 15 | "user": "svarog", 16 | "group": "svarog", 17 | "callable": "app" 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /server/app/routes.py: -------------------------------------------------------------------------------- 1 | from flask import render_template, send_from_directory, send_file 2 | from app import app 3 | # Load routes from modules in "controllers" directory 4 | from app.controllers import login, obs, obslist, receive, station, stations # noqa: F401 5 | 6 | 7 | @app.route('/') 8 | @app.route('/index') 9 | def index(): 10 | return render_template("index.html") 11 | 12 | 13 | @app.route('/favicon.png') 14 | def favicon(): 15 | return send_file('favicon.png') 16 | 17 | 18 | @app.route('/data/') 19 | def send_js(path): 20 | return send_from_directory('data', path) 21 | -------------------------------------------------------------------------------- /server/db/svarog-06.psql: -------------------------------------------------------------------------------- 1 | ALTER TABLE observations 2 | ALTER COLUMN tle TYPE varchar(69)[2]; -- data cannot include trailing character 3 | 4 | -- From Postgres 12 or less documentation: 5 | -- However, the current implementation ignores any supplied array size limits, 6 | -- i.e., the behavior is the same as for arrays of unspecified length. 7 | 8 | ALTER TABLE observations ADD CONSTRAINT tle_length_check CHECK ( 9 | (tle is NULL) OR ( 10 | array_length(tle, 1) = 2 AND 11 | LENGTH(tle[1]) = 69 AND 12 | LENGTH(tle[2]) = 69 13 | ) 14 | ); 15 | 16 | UPDATE schema SET version = 6; -------------------------------------------------------------------------------- /arch.md: -------------------------------------------------------------------------------- 1 | [doc](../README.md) > Architecture 2 | 3 | # Svarog Architecture 4 | 5 | ![Svarog architecture](automation-server-arch.png) 6 | 7 | The project consists of two major components: a station and a server. Station is a reasonably small set of tools that's 8 | intended to be run on Rasbperry Pi (although it can be run on any Linux machine) that conducts the actual observations 9 | and does most of the data processing. Once an observation is completed, it is uploaded to the server. Station is 10 | intended to be fully automated. 11 | 12 | The server is able to receive decoded information from multiple stations. 13 | -------------------------------------------------------------------------------- /doc/arch.md: -------------------------------------------------------------------------------- 1 | [doc](../README.md) > Architecture 2 | 3 | # Svarog Architecture 4 | 5 | ![satnogs architecture](automation-server-arch.png) 6 | 7 | The project consists of two major components: a station and a server. 
The station is a reasonably small set of tools that's 8 | intended to be run on a Raspberry Pi (although it can be run on any Linux machine) and that conducts the actual observations 9 | and does most of the data processing. Once an observation is completed, it is uploaded to the server. The station is 10 | intended to be fully automated. 11 | 12 | The server is able to receive decoded information from multiple stations. 13 | -------------------------------------------------------------------------------- /server/templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | 4 | {% block content %} 5 |

SVAROG Project

6 |

This is a small network of satellite ground stations, which aims to receive signals from 7 | satellites. It's a project created in 2020 by Sławek Figiel, Tomek Mrugalski and Ewelina Omernik, three 8 | students of the Space and Satellite Technologies programme at Gdańsk University of Technology.

9 | 10 |

This project is available on GitHub: server 11 | and station.

12 | 13 | 14 | {% endblock %} -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | The only supported version is the latest code from the `master` branch. 6 | 7 | | Version | Supported | 8 | | ------- | ------------------ | 9 | | master | :heavy_check_mark: | 10 | 11 | ## Reporting a Vulnerability 12 | 13 | Svarog is a small project, run by two volunteers with limited time. 14 | Tomek can be reached at thomson (at) klub (dot) com (dot) pl and Sławek at 15 | fivitti (at) gmail (dot) com. If you report a vulnerability, we'll try to 16 | address it as soon as possible. However, keep in mind that Svarog is a hobby 17 | project and we have full time jobs and other family responsibilities. 18 | -------------------------------------------------------------------------------- /server/examples/requests.http: -------------------------------------------------------------------------------- 1 | POST http://127.0.0.1:5000/receive HTTP/1.1 2 | Content-Type: multipart/form-data; boundary=boundary 3 | Authorization: HMAC-SHA256 1,2020-02-20T21:56,sig 4 | 5 | --boundary 6 | Content-Disposition: form-data; name="aos" 7 | 8 | 2020-02-20T21:39:26 9 | --boundary 10 | Content-Disposition: form-data; name="tca" 11 | 12 | 2020-02-20T21:39:26 13 | --boundary 14 | Content-Disposition: form-data; name="los" 15 | 16 | 2020-02-20T21:39:26 17 | --boundary 18 | Content-Disposition: form-data; name="sat" 19 | 20 | NOAA-15 21 | --boundary 22 | Content-Disposition: form-data; name="file"; filename="x.png" 23 | Content-Type: image/png 24 | 25 | < ./x.png 26 | --boundary-- -------------------------------------------------------------------------------- /server/db/svarog-08.psql: -------------------------------------------------------------------------------- 1 | 2 | -- Create enum. Using enums may be a bit tricky. But let's see how it works out. 3 | CREATE TYPE user_type AS ENUM ( 4 | 'regular', -- normal user who just created account 5 | 'owner', -- station owner 6 | 'admin', -- admin 7 | 'banned'); -- someone we don't like anymore 8 | 9 | -- Add a table to store users 10 | CREATE TABLE users ( 11 | id SERIAL primary key, 12 | username VARCHAR(32), 13 | digest VARCHAR(128) NOT NULL, -- length of SHA256 digest is 32 bytes, but we're using whatever was returned by werkzeug.security 14 | email VARCHAR(128), 15 | role user_type DEFAULT 'regular' 16 | ); 17 | 18 | -- We're at 8 already. 19 | UPDATE schema SET version = 8; 20 | -------------------------------------------------------------------------------- /server/tests/utils.py: -------------------------------------------------------------------------------- 1 | def check_output(self, output: str, strings): 2 | """Checks if specified output (presumably stdout) has appropriate content. strings 3 | is a list of strings that are expected to be present. They're expected 4 | to appear in the specified order, but there may be other things 5 | printed in between them. Will assert if string is not found. 
""" 6 | 7 | # Make sure we're dealing with a string, and not something similar (like bytes) 8 | output = str(output) 9 | 10 | offset = 0 11 | for s in strings: 12 | new_offset = output.find(s, offset) 13 | 14 | self.assertNotEqual(new_offset, -1, "Not found an expected string: '%s'" % s) 15 | # string found, move to its end and continue searching 16 | offset = new_offset + len(s) 17 | -------------------------------------------------------------------------------- /doc/gunicorn/gunicorn-svarog.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Gunicorn instance for Svarog 3 | After=network.target 4 | 5 | [Service] 6 | User=svarog 7 | Group=svarog 8 | WorkingDirectory=/home/svarog/devel/svarog-server/server 9 | Environment="PATH=/home/svarog/devel/svarog-server/server/venv" 10 | 11 | # If you want to enable TLS directly in the gunicorn, use the following: 12 | # --keyfile /home/svarog/tls/privkey.pem --certfile /home/svarog/tls/cert.pem \ 13 | ExecStart=/home/svarog/devel/svarog-server/server/venv/bin/gunicorn \ 14 | --access-logfile /home/svarog/logs/access.log --error-logfile /home/svarog/logs/error.log \ 15 | --workers 1 --bind 127.0.0.1:5002 app:app 16 | ExecReload=/bin/kill -s HUP $MAINPID 17 | KillMode=mixed 18 | TimeoutStopSec=5 19 | PrivateTmp=true 20 | 21 | [Install] 22 | WantedBy=multi-user.target 23 | -------------------------------------------------------------------------------- /doc/devel.md: -------------------------------------------------------------------------------- 1 | # Developer's guide 2 | 3 | 1. **Running unit-tests** 4 | 5 | Developers may be interested in running unit tests. The best way to do that is to call a command 6 | `python -m pytest -s -v` (if you call `pytest -s -v` instead, you risk running mismatched python 7 | version and also errors about missing imports). 8 | 9 | 2. **Running linter** 10 | 11 | ``` 12 | pylint --rcfile .pylint $(git ls-files 'station/*.py') 13 | ``` 14 | 15 | ``` 16 | flake8 --config .flake8.ini --color=auto $(git ls-files 'station/*.py') 17 | ``` 18 | 19 | 2. **Developer's environment** 20 | 21 | You may want to set DEV_ENVIRONMENT variable in your setup. If set to 1, it will 22 | enable debug logging and will use local files for crontab and config file. 23 | Setting its value to 2 gets the same result, except not enabling debug logging, 24 | which is useful for cleaner test runs. 25 | -------------------------------------------------------------------------------- /server/db/svarog-10.psql: -------------------------------------------------------------------------------- 1 | -- This schema version introduces station ownership relation. 2 | -- For each station, there can be an arbitrary number of users that are owners. 3 | -- In the future, the owner will be able to some administrative actions, such as 4 | -- deleting observation, edit station details, etc. 
5 | 6 | CREATE TABLE station_owners ( 7 | user_id int NOT NULL, 8 | station_id int NOT NULL, 9 | PRIMARY KEY (user_id, station_id), 10 | CONSTRAINT fk_user FOREIGN KEY(user_id) REFERENCES users(id) ON UPDATE CASCADE ON DELETE CASCADE, 11 | CONSTRAINT fk_station FOREIGN KEY(station_id) REFERENCES stations(station_id) ON UPDATE CASCADE ON DELETE CASCADE 12 | ); 13 | 14 | CREATE INDEX fk_station_owners_user_id ON station_owners (user_id); 15 | CREATE INDEX fk_station_owners_station_id ON station_owners (station_id); 16 | 17 | UPDATE schema SET version = 10; 18 | -------------------------------------------------------------------------------- /server/db/svarog-07.psql: -------------------------------------------------------------------------------- 1 | -- Add foreign key on observations referenced to satellite 2 | 3 | -- Fill missing IDs, name column was droped earlier and we lost the data 4 | UPDATE observations 5 | SET sat_id = s.sat_id 6 | FROM satellites s 7 | WHERE observations.sat_id is NULL AND 8 | UPPER(s.sat_name) = CASE 9 | WHEN REPLACE(observations.thumbnail, '-', ' ') ILIKE '%NOAA 15%' THEN 'NOAA 15' 10 | WHEN REPLACE(observations.thumbnail, '-', ' ') ILIKE '%NOAA 18%' THEN 'NOAA 18' 11 | WHEN REPLACE(observations.thumbnail, '-', ' ') ILIKE '%NOAA 19%' THEN 'NOAA 19' 12 | ELSE 'NOAA 15' 13 | END; 14 | 15 | ALTER TABLE observations ALTER COLUMN sat_id SET NOT NULL; 16 | 17 | -- Add reference 18 | ALTER TABLE observations 19 | ADD CONSTRAINT fk_sat_id FOREIGN KEY (sat_id) REFERENCES satellites (sat_id) 20 | ON DELETE CASCADE; 21 | 22 | UPDATE schema SET version = 7; 23 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: -------------------------------------------------------------------------------- 1 | name: Pylint 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | python-version: [3.11, 3.12] 17 | 18 | permissions: 19 | contents: read 20 | pull-requests: write 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | python -m pip install pylint wheel 32 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 33 | - name: Analyse with pylint 34 | run: | 35 | PYTHONPATH=.:server/ pylint --rcfile .pylint $(git ls-files 'server/*.py') 36 | -------------------------------------------------------------------------------- /server/db/svarog-01.psql: -------------------------------------------------------------------------------- 1 | 2 | DROP TABLE IF EXISTS observations; 3 | DROP TABLE IF EXISTS satellites; 4 | 5 | -- This table contains information about the observations conducted. 6 | CREATE TABLE observations ( 7 | obs_id serial PRIMARY KEY NOT NULL, 8 | aos TIMESTAMP, -- Aquisiton of signal 9 | tca TIMESTAMP, -- Time of closest approach 10 | los TIMESTAMP, -- Loss of signal 11 | sat_id int, -- this is norad id 12 | sat_name VARCHAR(32), 13 | filename VARCHAR(255), 14 | notes VARCHAR 15 | ); 16 | 17 | -- This table contains list of satellites we're observing. 
18 | CREATE TABLE satellites ( 19 | sat_id int PRIMARY KEY NOT NULL, 20 | sat_name VARCHAR(32), 21 | url VARCHAR(128) 22 | ); 23 | 24 | INSERT INTO satellites VALUES(25338, 'NOAA-15', 'https://www.n2yo.com/satellite/?s=25338'); 25 | INSERT INTO satellites VALUES(28654, 'NOAA-18', 'https://www.n2yo.com/satellite/?s=28654'); 26 | INSERT INTO satellites VALUES(33591, 'NOAA-19', 'https://www.n2yo.com/satellite/?s=33591'); 27 | 28 | -------------------------------------------------------------------------------- /doc/users.md: -------------------------------------------------------------------------------- 1 | [doc](../README.md) > Users 2 | 3 | # Users Management 4 | 5 | Users information has been introduced in [schema update 08](../server/db/svarog-08.psql). There's a new table that holds 6 | user-id, username, salted SHA256 digest, and user role. Until we add flask-admin (or some other management panel), the 7 | process of adding new users is manual. 8 | 9 | This is pretty basic for now. One day we will add a capability for the user registration. 10 | 11 | To add a new user: 12 | 13 | 1. log into the server using ssh 14 | 2. psql svarog 15 | 3. add entry for new user 16 | 17 | ```sql 18 | INSERT INTO users(id, username, digest, email, role) 19 | VALUES(1, 20 | 'admin', 21 | 'pbkdf2:sha256:150000$kTuJClSh$2e93de2d7a169df346a577a24ccc85c2cf1ff62e5a64f944a301cda76ce39c68', 22 | 'spam@wp.pl', 23 | 'admin'); 24 | ``` 25 | 26 | You can generate the hash using the following python commands: 27 | 28 | ```python 29 | from werkzeug.security import generate_password_hash 30 | generate_password_hash('secret1') 31 | ``` 32 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "3.8" 5 | - "3.9" 6 | 7 | env: 8 | - SHELLCHECK_OPTS="" 9 | 10 | #services: 11 | 12 | # - postgresql 13 | 14 | addons: 15 | postgresql: "10" 16 | apt: 17 | packages: 18 | - postgresql-10 19 | - postgresql-client-10 20 | - shellcheck 21 | 22 | install: 23 | - pip install -r station/requirements.txt 24 | - pip install -r server/requirements.txt 25 | - pip install pytest 26 | 27 | before_script: 28 | - psql -c 'create database svarog;' -U postgres 29 | - cp server/.svarog.ini.travis server/svarog.ini 30 | - cd server && python migrate_db.py && cd .. 31 | - psql -f server/tests/db-data.psql svarog 32 | - mkdir -p ~/.config/svarog-gut 33 | 34 | script: 35 | - cd station && pytest -v && cd .. 36 | - cd server && PYTHONTEST=. pytest -v && cd .. 37 | - find . 
-name '*.sh' | xargs shellcheck ${SHELLCHECK_OPTS} 38 | 39 | notifications: 40 | email: 41 | recipients: 42 | - spam.travis@klub.com.pl 43 | on_success: change 44 | on_failure: always 45 | -------------------------------------------------------------------------------- /server/db/svarog-05.psql: -------------------------------------------------------------------------------- 1 | CREATE TABLE observation_files 2 | ( 3 | obs_file_id serial PRIMARY KEY, 4 | filename VARCHAR(255) NOT NULL, 5 | media_type VARCHAR(255) NOT NULL, 6 | obs_id INTEGER REFERENCES observations ON DELETE CASCADE 7 | ); 8 | 9 | -- Index on foreign key 10 | CREATE INDEX idx_observation_files_obs_id ON observation_files(obs_id); 11 | 12 | ALTER TABLE observations ADD COLUMN thumbnail VARCHAR(255); 13 | 14 | UPDATE observations 15 | SET thumbnail='thumb-' || filename; 16 | 17 | -- Move current files to new table 18 | INSERT INTO observation_files(filename, media_type, obs_id) 19 | SELECT filename, 'image/png', obs_id 20 | FROM observations 21 | WHERE filename IS NOT NULL; 22 | 23 | ALTER TABLE observations DROP COLUMN filename; 24 | -- This column repeats data from satellites table 25 | ALTER TABLE observations DROP COLUMN sat_name; 26 | 27 | -- Normalize default names to Celestrak compatible 28 | UPDATE satellites 29 | SET sat_name=REPLACE(sat_name, '-', ' ') 30 | WHERE LEFT(sat_name, 5) = 'NOAA-'; 31 | 32 | UPDATE schema SET version = 5; 33 | -------------------------------------------------------------------------------- /server/templates/stations.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 |

Ground stations

6 | 7 |

8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | {% for s in stations %} 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | {% endfor %} 25 |
IDNameCoords# of observationsRegisteredLast observation
{{ s.station_id }}{{ s.name }}{{ s.coords }}{{ s.cnt }} [list]{% if s.registered %} {{ s.registered.isoformat(" ", "seconds") }} {% else %}Unknown{% endif %}{% if s.lastobs %} {{ s.lastobs|datetime }} {% else %}None{% endif %}
26 |

27 | 28 | {% include 'pagination.html' %} 29 | 30 | {% endblock %} 31 | 32 | -------------------------------------------------------------------------------- /doc/submit_obs.md: -------------------------------------------------------------------------------- 1 | # Submitting observations 2 | 3 | This page describes the protocol and parameters necessary for submitting new 4 | observations. It is a work in progress. 5 | 6 | ## Configuration 7 | 8 | Configuration should be submitted as JSON. There's a number of parameters that 9 | are (almost) mandatory: 10 | 11 | - protocol - string, e.g. APT, BPSK, HRPT 12 | - frequency - floating, expressed in Hz 13 | - antenna - string, model of the antenna used to receive data. Feel free to specify exact brand and model number 14 | - antenna-type - string, defines type, e.g. yagi, crossed dipole, helix, parabolic etc. 15 | - receiver - string, specifies the model of your SDR (or radio in general), e.g. AirSpy Mini 16 | - lna - string, specifies the Low Noise Amplifier. Please use `none` if you don't use LNA 17 | - filter - string, describe the filter or filters you're using. Please use `none` if you don't use LNA 18 | 19 | The structure is flexible, and it's OK to submit additional parameters. 20 | If you define new parameters, please use all lowercase name. Try to be consistent 21 | and don't use abbreviations. The parameter name will be shown as is in the UI. -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Slawomir Figiel and Tomek Mrugalski 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /doc/nginx/nginx: -------------------------------------------------------------------------------- 1 | # svarog.space 2 | # 3 | # This is a site configuration for nginx. You probably want to put it in 4 | # /etc/nginx/sites-available and then add a link to in in /etc/nginx/sites-enabled 5 | # 6 | # For it to work, you need to have unit (https://unit.nginx.org/) running 7 | # See the related unit.json for Unit configuration. 
8 | server { 9 | listen 80; 10 | listen [::]:80; 11 | 12 | # SSL configuration 13 | listen 443 ssl; 14 | listen [::]:443 ssl; 15 | ssl_certificate /etc/letsencrypt/fullchain.pem; 16 | ssl_certificate_key /etc/letsencrypt/privkey.pem; 17 | 18 | index index.html; 19 | 20 | server_name svarog.space; 21 | 22 | location ^~ /data/ { 23 | rewrite ^/data/(.*)$ /$1 break; 24 | root /home/svarog/data; 25 | } 26 | 27 | location ^~ /viewer { 28 | root /home/svarog/public_html; 29 | index index.html; 30 | try_files $uri $uri/ $uri/index.html =405; 31 | } 32 | 33 | access_log /home/svarog/logs/nginx-access.log; 34 | error_log /home/svarog/logs/nginx-error.log debug; 35 | 36 | location / { 37 | 38 | proxy_pass http://127.0.0.1:5002; 39 | proxy_set_header Host $host; 40 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /server/app/controllers/stations.py: -------------------------------------------------------------------------------- 1 | from app.repository import Repository 2 | from app import app, utils 3 | 4 | from app.pagination import use_pagination 5 | 6 | 7 | @app.route('/stations') 8 | @use_pagination() 9 | def stations(limit_and_offset): 10 | '''This function retrieves list of all registered ground stations.''' 11 | repository = Repository() 12 | stations = repository.read_stations(**limit_and_offset) 13 | statistics = repository.read_stations_statistics(**limit_and_offset) 14 | station_count = repository.count_stations() 15 | # Now convert the data to a list of objects that we can pass to the template. 16 | stationlist = [] 17 | 18 | for station, stat in zip(stations, statistics): 19 | x = {} 20 | x['station_id'] = station['station_id'] 21 | x['name'] = station['name'] 22 | x['coords'] = utils.coords(station['lon'], station['lat']) 23 | x['descr'] = station['descr'] 24 | x['config'] = station['config'] 25 | x['registered'] = station['registered'] 26 | x['lastobs'] = stat["last_los"] 27 | x['cnt'] = stat["observation_count"] 28 | 29 | stationlist.append(x) 30 | 31 | return 'stations.html', dict(stations=stationlist, item_count=station_count) 32 | -------------------------------------------------------------------------------- /doc/apache2/svarog.wsgi: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # This WSGI script can be used with apache2 wsgi interface. 4 | # Make sure: 5 | # - there is venv/bin/activate_this.py (use virtualenv, not venv module) 6 | # - that the paths are valid 7 | # - if having problems, turning the logging level to DEBUG may help 8 | # - adding logging.info(...) 
here and there is primitive, but effective technique 9 | 10 | import os 11 | import runpy 12 | import sys 13 | import logging 14 | 15 | HOME_DIR = '/home/svarog' 16 | 17 | logging.basicConfig(filename=os.path.join(HOME_DIR, 'logs/wsgi.log'), 18 | format='%(asctime)s %(levelname)s %(filename)s:%(lineno)d: %(message)s', 19 | level=logging.INFO) 20 | 21 | activate_this = os.path.join(HOME_DIR, 'devel/svarog/server/venv/bin/activate_this.py') 22 | 23 | runpy.run_path(activate_this) 24 | 25 | sys.path.insert(0, os.path.join(HOME_DIR, 'devel/svarog/server')) 26 | 27 | path = os.path.join(os.path.dirname(__file__), os.pardir) 28 | if path not in sys.path: 29 | sys.path.append(path) 30 | 31 | from app import app as application 32 | application.secret_key = 'your secret key here' 33 | 34 | # Uncomment this to get more information about template loading 35 | # application.config['EXPLAIN_TEMPLATE_LOADING'] = True 36 | -------------------------------------------------------------------------------- /server/app/template_globals.py: -------------------------------------------------------------------------------- 1 | from urllib.parse import urlsplit, parse_qsl, urlencode, urlunsplit 2 | 3 | from app import app 4 | from app.utils import first 5 | 6 | 7 | @app.template_global() 8 | def url_page(url: str, page: int, param_name: str): 9 | """Given a URL, set or replace a query parameter and return the 10 | modified URL. 11 | 12 | See: https://stackoverflow.com/a/12897375 13 | >>> set_query_parameter('http://example.com?foo=bar&biz=baz', 'foo', 'stuff') 14 | 'http://example.com?foo=stuff&biz=baz' 15 | 16 | """ 17 | scheme, netloc, path, query_string, fragment = urlsplit(url) 18 | query_params = parse_qsl(query_string) 19 | 20 | page_param = first(lambda p: p[0] == param_name, query_params) 21 | if page_param is not None: 22 | query_params.remove(page_param) 23 | query_params.append((param_name, str(page))) 24 | 25 | new_query_string = urlencode(query_params, doseq=True) 26 | return urlunsplit((scheme, netloc, path, new_query_string, fragment)) # type: ignore 27 | 28 | 29 | @app.template_filter() 30 | def datetime(value): 31 | """Given a datetime, it format the out to use yyyy-mm-dd hh:mm:ss format""" 32 | return value.isoformat(" ", "seconds") 33 | 34 | 35 | @app.template_filter() 36 | def percentage(value): 37 | return "{:.0%}".format(value) 38 | -------------------------------------------------------------------------------- /.github/workflows/flake8.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Flake8 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | python-version: [3.11, 3.12] 20 | 21 | permissions: 22 | contents: read 23 | pull-requests: write 24 | 25 | steps: 26 | - uses: actions/checkout@v4 27 | - name: Set up Python ${{ matrix.python-version }} 28 | uses: actions/setup-python@v5 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | - name: Install dependencies 32 | run: | 33 | python -m pip install --upgrade pip 34 | python -m pip install flake8 35 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 36 | - name: Lint with flake8 37 
| run: | 38 | # stop the build if there are Python syntax errors or undefined names 39 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 40 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 41 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 42 | -------------------------------------------------------------------------------- /doc/automation-server-arch.drawio: -------------------------------------------------------------------------------- 1 | 5VpRc9o4EP41zNw9pGNbtjGPISTt3VxncqEzd32UrbXRVFhUFoH0158MMsaWQtzUEMJl8iCtZEn+vm8X7cIA3czXHwVezD5zAmzgOWQ9QJOB57m+5w3Kf4c8bS1R6G8NmaBET6oNU/oDtNHR1iUlUDQmSs6ZpIumMeF5Dols2LAQfNWclnLW3HWBMzAM0wQz0/oPJXKm3yJwavsnoNms2tl19MgcV5O1oZhhwld7JnQ7QDeCc7ltzdc3wErwKly2z909M7o7mIBcdnng+o79mEgvykfse5ZeTUiMbq9cTc8jZkv9xvq08qmCQPBlTqBcxRmg8WpGJUwXOClHV4p0ZZvJOVM9VzXNU+mDPoKQsN4z6VN+BD4HKZ7UFD3qDzViWjI7BFc1AaiyzfbARyNtxJr0bLd2jYtqaGh+Bib3/GBCHVByT4qSTUshU9uOCX1Uzaxs3vNCZgKmf/9VDaq99sYtjxAscYwLqIZi0Z7cXuJZatyXqUkpYzeccbF5FhEMUZooeyEF/wZ7I2ESQZz2Q2bgNNn0LWy6Ns2HxyITdSHzWqE4g85EriAuiQGhgLlgMkP/3Mj0u5A5Vh9FChcFYVc+P335rLxYHVoNOn9OL5jStn96kUnpjr6TUBoYlN4xXHy7AGxR8NbYhga208lDr8imKYSJFVkyHMWO0xeyTdFagD1tHBoawGYLAYSqi/v7063nvwxvdEp0oy5RPucYX+GFvKDQ3GAhcII3FvmoCw3Zd7Hu/Dn7gAnlZRINiUqwL/nq1CITWT5mT0tmlUEdZhMvJVcJP+V5Z04LnsqVguqCyWwHSBubtgDpRsGx2DSzmkJq3pzfHnCxiEFsXuH+j98NxNVryyasTfhynkMLa23CjGa56iYKTOW/aFyCSBPMrvXAnBJSbmPlsVlZ6IEY123FTJubDS3MeEdzMzNDqdLESyUhGAYvk2Ar4ByPBDOnMOAvZnhRNlMG6+uyvqrAgJzo5iRROUhBk8PBJ4CI+LbgE3kxCsND8AJplGtNcPfACyzYVTYBTLn9Y7PIawNU73DPqTpJXXvzmg7kBy1OCr4UCeinalqMhYy6jz/6EIaj+i9sriuxyEAa627o3qHwCxU8r0cFqMgq5M8II40SsCdFcRSUEB8Qxhbwl29lWwAPzAvPSmhRUx5hu3bbVWftdfzWOsfW1ZEiC6yp/LcMyR8C3fu6NzJZ62i96TzpTo9Xod5EF52V6EatmPRa0bntOuipVWfWcfpRXa7OtSe7svu1klrZqYW36T2jPEBJtKn0GMobO6PgcA3onYWx9oVzd7n81TiGTqwos4CF1WUyz7EhrHdXHdxx9Ga5tmernPTgr89idS7e0VK1777SO9rfCfvtVOHI7lFt/38j0BuqW/uwAT1yg1dyaFlrGPREo+rWPzjZTq9/toNu/wM= -------------------------------------------------------------------------------- /server/scripts/create_charts.py: -------------------------------------------------------------------------------- 1 | # This script generates fly-over charts for observations that have TLE 2 | # information recorded. 3 | 4 | from typing import Dict 5 | from app.controllers.receive import make_charts 6 | from app.repository import Repository, Station, StationId 7 | 8 | if __name__ == '__main__': 9 | stations: Dict[StationId, Station] = {} 10 | 11 | repository = Repository() 12 | observation_count = repository.count_observations() 13 | 14 | print("There are a total of %d observations" % observation_count) 15 | 16 | STEP = 100 17 | limit = STEP 18 | offset = 0 19 | index = 0 20 | 21 | while offset < observation_count: 22 | # ToDo: Add filtration when MR with filtration will be merged. 23 | observations = repository.read_observations(limit=limit, offset=offset) 24 | 25 | print("Processing batch of %d observations" % len(observations)) 26 | 27 | for observation in observations: 28 | if observation['tle'] is None: 29 | print("No TLE info for observation %d, skipping." 
% observation['obs_id']) 30 | continue 31 | station_id = observation["station_id"] 32 | if station_id in stations: 33 | station = stations[station_id] 34 | else: 35 | station = repository.read_station(station_id) 36 | stations[station_id] = station 37 | 38 | make_charts(observation, station) 39 | index += 1 40 | print("Processed %s/%s observation" % (index, observation_count)) 41 | 42 | offset = limit 43 | limit += STEP 44 | 45 | print("Finish!") 46 | -------------------------------------------------------------------------------- /server/app/__init__.py: -------------------------------------------------------------------------------- 1 | from app.utils import get_footer 2 | 3 | from configparser import ConfigParser, NoSectionError, NoOptionError 4 | 5 | import os 6 | from flask import Flask 7 | 8 | 9 | def create_app(): 10 | """ Creates and returns a Flask app.""" 11 | app = Flask(__name__, template_folder='../templates') 12 | return app 13 | 14 | 15 | app = create_app() 16 | 17 | root_dir = os.path.dirname(os.path.realpath(__file__)) 18 | root_dir = os.path.dirname(root_dir) 19 | ini_path = os.path.join(root_dir, 'svarog.ini') 20 | 21 | try: 22 | 23 | config = ConfigParser() 24 | config.optionxform = str 25 | 26 | loaded_configs = config.read(ini_path) 27 | 28 | if not loaded_configs: 29 | raise Exception(f"Unable to read config file from {ini_path}") 30 | 31 | for key, value in config.defaults().items(): 32 | app.config[key] = value 33 | 34 | for section_name in config.sections(): 35 | app.config[section_name] = {} 36 | section = config[section_name] 37 | for key, value in section.items(): 38 | app.config[section_name][key] = value 39 | 40 | except IOError as e: 41 | raise Exception("Unable to read %s file: %s" % (ini_path, e)) 42 | except NoSectionError as e: 43 | raise Exception("Unable to find section 'database' in the %s file: %s" % (ini_path, e)) 44 | except NoOptionError as e: 45 | raise Exception("Unable to find option in 'database' section in the %s file: %s" % (ini_path, e)) 46 | 47 | # TODO: this is a hack. Template_globals and routes does "from app import app". This is a circular dependency. 48 | from app import template_globals # noqa: F401, E402 49 | from app import routes # noqa: F401, E402 50 | 51 | footer = get_footer() 52 | app.jinja_env.globals["footer"] = footer 53 | -------------------------------------------------------------------------------- /server/templates/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %} 4 | Login 5 | {% endblock %} 6 | 7 | {% block content %} 8 | {% if form %} 9 |
10 | 29 |
30 | {% endif %} 31 | 32 | {% if user %} 33 |

Welcome, {{ user.username }}!

34 |

Your user-id is {{ user.id }}.

35 |

Your role is {{ user.role.name }}.

36 |

You are the owner of the following {{ stations|length }} station(s): 37 | {% for s in stations %} 38 | {{s.name}} 39 | {% endfor %} 40 |

41 |

logout

42 | {% endif %} 43 | 44 | {% with messages = get_flashed_messages() %} 45 | {% if messages %} 46 | {% for message in messages %} 47 | 53 | {% endfor %} 54 | {% endif %} 55 | {% endwith %} 56 | 57 | {% endblock %} 58 | -------------------------------------------------------------------------------- /server/update.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # For some strange reason, git branch --show-current doesn't work on Debian 10, 4 | # even though the git version is 2.20 there. 5 | BRANCH=$(git rev-parse --abbrev-ref HEAD) 6 | 7 | if test "$BRANCH" != "master"; then 8 | echo "The current branch ($BRANCH) is not master, skipping update." 9 | exit 1 10 | fi 11 | 12 | # Get rid of any mess that could be in progress 13 | git merge --abort 14 | git cherry-pick --abort 15 | 16 | # Discard any local changes and switch to master 17 | git reset --hard 18 | 19 | # Disabled for testing. The problem with testing is that if this 20 | # checks out master, then there's no way to test this change on 21 | # a branch. As such, this should be uncommented only AFTER this 22 | # update procedure is merged to master. 23 | 24 | #git checkout master 25 | 26 | # This will wipe all non-tracked files. It's a bit much. 27 | #git clean -fxd 28 | 29 | # Get the new version 30 | git pull 31 | 32 | # Store current SHA in a file 33 | git rev-parse HEAD > commit.txt 34 | 35 | # Run the installation script 36 | VENV=$PWD/venv/bin/activate 37 | 38 | # Create virtual environment if it doesn't exist 39 | if [ ! -d "venv" ]; then 40 | echo "Creating virtual environment..." 41 | python3 -m venv venv 42 | fi 43 | 44 | echo "Enabling venv at $VENV" 45 | 46 | # We don't care that venv activation script will not be checked. 47 | # shellcheck disable=SC1090 48 | . 
"$VENV" 49 | 50 | # Install dependencies 51 | pip install -r ../requirements.txt 52 | 53 | # Install the server 54 | python setup.py install 55 | 56 | # Restart the server (make sure this corresponds to the actual server you're running) 57 | # sudo systemctl restart apache2 58 | # sudo systemctl restart unit 59 | sudo systemctl restart gunicorn-svarog 60 | sudo systemctl restart nginx 61 | -------------------------------------------------------------------------------- /server/app/controllers/station.py: -------------------------------------------------------------------------------- 1 | from app.repository import Repository 2 | from flask import render_template, abort 3 | from app import app, utils 4 | 5 | 6 | @app.route('/station/') 7 | def station(station_id=None): 8 | 9 | repository = Repository() 10 | station = repository.read_station(station_id) 11 | if station is None: 12 | abort(404, "Station not found") 13 | statistics = repository.read_station_statistics(station["station_id"]) 14 | 15 | photos = repository.read_station_photos(station_id) 16 | 17 | owners = repository.station_owners(station_id) 18 | 19 | # Now get 3 latest observations from this station 20 | filters = { 21 | "station_id": station['station_id'] 22 | } 23 | latest_obs = repository.read_observations(filters=filters, limit=3, offset=0) 24 | 25 | # Get the 3 observations with the best rating 26 | best_obs = repository.read_observations(filters=filters, limit=3, offset=0, order="r.rating DESC", expr="r.rating is not NULL") 27 | 28 | x = {} 29 | x['station_id'] = station['station_id'] 30 | x['name'] = station['name'] 31 | x['coords'] = utils.coords(station['lon'], station['lat']) 32 | x['descr'] = station['descr'] 33 | 34 | x['config'] = station['config'] 35 | x['registered'] = station['registered'] 36 | x['lastobs'] = statistics["last_los"] 37 | x['firstobs'] = statistics["first_aos"] 38 | x['cnt'] = statistics["observation_count"] 39 | 40 | files = [] 41 | for photo in photos: 42 | y = {} 43 | y['filename'] = photo['filename'] 44 | y['descr'] = photo['descr'] 45 | y['sort'] = photo['sort'] 46 | files.append(y) 47 | 48 | return render_template('station.html', station=x, files=files, owners=owners, latest_obs=latest_obs, best_obs=best_obs) 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Python 3.11](https://github.com/gut-space/svarog-server/actions/workflows/pytest-3.11.yml/badge.svg)](https://github.com/gut-space/svarog-server/actions/workflows/pytest-3.11.yml) 2 | [![Python 3.12](https://github.com/gut-space/svarog-server/actions/workflows/pytest-3.12.yml/badge.svg)](https://github.com/gut-space/svarog-server/actions/workflows/pytest-3.12.yml) 3 | [![Pylint](https://github.com/gut-space/svarog-server/actions/workflows/pylint.yml/badge.svg)](https://github.com/gut-space/svarog-server/actions/workflows/pylint.yml) 4 | [![Flake8](https://github.com/gut-space/svarog-server/actions/workflows/flake8.yml/badge.svg)](https://github.com/gut-space/svarog-server/actions/workflows/flake8.yml) 5 | [![CodeQL](https://github.com/gut-space/svarog-server/actions/workflows/github-code-scanning/codeql/badge.svg)](https://github.com/gut-space/svarog-server/actions/workflows/github-code-scanning/codeql.yml) 6 | 7 | 8 | 9 | The goal of this project is to build a fully functional automated VHF satellite ground station, loosely based on [satnogs](https://satnogs.org) project. 
10 | 11 | Project founders: [Sławek Figiel](https://github.com/fivitti) and [Tomek Mrugalski](https://github.com/tomaszmrugalski/) 12 | 13 | # Project status 14 | 15 | As of June 2025, the project is in the process of being revived after couple years or dormancy. Stay tuned. 16 | 17 | # Documentation 18 | 19 | - [Installation](doc/install.md) 20 | - [Architecture](doc/arch.md) 21 | - [Developer's guide](doc/devel.md) 22 | - [User Management](doc/users.md) 23 | - [Project report](https://github.com/gut-space/satnogs/blob/master/doc/prototype-phase/satnogs-gdn-report.pdf) - a report from the early days when this was a team university project 24 | - [Project poster 1](doc/prototype-phase/poster1-pl.jpg) 25 | - [Project poster 2](doc/prototype-phase/poster2-en.jpg) 26 | - For older files see https://github.com/gut-space/satnogs. 27 | -------------------------------------------------------------------------------- /server/db/svarog-02.psql: -------------------------------------------------------------------------------- 1 | create table schema(version int); 2 | insert into schema values(1); 3 | 4 | /* Information about stations */ 5 | create table stations (station_id serial PRIMARY KEY NOT NULL, 6 | name varchar(64) NOT NULL, 7 | lon float NOT NULL, 8 | lat float NOT NULL, 9 | descr varchar NOT NULL, 10 | config varchar, 11 | registered timestamp, 12 | firstobs timestamp, 13 | lastobs timestamp); 14 | 15 | /* List of photos for a station */ 16 | create table station_photos (photo_id serial PRIMARY KEY NOT NULL, 17 | station_id serial, 18 | sort int NOT NULL, -- integer values so the photos can be sorted 19 | filename varchar NOT NULL, 20 | descr varchar, 21 | CONSTRAINT fk_station_id FOREIGN KEY (station_id) REFERENCES stations (station_id) ); 22 | 23 | /* Ok, now insert a first station based on the historical records */ 24 | INSERT INTO stations(name, lon, lat, descr, config, registered, firstobs, lastobs) 25 | values('My first ground station', 0.0, 0.0, 'Description missing', 'Configuration unknown', 26 | (select min(aos) from observations), 27 | (select min(aos) from observations), 28 | (select max(aos) from observations)); 29 | 30 | /* Now extend existing observations table with extra data. In particular, add 31 | column that references specific station-id. */ 32 | ALTER TABLE observations ADD COLUMN station_id integer default 1, 33 | ADD CONSTRAINT fk_station_id FOREIGN KEY (station_id) REFERENCES stations (station_id); 34 | UPDATE observations SET station_id = 1 WHERE station_id is null; 35 | 36 | /* Now add field that specifies configurations and TLE */ 37 | ALTER TABLE observations ADD COLUMN config varchar, 38 | ADD COLUMN tle varchar[240]; 39 | 40 | /* Finally bump schema version to 2 */ 41 | UPDATE schema SET version = 2; -------------------------------------------------------------------------------- /server/templates/pagination.html: -------------------------------------------------------------------------------- 1 | {% if pagination and pagination.page_count > 1 %} 2 | 56 | {% endif %} 57 |

58 | Displaying {{ pagination.items_current }} of {{ pagination.items_count }} 59 | item{{ 's' if pagination.items_count > 1 else '' }} 60 |

61 | -------------------------------------------------------------------------------- /.github/workflows/pytest-main.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: pytest with Postgres 5 | 6 | on: 7 | # workflow_call is a required field of the "on" property, 8 | # when intending to use the workflow as a reusable workflow 9 | workflow_call: 10 | inputs: 11 | python-version: 12 | required: true 13 | type: string 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | # Label of the container job 20 | container-job: 21 | # Containers must run in Linux based operating systems 22 | runs-on: ubuntu-latest 23 | timeout-minutes: 10 24 | strategy: 25 | matrix: 26 | python-version: ["3.10", "3.11", "3.12"] 27 | 28 | # Service containers to run with `container-job` 29 | services: 30 | # Label used to access the service container 31 | postgres: 32 | # Docker Hub image 33 | image: postgres 34 | ports: 35 | - 5432:5432 36 | env: 37 | # Provide the password for postgres 38 | POSTGRES_PASSWORD: postgres 39 | # Set health checks to wait until postgres has started 40 | options: >- 41 | --health-cmd pg_isready 42 | --health-interval 10s 43 | --health-timeout 5s 44 | --health-retries 5 45 | 46 | steps: 47 | # Downloads a copy of the code in your repository before running CI tests 48 | - name: Check out repository code 49 | uses: actions/checkout@v4 50 | 51 | - name: Set up Python ${{ inputs.python-version }} 52 | uses: actions/setup-python@v5 53 | with: 54 | python-version: ${{ inputs.python-version }} 55 | cache: 'pip' 56 | cache-dependency-path: 'requirements.txt' 57 | 58 | - name: Install dependencies 59 | run: | 60 | pip install -r requirements.txt 61 | pip install pytest 62 | sudo apt-get install imagemagick 63 | 64 | - name: Setup configs 65 | run: | 66 | cp server/tests/.svarog.ini.github server/svarog.ini 67 | mkdir -p ~/.config/svarog-gut 68 | - name: Run unit tests 69 | run: | 70 | cd server 71 | python -m pytest -s -v 72 | -------------------------------------------------------------------------------- /doc/apache2/svarog.conf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | WSGIPassAuthorization On 5 | WSGIScriptAlias / /home/svarog/public_html/svarog.wsgi 6 | 7 | Order allow,deny 8 | Allow from all 9 | 10 | Alias /viewer /home/svarog/public_html/viewer 11 | Alias /static /home/svarog/public_html/static 12 | Alias /data /home/svarog/data 13 | 14 | Order allow,deny 15 | Allow from all 16 | 17 | 18 | Options Indexes 19 | Order allow,deny 20 | Require all granted 21 | Allow from all 22 | 23 | 24 | 25 | # The ServerName directive sets the request scheme, hostname and port that 26 | # the server uses to identify itself. This is used when creating 27 | # redirection URLs. In the context of virtual hosts, the ServerName 28 | # specifies what hostname must appear in the request's Host: header to 29 | # match this virtual host. For the default virtual host (this file) this 30 | # value is not decisive as it is used as a last resort host regardless. 31 | # However, you must set it for any further virtual host explicitly. 
32 | ServerName svarog.space 33 | ServerAdmin admin@klub.com.pl 34 | DocumentRoot /home/svarog/public_html 35 | Options Indexes FollowSymlinks ExecCGI 36 | ErrorLog /home/svarog/logs/svarog.klub.com.pl-error.log 37 | TransferLog /home/svarog/logs/svarog.klub.com.pl-trans.log 38 | 39 | # Available loglevels: trace8, ..., trace1, debug, info, notice, warn, 40 | # error, crit, alert, emerg. 41 | # It is also possible to configure the loglevel for particular 42 | # modules, e.g. 43 | #LogLevel info ssl:warn 44 | 45 | # ErrorLog ${APACHE_LOG_DIR}/error.log 46 | # CustomLog ${APACHE_LOG_DIR}/access.log combined 47 | 48 | # For most configuration files from conf-available/, which are 49 | # enabled or disabled at a global level, it is possible to 50 | # include a line for only one particular virtual host. For example the 51 | # following line enables the CGI configuration for this host only 52 | # after it has been globally disabled with "a2disconf". 53 | #Include conf-available/serve-cgi-bin.conf 54 | 55 | 56 | Options Indexes FollowSymLinks 57 | AllowOverride None 58 | Require all granted 59 | 60 | 61 | ServerName svarog.space 62 | SSLCertificateFile /etc/letsencrypt/live/svarog.klub.com.pl/fullchain.pem 63 | SSLCertificateKeyFile /etc/letsencrypt/live/svarog.klub.com.pl/privkey.pem 64 | Include /etc/letsencrypt/options-ssl-apache.conf 65 | 66 | 67 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile requirements.in 6 | # 7 | astropy==7.0.1 8 | # via -r requirements.in 9 | astropy-iers-data==0.2025.2.10.0.33.26 10 | # via astropy 11 | blinker==1.9.0 12 | # via flask 13 | certifi==2025.1.31 14 | # via requests 15 | charset-normalizer==3.4.1 16 | # via requests 17 | click==8.1.8 18 | # via flask 19 | contourpy==1.3.1 20 | # via matplotlib 21 | cycler==0.12.1 22 | # via matplotlib 23 | defusedxml==0.7.1 24 | # via pyorbital 25 | flask==3.1.1 26 | # via 27 | # -r requirements.in 28 | # flask-login 29 | # flask-wtf 30 | flask-login==0.6.3 31 | # via -r requirements.in 32 | flask-wtf==1.2.2 33 | # via -r requirements.in 34 | fonttools==4.56.0 35 | # via matplotlib 36 | idna==3.10 37 | # via requests 38 | itsdangerous==2.2.0 39 | # via 40 | # flask 41 | # flask-wtf 42 | jinja2==3.1.6 43 | # via flask 44 | kiwisolver==1.4.8 45 | # via matplotlib 46 | markupsafe==3.0.2 47 | # via 48 | # flask 49 | # jinja2 50 | # werkzeug 51 | # wtforms 52 | marshmallow==3.26.1 53 | # via webargs 54 | matplotlib==3.10.0 55 | # via -r requirements.in 56 | numpy==2.2.3 57 | # via 58 | # astropy 59 | # contourpy 60 | # matplotlib 61 | # orbit-predictor 62 | # pyerfa 63 | # pyorbital 64 | # scipy 65 | orbit-predictor==1.15.0 66 | # via -r requirements.in 67 | packaging==24.2 68 | # via 69 | # astropy 70 | # marshmallow 71 | # matplotlib 72 | # webargs 73 | pillow==11.1.0 74 | # via matplotlib 75 | psycopg2-binary==2.9.10 76 | # via -r requirements.in 77 | pyerfa==2.0.1.5 78 | # via astropy 79 | pyorbital==1.9.2 80 | # via -r requirements.in 81 | pyparsing==3.2.1 82 | # via matplotlib 83 | python-crontab==3.2.0 84 | # via -r requirements.in 85 | python-dateutil==2.9.0.post0 86 | # via 87 | # -r requirements.in 88 | # matplotlib 89 | # python-crontab 90 | pyyaml==6.0.2 91 | # via astropy 92 | requests==2.32.4 93 | # via 94 | # orbit-predictor 95 | # pyorbital 96 | 
scipy==1.15.1 97 | # via pyorbital 98 | sgp4==2.23 99 | # via orbit-predictor 100 | six==1.17.0 101 | # via python-dateutil 102 | typing-extensions==4.12.2 103 | # via -r requirements.in 104 | urllib3==2.5.0 105 | # via requests 106 | webargs==8.6.0 107 | # via -r requirements.in 108 | werkzeug==3.1.3 109 | # via 110 | # flask 111 | # flask-login 112 | wtforms==3.2.1 113 | # via flask-wtf 114 | -------------------------------------------------------------------------------- /server/db/svarog-11.psql: -------------------------------------------------------------------------------- 1 | -- This schema version changes the config columns from varchar to jsonb. 2 | 3 | ALTER TABLE observations ALTER COLUMN config TYPE jsonb USING json_build_object('text',config); 4 | ALTER TABLE stations ALTER COLUMN config TYPE jsonb USING json_build_object('text',config); 5 | 6 | -- Now we need to migrate existing data. This is really specific to station 1. 7 | -- Let's add protocol transmission. So far we were able to receive APT and LRPT. 8 | UPDATE observations SET config = config || '{"protocol": "APT" }' WHERE sat_id IN (25338, 28654, 33591) AND station_id = 1; 9 | UPDATE observations SET config = config || '{"protocol": "LRPT" }' WHERE sat_id IN (40069, 44387) AND station_id = 1; 10 | 11 | -- All transmissions received so far were done using crossed dipole 12 | UPDATE observations SET config = config || '{"antenna": "WiMo TA-1", "antenna-type": "crossed dipole" }' WHERE station_id = 1; 13 | 14 | -- Add frequencies. We haven't done any Doppler magic, so everything was received on base frequencies. 15 | UPDATE observations SET config = config || '{"frequency": 137.62e6 }' WHERE sat_id = 25338 AND station_id = 1; -- NOAA 15 16 | UPDATE observations SET config = config || '{"frequency": 137.912e6 }' WHERE sat_id = 28654 AND station_id = 1; -- NOAA 18 17 | UPDATE observations SET config = config || '{"frequency": 137.1e6 }' WHERE sat_id = 33591 AND station_id = 1; -- NOAA 19 18 | UPDATE observations SET config = config || '{"frequency": 137.1e6 }' WHERE sat_id = 40069 AND station_id = 1; -- METEOR M-2 19 | 20 | -- Now set the SDR being used. I don't remember the exact date when AirSpy was installed. However, shortly afterwards 21 | -- the PR for adding generic RX recipe (PR#50) was merged. The merge was on 2021-06-09, so let's assume 22 | -- that's the date of AirSpy upgrade. 23 | 24 | UPDATE observations set config = config || '{"receiver": "RTL-SDRv3"}' WHERE station_id = 1 AND aos<'2021-06-09'::date; 25 | UPDATE observations set config = config || '{"receiver": "AirSpy Mini"}' WHERE station_id = 1 AND aos>='2021-06-09'::date; 26 | 27 | -- The last METEOR observation recorded is from 2021-03-26. That's because the newly installed LNA + filter 28 | -- (Nooelec NOAA) has a filter that is wide enough for NOAA, but too narrow for Meteor. 29 | UPDATE observations SET config = config || '{"lna":"Nooelec NOAA", "filter":"Nooelec NOAA"}' 30 | WHERE station_id =1 AND aos>='2021-03-26'::date; 31 | UPDATE observations SET config = config || '{"lna": "none", "filter": "none"}' 32 | WHERE station_id =1 AND aos<'2021-03-26'::date; 33 | 34 | -- Not related to the schema update. When registering new stations, let's set the registered to now, unless 35 | -- explicitly specified another date. 
36 | ALTER TABLE stations ALTER COLUMN registered SET DEFAULT now(); 37 | 38 | UPDATE schema SET version = 11; 39 | -------------------------------------------------------------------------------- /server/app/controllers/obslist.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from app import app 3 | from app.repository import Repository 4 | from app.pagination import use_pagination 5 | 6 | from webargs import fields 7 | from webargs.flaskparser import use_kwargs 8 | 9 | 10 | @app.route('/obslist') 11 | @use_pagination() 12 | # Use_kwargs introduces ugly behavior when user provide parameter without value, 13 | # for example "?sat_id=". It should be ignored or handle as None value, but 14 | # I cannot set them. Now user get HTTP 422 Unprocessable Entity. I think that 15 | # it isn't important, because user doesn't provide it manually. 16 | @use_kwargs({ 17 | 'aos_before': fields.Date(), 18 | 'los_after': fields.Date(), 19 | 'sat_id': fields.Int(), 20 | 'station_id': fields.Int(), 21 | 'has_tle': fields.Bool() 22 | }, validate=lambda kwargs: "aos_before" not in kwargs or 23 | "los_after" not in kwargs or 24 | kwargs["aos_before"] >= kwargs["los_after"], 25 | location="querystring") 26 | def obslist(limit_and_offset, **filters): 27 | '''This function retrieves observations list from a local database and 28 | displays it.''' 29 | aos_before_org = filters.get("aos_before") 30 | if aos_before_org is not None: 31 | # Repository uses datetime.datetime structure to bound dates and it is 32 | # exact date. 33 | # User provides datetime.date (which not include hour) and it means that 34 | # list should contain observations from @los_after day 00:00:00 hour to 35 | # @aos_before day 23:59:59.999 hour. For handle it we add 1 day to 36 | # @aos_before day before send to repository. 37 | filters["aos_before"] = aos_before_org + timedelta(days=1) 38 | 39 | repository = Repository() 40 | obslist = repository.read_observations(filters, **limit_and_offset) 41 | satellites_list = repository.read_satellites() 42 | observation_count = repository.count_observations(filters) 43 | stations_list = repository.read_stations() 44 | 45 | satellites_dict = {sat["sat_id"]: sat["sat_name"] for sat in satellites_list} 46 | stations_dict = {s["station_id"]: s["name"] for s in stations_list} 47 | for obs in obslist: 48 | obs["sat_name"] = satellites_dict[obs["sat_id"]] 49 | obs["station_name"] = stations_dict[obs["station_id"]] 50 | 51 | if aos_before_org is not None: 52 | # We send back to user the same date as user provide. 53 | filters["aos_before"] = aos_before_org 54 | 55 | # When database will contain many satellites and stations then we need 56 | # refactor this code to lazy, async read satellites and stations. 
57 | return 'obslist.html', dict(obslist=obslist, item_count=observation_count, 58 | satellites=satellites_list, stations=stations_list, filters=filters) 59 | -------------------------------------------------------------------------------- /server/app/static/svarog.css: -------------------------------------------------------------------------------- 1 | .tle-title { 2 | font-weight: bold; 3 | } 4 | 5 | .tle-wrapper { 6 | white-space: normal; 7 | } 8 | 9 | .tle-wrapper code { 10 | white-space: nowrap; 11 | } 12 | 13 | .tle1 { white-space: pre; color: green } 14 | .tle2 { white-space: pre; color: black } 15 | .tle3 { white-space: pre; color: red } 16 | .tle4 { white-space: pre; color: blue } 17 | .tle5 { white-space: pre; color: darkorange } 18 | .tle6 { white-space: pre; color: gray } 19 | 20 | .product-wrapper { 21 | position: relative; 22 | } 23 | 24 | .product-wrapper img { 25 | max-width: 100%; 26 | width: auto; 27 | height: auto; 28 | } 29 | 30 | .product-wrapper .product-rating { 31 | position: absolute; 32 | left: 5px; 33 | top: 5px; 34 | } 35 | 36 | .product-rating--border { 37 | padding: 4px; 38 | z-index: 1; 39 | border: 2px solid; 40 | border-radius: 5px; 41 | background-color: white; 42 | } 43 | 44 | dl { 45 | padding: 0.5em; 46 | } 47 | 48 | dt { 49 | float: left; 50 | clear: left; 51 | width: 130px; 52 | margin: 0 10px 0 0; 53 | border: 1px solid green; 54 | text-align: right; 55 | font-weight: bold; 56 | } 57 | 58 | dd { 59 | margin: 0 0 0 50px; 60 | padding: 0 0 0.5em 1em; 61 | } 62 | 63 | .param { 64 | display: inline-block; 65 | min-width: 10px; 66 | padding: 3px 7px; 67 | font-size: 12px; 68 | font-weight: 700; 69 | line-height: 1; 70 | color: #fff; 71 | text-align: center; 72 | white-space: nowrap; 73 | vertical-align: middle; 74 | background-color: lightgreen; 75 | color: black; 76 | border-radius: 5px; 77 | } 78 | 79 | .danger { 80 | display: inline-block; 81 | min-width: 10px; 82 | padding: 3px 7px; 83 | font-size: 12px; 84 | font-weight: 700; 85 | line-height: 1; 86 | color: #fff; 87 | text-align: center; 88 | white-space: nowrap; 89 | vertical-align: middle; 90 | background-color: crimson; 91 | color: black; 92 | border-radius: 5px; 93 | } 94 | 95 | .admin legend { 96 | width: 7em; 97 | font-size: 12px; 98 | padding: 0px 5px 0px 5px; 99 | border: 0px; 100 | margin-bottom: 0px; 101 | } 102 | 103 | .admin fieldset { 104 | border: 1px solid red; 105 | color: black; 106 | text-align: left; 107 | width: 800px; 108 | height: 80px; 109 | padding: 10px; 110 | } 111 | 112 | .section-title { 113 | color: black; 114 | font-weight: bold; 115 | } 116 | 117 | #gallery img { 118 | width: 300px; 119 | max-height: 200px; 120 | object-fit: contain; 121 | } 122 | 123 | #gallery img+img { 124 | margin: 1rem; 125 | } 126 | 127 | .viewer-footer { 128 | background-color: rgba(0,0,0,.5); 129 | } 130 | -------------------------------------------------------------------------------- /server/templates/station.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block addToHead %} 4 | 5 | 6 | 7 | 12 | {% endblock %} 13 | 14 | {% block content %} 15 |

Ground Station {{ station.name }}

  • Station location: {{ station.coords }} (see on OpenStreetMap, Google Maps)
  • Station registration time: {{ station.registered }}
  • First recorded transmission: {{ station.firstobs }}
  • Most recent transmission: {{ station.lastobs }}
  • Number of transmissions: {{ station.cnt }} [List]
  • Station admins: {% for o in owners %} {{o.username}} {% endfor %}
  • ID: {{ station.station_id }}

About the station: {{ station.descr|safe }}

Current configuration:
{% for c in station.config %} {% if station.config[c] is defined %}
  • {{ c }}: {{ station.config[c] }}
{% endif %} {% endfor %}

{% if files is defined %} {% endif %}

{% if best_obs is defined %}
Highest rated observations:
{% for o in best_obs %} {% endfor %}
{% endif %}

{% if latest_obs is defined %}
Latest observations:
73 | {% for o in latest_obs %} 74 | 75 | {% endfor %} 76 | {% endif %} 77 | 78 | {% endblock %} -------------------------------------------------------------------------------- /server/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from migrate_db import migrate 3 | import datetime 4 | from os import getcwd, path, makedirs, environ 5 | import shutil 6 | import sys 7 | 8 | from setuptools import setup, find_packages 9 | 10 | # STEP 1: install python packages 11 | REQUIREMENTS = [i.strip() for i in open("../requirements.txt").readlines()] 12 | 13 | setup(name='svarog-server', 14 | version='1.0', 15 | description='svarog server', 16 | author='SF, TM', 17 | packages=find_packages(), 18 | install_requires=REQUIREMENTS 19 | ) 20 | 21 | # STEP 2: ensure the config is exists 22 | config_path = "svarog.ini" 23 | if not path.exists(config_path): 24 | shutil.copyfile("svarog.ini.template", config_path) 25 | 26 | # The chance that the template would actually work are pathetic. Need to tell the user to 27 | # tweak it to his/her specific needs. 28 | print("NOTE: Template config file copied over to %s" % config_path) 29 | print("NOTE: Please edit it to match your configuration and rerun the setup script again.") 30 | sys.exit(-1) 31 | 32 | # STEP 3: ensure the database is updated. 33 | 34 | 35 | def backup_database(): 36 | import subprocess 37 | try: 38 | from configparser import ConfigParser 39 | except ImportError: 40 | from ConfigParser import ConfigParser 41 | backup_dir = "backups" 42 | makedirs(backup_dir, exist_ok=True) 43 | config = ConfigParser() 44 | config.read(config_path) 45 | db = config["database"] 46 | now = datetime.datetime.utcnow() 47 | timestamp_filename = "svarog-%s.backup" % (now.isoformat()) 48 | backup_path = path.join(backup_dir, timestamp_filename) 49 | env = environ.copy() 50 | env["PGPASSWORD"] = db["password"] 51 | subprocess.check_call( 52 | ["pg_dump", 53 | "--host", db["host"], 54 | "--port", db.get("port", "5432"), 55 | "--username", db["user"], 56 | "--dbname", db["database"], 57 | "--format", "c", # Custom 58 | "--compress", "8", 59 | "--no-password", 60 | "--file", backup_path 61 | ], env=env 62 | ) 63 | if not path.exists(backup_path): 64 | raise FileNotFoundError(backup_path) 65 | print("Backup created here: %s" % (backup_path,)) 66 | 67 | 68 | backup_database() 69 | migrate() 70 | 71 | # STEP 4: make sure the update script will be called every day 72 | COMMENT_UPDATE_TAG = 'svarog-update' 73 | 74 | 75 | def install_update_cronjob(): 76 | print("Installing cronjob") 77 | from crontab import CronTab 78 | cron = CronTab(user=True) 79 | 80 | # remove old cronjobs (if any) 81 | cron.remove_all(comment=COMMENT_UPDATE_TAG) 82 | 83 | # This job will pull the new code at noon 84 | job = cron.new(command="cd " + getcwd() + " && ./update.sh", comment=COMMENT_UPDATE_TAG) 85 | job.setall('0 12 * * *') 86 | 87 | cron.write() 88 | 89 | 90 | install_update_cronjob() 91 | 92 | print("Installation complete.") 93 | -------------------------------------------------------------------------------- /server/migrate_db.py: -------------------------------------------------------------------------------- 1 | from sys import exit 2 | import os 3 | from typing import Tuple, List 4 | import logging 5 | 6 | try: 7 | from app.repository import Repository 8 | except KeyError: 9 | exit("Unable to load svarog.ini - make sure the file is present and has all entries.") 10 | 11 | 12 | def list_migrations(directory: str, 
extension=".psql", prefix="svarog-") -> List[Tuple[int, str]]: 13 | ''' 14 | List all files in @directory meet the convention: 15 | [@prefix][XX][@extension] 16 | where XX is number. 17 | 18 | Function return list of pairs: XX number and path. List is sorted by XX number. 19 | ''' 20 | filenames = os.listdir(directory) 21 | 22 | migrations = [] 23 | 24 | for filename in filenames: 25 | if not filename.endswith(extension): 26 | continue 27 | if not filename.startswith(prefix): 28 | continue 29 | path = os.path.join(directory, filename) 30 | if not os.path.isfile(path): 31 | continue 32 | 33 | version_raw, _ = os.path.splitext(filename) 34 | version_raw = version_raw.lstrip(prefix) 35 | version = int(version_raw) 36 | migrations.append((version, path)) 37 | 38 | migrations.sort(key=lambda p: p[0]) 39 | return migrations 40 | 41 | 42 | def migrate(config=None, migration_directory="db"): 43 | ''' 44 | Perform migrations. 45 | 46 | Parameters 47 | ========== 48 | config 49 | Dictionary with psycopg2 "connect" method arguments. 50 | If None then read INI file 51 | migration_directory: str 52 | Directory with .psql files. Files must to keep naming convention: 53 | svarog-XX.psql 54 | where XX is number of database revision. 55 | 56 | Returns 57 | ======= 58 | Function print migration status on console. Changes are save in database. 59 | 60 | Notes 61 | ===== 62 | If any migration fail then all changes are revert. 63 | ''' 64 | repository = Repository(config) 65 | 66 | db_version = repository.get_database_version() 67 | 68 | migrations = list_migrations(migration_directory) 69 | 70 | with repository.transaction() as transaction: 71 | for migration_version, migration_path in migrations: 72 | if migration_version <= db_version: 73 | logging.info("Skip migration to %d version" % (migration_version,)) 74 | continue 75 | 76 | logging.info("Process migration to %d version..." % (migration_version,), end="") 77 | with open(migration_path) as migration_file: 78 | content = migration_file.read() 79 | 80 | repository.execute_raw_query(content) 81 | logging.info("OK") 82 | 83 | transaction.commit() 84 | 85 | new_db_version = repository.get_database_version() 86 | logging.info("Migration complete from %d to %d!" % (db_version, new_db_version)) 87 | 88 | 89 | if __name__ == '__main__': 90 | logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', datefmt='%H:%M:%S') 91 | migrate() 92 | -------------------------------------------------------------------------------- /server/templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | {% block head %} 5 | SVAROG: {% block title %}{% endblock %} 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 20 | {% endblock %} 21 | {% block addToHead %} 22 | {% endblock %} 23 | 24 | 25 | 26 | 55 | 56 |
{% block content %}{% endblock %}
57 | 66 | {% block scripts %}{% endblock %} 67 | 68 | 69 | -------------------------------------------------------------------------------- /server/tests/db-data.psql: -------------------------------------------------------------------------------- 1 | DELETE FROM public.observations; 2 | DELETE FROM public.stations; 3 | DELETE FROM public.station_photos; 4 | DELETE FROM satellites; 5 | 6 | INSERT INTO satellites VALUES 7 | (25338, 'NOAA 15', 'https://www.n2yo.com/satellite/?s=25338'), 8 | (28654, 'NOAA 18', 'https://www.n2yo.com/satellite/?s=28654'), 9 | (33591, 'NOAA 19', 'https://www.n2yo.com/satellite/?s=33591'); 10 | 11 | INSERT INTO public.stations (station_id, name, lon, lat, descr, config, registered, firstobs, lastobs, secret) VALUES 12 | (1, 'TKiS-1', 18.5317870000000013, 54.3524689999999993, 'The TKiS-1 station was created by Slawek Figiel, Tomek Mrugalski and Ewelina Omernik as part of the group project during \ntheir MSc studies at Gdansk University of Technology.', '{ "antenna": "WiMo TA-1", "antenna-type":"crossed dipole", "sdr":"RTL-SDR v3" }', '2019-12-15 08:54:53', '2019-12-15 08:54:53', '2020-02-16 18:26:01.722841', '\x0123456789abcdef'), 13 | (2, 'ETI-1', 18.6132530000000003, 54.3708900000000028, 'Planned ground station at ETI faculty of Gdansk University of Technology', '{"text":"Configuration is TBD"}', '2020-02-16 21:15:20.615274)', NULL, NULL, NULL); 14 | 15 | INSERT INTO public.observations (obs_id, aos, tca, los, sat_id, thumbnail, notes, config, tle, station_id) VALUES 16 | (750, '2020-03-08 15:35:02.042786', '2020-03-08 15:40:01.234567', '2020-03-08 15:51:33.972692', 33591, 'thumb-eb38486b-cd40-4879-81e9-31131766e84b-NOAA 19_2020-03-08T15:51:33.972692_apt.png', NULL, NULL, NULL, 1), 17 | (751, '2020-03-08 16:17:02.639337', '2020-03-08 16:17:25.567890', '2020-03-08 16:32:56.1666', 25338, 'thumb-72e94349-19ad-428c-b812-526971705607-NOAA 15_2020-03-08T16:32:56.166600_apt.png', NULL, NULL, '{ "1 25544U 98067A 08264.51782528 -.00002182 00000-0 -11606-4 0 2927", "2 25544 51.6416 247.4627 0006703 130.5360 325.0288 15.72125391563537"}', 1), 18 | (752, '2020-03-08 17:24:02.088677', '2020-03-08 17:34:56.789012', '2020-03-08 17:39:06.960326', 28654, 'thumb-f6b927bf-1472-4ea6-8657-48265cfae5ca-NOAA 18_2020-03-08T17:39:06.960326_apt.png', 'Note', NULL, NULL, 1); 19 | 20 | -- Add observation 1276 (this one is special for two reasons: excellent reception and visible several countries, 21 | -- which makes it particularly good for experiments with rectification and georeferencing) 22 | INSERT INTO public.observations (obs_id, aos, tca, los, sat_id, notes, config, tle, station_id, thumbnail) VALUES 23 | (1276, '2020-04-12 09:01:03.063476', '2020-04-12 09:01:03.063476', '2020-04-12 09:17:06.466954', 28654, '', NULL, '{"1 28654U 05018A 20098.54037539 .00000075 00000-0 65128-4 0 9992","2 28654 99.0522 154.2797 0015184 73.2195 287.0641 14.12501077766909"}', 1, 'thumb-7ee1c350-09e8-49f0-9826-ca504c5543bf-ab802ca1-419f-418a-aeea-d99bb9c702aa-0-1c60fdf5-3f18-409d-9834-17f014c608c1_product.png'); 24 | INSERT INTO public.observation_files (obs_file_id, filename, media_type, obs_id, rating) 25 | VALUES (1069, 'ab802ca1-419f-418a-aeea-d99bb9c702aa-0-1c60fdf5-3f18-409d-9834-17f014c608c1_product.png', 'image/png', 1276, NULL), 26 | (1070, 'abcdefab-cdef-abcd-efab-abcdefabcdef-0-12345678-9012-3456-7890-123456789012_product.png', 'image/png', 750, 0.5); 27 | 28 | INSERT INTO public.station_photos (photo_id, station_id, sort, filename, descr) VALUES 29 | (1, 1, 1, 'wimo-ta1-antenna.jpg', 'WiMo 
antenna'), 30 | (2, 1, 2, 'malina4.jpg', 'Pi + SDR module'); 31 | 32 | INSERT INTO public.users (id, username, digest, email, role) VALUES 33 | (1, 'asimov', '023456789abcdef', 'issac@terminus.org', 'regular'), 34 | (2, 'baxter', 'aaaaaaaaaaaaaaa', 'baxter@gmail.com', 'owner'), 35 | (3, 'clarke', 'pbkdf2:sha256:150000$Ij6XJyek$d6a0cd085e6955843a9c3224ccf24088852207d55bb056aa0b544168f94860b8', 'acc@gmail.com', 'admin'), -- password = sha256('password') 36 | (4, 'lem', 'pbkdf2:sha256:150000$Ij6XJyek$d6a0cd085e6955843a9c3224ccf24088852207d55bb056aa0b544168f94860b8', 'fake@wp.pl', 'banned'), -- password = sha256('password') 37 | (5, 'admin', 'pbkdf2:sha256:150000$WRMwkYaf$b9d2695437ac61d0c94219cdb284a3dab37e8155fe8057e3752b12a9bec6cf88', 'admin@example.com', 'admin'); -- password = admin 38 | 39 | -- Now pretend some stations have owners 40 | insert into station_owners values (1,1), (2,1), (3,1), (5,1), (4,2); 41 | -------------------------------------------------------------------------------- /server/app/utils.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | from datetime import datetime 3 | from io import BytesIO 4 | from os import path 5 | import shutil 6 | from typing import Callable, Iterable, Optional, TypeVar 7 | from string import Formatter 8 | 9 | 10 | def coords(lon, lat): 11 | t = "%2.4f" % lat 12 | if (lat > 0): 13 | t += "N" 14 | else: 15 | t += "S" 16 | 17 | t += " %2.4f" % lon 18 | if (lon > 0): 19 | t += "E" 20 | else: 21 | t += "W" 22 | return t 23 | 24 | 25 | def make_thumbnail(input_path, output_path, width=200): 26 | subprocess.check_call(["convert", "-thumbnail", str(width), input_path, output_path]) 27 | 28 | 29 | T = TypeVar("T") 30 | 31 | 32 | def first(condition: Callable[[T], bool], items: Iterable[T]) -> Optional[T]: 33 | '''Return first element for which @condition is True. Otherwise return None''' 34 | for item in items: 35 | if condition(item): 36 | return item 37 | return None 38 | 39 | 40 | def get_footer(): 41 | """Returns data regarding the last update: timestamp of the upgrade process and SHA of the last git commit. 42 | Both pieces of information are coming from the timestamp.txt file (which is generated by update.sh script)""" 43 | COMMIT_FILE = "commit.txt" 44 | try: 45 | root_dir = path.dirname(path.realpath(__file__)) 46 | root_dir = path.dirname(root_dir) 47 | commit_path = path.join(root_dir, COMMIT_FILE) 48 | 49 | with open(commit_path, 'r') as f: 50 | return { 51 | 'commit': f.read().strip(), 52 | 'timestamp': datetime.fromtimestamp(path.getmtime(commit_path)).isoformat(" ", "minutes") 53 | } 54 | except OSError: 55 | # The file was not found or is generally inaccessible. Return nothing. 56 | return None 57 | 58 | 59 | def save_binary_stream_to_file(path: str, stream: BytesIO): 60 | '''Efficient way to save binary stream to file. 61 | See: https://stackoverflow.com/a/39050559''' 62 | stream.seek(0) 63 | with open(path, 'wb') as f: 64 | shutil.copyfileobj(stream, f, length=131072) 65 | 66 | 67 | def strfdelta(tdelta, fmt='{D:02}d {H:02}h {M:02}m {S:02}s', inputtype='timedelta'): 68 | """Convert a datetime.timedelta object or a regular number to a custom- 69 | formatted string, just like the stftime() method does for datetime.datetime 70 | objects. 71 | 72 | The fmt argument allows custom formatting to be specified. Fields can 73 | include seconds, minutes, hours, days, and weeks. Each field is optional. 
74 | 75 | Some examples: 76 | '{D:02}d {H:02}h {M:02}m {S:02}s' --> '05d 08h 04m 02s' (default) 77 | '{W}w {D}d {H}:{M:02}:{S:02}' --> '4w 5d 8:04:02' 78 | '{D:2}d {H:2}:{M:02}:{S:02}' --> ' 5d 8:04:02' 79 | '{H}h {S}s' --> '72h 800s' 80 | 81 | The inputtype argument allows tdelta to be a regular number instead of the 82 | default, which is a datetime.timedelta object. Valid inputtype strings: 83 | 's', 'seconds', 84 | 'm', 'minutes', 85 | 'h', 'hours', 86 | 'd', 'days', 87 | 'w', 'weeks' 88 | 89 | by MarredCheese, source: https://stackoverflow.com/questions/538666/ 90 | """ 91 | 92 | # Convert tdelta to integer seconds. 93 | if inputtype == 'timedelta': 94 | remainder = int(tdelta.total_seconds()) 95 | elif inputtype in ['s', 'seconds']: 96 | remainder = int(tdelta) 97 | elif inputtype in ['m', 'minutes']: 98 | remainder = int(tdelta) * 60 99 | elif inputtype in ['h', 'hours']: 100 | remainder = int(tdelta) * 3600 101 | elif inputtype in ['d', 'days']: 102 | remainder = int(tdelta) * 86400 103 | elif inputtype in ['w', 'weeks']: 104 | remainder = int(tdelta) * 604800 105 | 106 | f = Formatter() 107 | desired_fields = [field_tuple[1] for field_tuple in f.parse(fmt)] 108 | possible_fields = ('W', 'D', 'H', 'M', 'S') 109 | constants = {'W': 604800, 'D': 86400, 'H': 3600, 'M': 60, 'S': 1} 110 | values = {} 111 | for field in possible_fields: 112 | if field in desired_fields and field in constants: 113 | values[field], remainder = divmod(remainder, constants[field]) 114 | return f.format(fmt, **values) 115 | -------------------------------------------------------------------------------- /server/app/controllers/login.py: -------------------------------------------------------------------------------- 1 | from flask import render_template, request, flash, redirect, url_for 2 | from app import app 3 | from flask_wtf import FlaskForm 4 | from flask_login import current_user, login_user, logout_user, UserMixin, LoginManager 5 | 6 | from wtforms import StringField, PasswordField, BooleanField, SubmitField 7 | from wtforms.validators import InputRequired, Length 8 | from urllib.parse import urlparse 9 | from werkzeug.security import check_password_hash 10 | 11 | from app.repository import Repository, UserRole 12 | 13 | 14 | class LoginForm(FlaskForm): 15 | username = StringField("Your login name", validators=[InputRequired(), Length(min=3, max=20)]) 16 | password = PasswordField("Your password", validators=[InputRequired(), Length(min=5, max=80)]) 17 | remember = BooleanField("Remember me") 18 | 19 | submit = SubmitField("Sign In") 20 | 21 | 22 | class ApplicationUser(UserMixin): 23 | 24 | # The following fields are useful for Flask-login 25 | # It's basically a reimplementation of UserMixin 26 | 27 | def __init__(self, user): 28 | # Copy id, username, digest, email, role fields 29 | for key, value in user.items(): 30 | setattr(self, key, value) 31 | self.auth = False 32 | 33 | def check_digest(self, digest: str): 34 | """This compares hashes. It's almost useless (except perhaps for testing) as the hashes 35 | are salted. So they're almost always different. 
This sets the auth field (true if provided 36 | password is correct).""" 37 | self.auth = self.digest == digest 38 | return self.auth 39 | 40 | def check_password(self, passwd: str): 41 | return check_password_hash(self.digest, passwd) 42 | 43 | def is_authenticated(self): 44 | return self.auth 45 | 46 | def is_anonymous(self): 47 | return not self.auth 48 | 49 | def get_id(self): 50 | return self.id 51 | 52 | 53 | @app.route("/login", methods=["GET", "POST"]) 54 | def login(): 55 | 56 | repository = Repository() 57 | 58 | if current_user.is_authenticated: 59 | stations = repository.owned_stations(current_user.get_id()) 60 | 61 | # list of stations 62 | owned_stations = " ".join(f"{s['name']}({s['station_id']})" for s in stations) 63 | app.logger.info("Authenticated user %s, owner of %s" % (current_user.username, owned_stations)) 64 | 65 | return render_template("login.html", user=current_user, stations=stations) 66 | 67 | form = LoginForm() 68 | 69 | if form.validate_on_submit(): 70 | app.logger.info("Login requested for user %s, pass=%s, remember_me=%s" % (form.username.data, form.password.data, form.remember.data)) 71 | 72 | user = repository.read_user(user=form.username.data) 73 | 74 | if user is None: 75 | app.logger.info("Login failed: invalid username: %s" % form.username.data) 76 | flash("Invalid username.") 77 | return redirect(url_for("login")) 78 | 79 | u = ApplicationUser(user) 80 | if not u.check_password(form.password.data): 81 | app.logger.info("Login failed: invalid password %s for user %s" % (form.password.data, form.username.data)) 82 | flash("Invalid password.") 83 | return redirect(url_for("login")) 84 | 85 | if u.role == UserRole.BANNED: 86 | app.logger.info("Login failed: attempt to login into disabled account %s" % form.username.data) 87 | flash("Account disabled.") 88 | return redirect(url_for("login")) 89 | 90 | app.logger.info("Login successful for user %s" % form.username.data) 91 | login_user(u, remember=form.remember.data) 92 | 93 | next_page = request.args.get("next") 94 | if not next_page or urlparse(next_page).netloc != "": 95 | next_page = url_for("index") 96 | return redirect(next_page) 97 | 98 | return render_template("login.html", form=form) 99 | 100 | 101 | lm = LoginManager(app) 102 | 103 | 104 | @lm.user_loader 105 | def load_user(user_id): 106 | rep = Repository() 107 | u = rep.read_user(user=user_id) 108 | if u: 109 | return ApplicationUser(u) 110 | return None 111 | 112 | 113 | @app.route("/logout") 114 | def logout(): 115 | logout_user() 116 | return redirect(url_for("index")) 117 | -------------------------------------------------------------------------------- /server/tests/dbtest.py: -------------------------------------------------------------------------------- 1 | import os 2 | from configparser import ConfigParser 3 | import uuid 4 | from functools import wraps 5 | from contextlib import contextmanager 6 | 7 | import psycopg2 8 | from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT 9 | 10 | from migrate_db import migrate 11 | from app.repository import Repository 12 | 13 | 14 | # The relative path to the root directory. 
15 | _root_dir = os.path.dirname(os.path.realpath(__file__)) 16 | _root_dir = os.path.dirname(_root_dir) 17 | 18 | 19 | def _read_configurations(): 20 | '''Read the configuration file in the root directory.''' 21 | ini_path = os.path.join(_root_dir, 'svarog.ini') 22 | 23 | config = ConfigParser() 24 | config.optionxform = str 25 | config.read(ini_path) 26 | config = config['database'] 27 | 28 | config = dict(**config) 29 | maintenance_config = config.copy() 30 | 31 | # The possibility to provide the separate maintenance credentials 32 | # if the standard user has limited privileges. 33 | # They are used only to create, migrate and drop database. 34 | # The test are running using the standard credentials. 35 | maintenance_mapping = { 36 | 'maintenance_user': 'user', 37 | 'maintenance_password': 'password', 38 | 'maintenance_database': 'database' 39 | } 40 | 41 | for from_, to in maintenance_mapping.items(): 42 | if from_ in maintenance_config: 43 | maintenance_config[to] = maintenance_config[from_] 44 | del maintenance_config[from_] 45 | del config[from_] 46 | 47 | # The main database from the configuration is not touched. 48 | # The database name is used as a prefix for the test databases. 49 | pattern_database_name = config.get('database', 'postgres') 50 | test_database_name = f'{pattern_database_name}-{uuid.uuid4()}' 51 | test_database_name = test_database_name.replace('-', '_') 52 | config['database'] = test_database_name 53 | 54 | return config, maintenance_config 55 | 56 | 57 | def _standard_seed_db(config): 58 | '''Migrate and seed the test database.''' 59 | migrate(config, os.path.join(_root_dir, 'db')) 60 | 61 | with psycopg2.connect(**config) as conn: 62 | with conn.cursor() as cursor: 63 | with open(os.path.join(_root_dir, "tests", "db-data.psql"), "rt") as f: 64 | cursor.execute(f.read()) 65 | conn.commit() 66 | 67 | 68 | @contextmanager 69 | def setup_database_test_case(): 70 | '''Create the test database, migrate it to the latest version, and 71 | destroy after test case.''' 72 | user_config, maintenance_config = _read_configurations() 73 | 74 | maintenance_in_user_config = maintenance_config.copy() 75 | maintenance_in_user_config['database'] = user_config['database'] 76 | 77 | user = user_config.get('user', 'postgres') 78 | database = user_config['database'] 79 | 80 | maintenance_connection = psycopg2.connect(**maintenance_config) 81 | maintenance_connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) 82 | maintenance_cursor = maintenance_connection.cursor() 83 | 84 | create_database_query = f'CREATE DATABASE {database} OWNER {user};' 85 | maintenance_cursor.execute(create_database_query) 86 | 87 | maintenance_cursor.close() 88 | maintenance_connection.close() 89 | 90 | _standard_seed_db(maintenance_in_user_config) 91 | 92 | try: 93 | yield user_config 94 | finally: 95 | maintenance_connection = psycopg2.connect(**maintenance_config) 96 | maintenance_connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) 97 | maintenance_cursor = maintenance_connection.cursor() 98 | 99 | drop_database_query = f'DROP DATABASE {database};' 100 | maintenance_cursor.execute(drop_database_query) 101 | 102 | maintenance_cursor.close() 103 | maintenance_connection.close() 104 | 105 | 106 | def use_repository(f): 107 | '''The test case decorator that passes the repository object 108 | as the first argument. The repository uses the test database. 
109 | The database is destroyed after the test case.''' 110 | @wraps(f) 111 | def wrapper(self, *args, **kwargs): 112 | with setup_database_test_case() as config: 113 | repository = Repository(config) 114 | return f(self, repository, *args, **kwargs) 115 | return wrapper 116 | -------------------------------------------------------------------------------- /server/tests/test_pagination.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | import unittest 3 | from unittest import mock 4 | 5 | from app import create_app 6 | from app.pagination import Pagination, use_pagination 7 | 8 | 9 | class TestPagination(unittest.TestCase): 10 | 11 | def test_limit_and_offset(self): 12 | items_per_page = 5 13 | paginaton = Pagination(items_per_page) 14 | 15 | limit_and_offset = paginaton.limit_and_offset(1) 16 | self.assertLessEqual({ 17 | 'limit': 5, 18 | 'offset': 0 19 | }.items(), limit_and_offset.items()) 20 | 21 | limit_and_offset = paginaton.limit_and_offset(2) 22 | self.assertLessEqual({ 23 | 'limit': 5, 24 | 'offset': 5 25 | }.items(), limit_and_offset.items()) 26 | 27 | limit_and_offset = paginaton.limit_and_offset(3) 28 | self.assertLessEqual({ 29 | 'limit': 5, 30 | 'offset': 10 31 | }.items(), limit_and_offset.items()) 32 | 33 | limit_and_offset = paginaton.limit_and_offset(4) 34 | self.assertLessEqual({ 35 | 'limit': 5, 36 | 'offset': 15 37 | }.items(), limit_and_offset.items()) 38 | 39 | def test_items_current(self): 40 | items = 15 41 | items_per_page = 5 42 | pagination = Pagination(items_per_page) 43 | 44 | def current(p): return pagination.template_kwargs( 45 | items, url="http://example.com", page=p 46 | )["pagination"]["items_current"] 47 | 48 | self.assertEqual(current(1), 5) 49 | self.assertEqual(current(2), 5) 50 | self.assertEqual(current(3), 5) 51 | self.assertEqual(current(4), 0) 52 | 53 | @mock.patch('flask.templating._render', return_value='') 54 | def test_use_pagination_single(self, mock_render): 55 | 56 | received_limit_and_offset = None 57 | app = create_app() 58 | app.testing = True 59 | 60 | with app.test_client() as client: 61 | @app.route("/test-single-pagination") 62 | @use_pagination(10) 63 | def pagination_single(limit_and_offset): 64 | nonlocal received_limit_and_offset 65 | received_limit_and_offset = limit_and_offset 66 | return 'index.html', dict(item_count=100) 67 | response = client.get("/test-single-pagination", 68 | query_string=dict(page=3)) 69 | 70 | self.assertTrue(mock_render.called) 71 | self.assertEqual(response.status_code, 200) 72 | self.assertIsNotNone(received_limit_and_offset) 73 | self.assertDictEqual({'limit': 10, 'offset': 20}, 74 | received_limit_and_offset) # type: ignore 75 | 76 | _, _, context = mock_render.call_args[0] 77 | self.assertTrue("pagination" in context) 78 | self.assertEqual(context["pagination"]["items_count"], 100) 79 | 80 | @mock.patch('flask.templating._render', return_value='') 81 | def test_use_pagination_multiple(self, mock_render): 82 | 83 | received_limit_and_offset: Any = None 84 | app = create_app() 85 | app.testing = True 86 | 87 | with app.test_client() as client: 88 | @app.route("/test-multi-pagination") 89 | @use_pagination({ 90 | 'items_per_page': 50, 91 | 'count_name': 'item_a_count', 92 | 'page_param': 'page_a', 93 | 'template_arg_name': 'pagination_a' 94 | }, 95 | { 96 | 'items_per_page': 50, 97 | 'count_name': 'item_b_count', 98 | 'page_param': 'page_b', 99 | 'template_arg_name': 'pagination_b' 100 | }) 101 | def 
pagination_multiple(limit_and_offset):
102 | nonlocal received_limit_and_offset
103 | received_limit_and_offset = limit_and_offset
104 | return 'index.html', dict(item_a_count=100, item_b_count=200)
105 | response = client.get("/test-multi-pagination",
106 | query_string=dict(page_a=3))
107 | 
108 | self.assertTrue(mock_render.called)
109 | self.assertEqual(response.status_code, 200)
110 | 
111 | self.assertIsNotNone(received_limit_and_offset)
112 | limit_and_offset_a, limit_and_offset_b = received_limit_and_offset
113 | self.assertDictEqual({'limit': 50, 'offset': 100},
114 | limit_and_offset_a)  # type: ignore
115 | self.assertDictEqual({'limit': 50, 'offset': 0},
116 | limit_and_offset_b)  # type: ignore
117 | 
118 | _, _, context = mock_render.call_args[0]
119 | 
120 | self.assertTrue("pagination_a" in context)
121 | self.assertTrue("pagination_b" in context)
122 | self.assertEqual(context["pagination_a"]["items_count"], 100)
123 | self.assertEqual(context["pagination_b"]["items_count"], 200)
124 | 
-------------------------------------------------------------------------------- /doc/install.md: --------------------------------------------------------------------------------
1 | # [doc](../README.md) > Installation
2 | 
3 | This system consists of two elements: a station (which is intended to run on a Raspberry Pi with an SDR dongle, but can
4 | be run on any Linux box) and a server (which is intended to be run in a place with good uplink connectivity). If
5 | you are interested in running your own station, you most likely want to deploy just the station and use the existing
6 | server. Please contact someone from the Svarog team and we'll hook you up.
7 | 
8 | ## Server installation
9 | 
10 | Server installation is a manual process. It is assumed that you already have a running apache server.
11 | Here are the steps needed to get it up and running.
12 | 
13 | 1. **Get the latest code**
14 | 
15 | ```
16 | git clone https://github.com/gut-space/svarog-server
17 | ```
18 | 
19 | 2. **Install PostgreSQL**:
20 | 
21 | ```
22 | apt install postgresql postgresql-client
23 | su - postgres
24 | psql
25 | CREATE DATABASE svarog;
26 | CREATE USER svarog WITH PASSWORD 'secret'; -- make sure to use an actual password here
27 | GRANT ALL PRIVILEGES ON DATABASE svarog TO svarog;
28 | ```
29 | 
30 | If upgrading from earlier Postgres, the following might be helpful:
31 | 
32 | ```sql
33 | grant all ON ALL TABLES IN SCHEMA public to svarog;
34 | grant all ON ALL SEQUENCES IN SCHEMA public to svarog;
35 | grant all ON ALL ROUTINES IN SCHEMA public to svarog;
36 | ```
37 | 
38 | Make sure to either run `setup.py` or run the DB schema migration manually: `python3 migrate_db.py`.
39 | 
40 | 3. **Install Flask dependencies**
41 | 
42 | ```
43 | cd svarog-server/server
44 | python3 -m venv venv
45 | source venv/bin/activate
46 | python setup.py install
47 | ```
48 | 
49 | Sometimes it's necessary to explicitly say which python version to use: `python3 -m virtualenv --python=python3 venv`
50 | 
51 | This step will install the necessary dependencies. It is a good practice to install them in a virtual environment. If you don't have virtualenv
52 | installed, you can add it with `sudo apt install python-virtualenv`
53 | or a similar command for your system. Alternatively, you may use venv.
54 | However, make sure the virtual environment is created in the venv directory.
55 | 
56 | You can start Flask manually to check that it's working. This is not needed once you have apache integration complete.
57 | 
58 | ```
59 | cd server
60 | ./svarog-web.py
61 | ```
62 | 
63 | 4. **Set up your HTTP server**
64 | 
65 | Svarog has been run successfully with both Apache and Nginx. The very subjective experience of
66 | one of the Svarog authors is that Apache's WSGI configuration is much more fragile, but is somewhat
67 | simpler due to fewer components. On the other hand, using Nginx requires an additional application
68 | server (Unit), but it is much more robust and flexible. Other stacks are most likely possible,
69 | but were not tried.
70 | 
71 | Depending on your choice, please follow one of the sections 4A, 4B or 4C.
72 | 
73 | 4A. **Apache configuration**
74 | 
75 | The general goal is to have an apache2 server running with WSGI scripting capability that runs Flask. See an [example
76 | apache2 configuration](apache2/svarog.conf). You may want to tweak the paths and TLS configuration to use LetsEncrypt
77 | or another certificate of your choice. Make sure the paths are actually pointing to the right directory.
78 | There is an example WSGI script in [svarog.wsgi](apache2/svarog.wsgi). It requires some tuning specific to your deployment.
79 | 
80 | 4B. **NGINX + UNIT Configuration**
81 | 
82 | An alternative to apache is to run Nginx with the Unit application server. An example configuration
83 | for nginx is available [here](nginx/nginx). This file should in general be copied to
84 | `/etc/nginx/sites-available/svarog` and then linked to `/etc/nginx/sites-enabled/svarog`.
85 | Make sure you tweak it to your specific deployment.
86 | 
87 | This deployment requires the Unit app server to run and be configured with the [unit config](nginx/unit.json)
88 | file. The configuration can be uploaded using a command similar to this:
89 | 
90 | ```curl -X PUT --data-binary @nginx/unit.json --unix-socket /var/run/control.unit.sock http://localhost/config```
91 | 
92 | Please consult the [Unit docs](https://unit.nginx.org/configuration/) for
93 | details.
94 | 
95 | You can check Unit's configuration using:
96 | 
97 | ```curl --unix-socket /var/run/control.unit.sock http://localhost/config/```
98 | 
99 | 
100 | 4C. **NGINX + Gunicorn**
101 | 
102 | Gunicorn is a lightweight application server. You can install it with:
103 | 
104 | ```
105 | cd server
106 | source venv/bin/activate
107 | pip install gunicorn
108 | ```
109 | 
110 | Then start the server with `gunicorn app:app`. Plenty of extra options are available for
111 | logging (--access-logfile, --error-logfile), binding to a specific address or port (--bind 192.168.1.1:1234),
112 | serving over HTTPS/TLS (--keyfile, --certfile) and more. A complete example invocation is sketched at the end of this document.
113 | 
114 | The gunicorn process can be run from systemd. An example systemd file is available in doc/gunicorn.
115 | 
116 | 5. **Grant sudo privileges**
117 | 
118 | You should also update the /etc/sudoers file to allow the ordinary user (svarog) to restart the apache server (or the nginx and unit servers).
119 | This will be used by the update script that is run every day. You should use the `visudo` command to add the following line:
120 | 
121 | ```
122 | %svarog ALL= NOPASSWD: /bin/systemctl restart apache2
123 | ```
124 | 
125 | or
126 | 
127 | ```
128 | %svarog ALL= NOPASSWD: /bin/systemctl restart nginx
129 | %svarog ALL= NOPASSWD: /bin/systemctl restart unit
130 | ```
131 | 
132 | Alternatively, you can allow restarting all services:
133 | ```
134 | %svarog ALL= NOPASSWD: /bin/systemctl
135 | ```
136 | 
137 | This is more convenient, but may be a bit risky from the security perspective.
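
To tie the gunicorn options from section 4C together, here is a sketch of what a complete invocation might look like. The bind address and port, the worker count, and the log file locations are only placeholders, and the certificate paths simply reuse the LetsEncrypt paths from the example apache2 config; all of them need to be adapted to your actual deployment.

```
cd server
source venv/bin/activate
# Placeholders: adjust the address/port, worker count, log paths and
# certificate locations to match your own deployment before using this.
gunicorn --bind 0.0.0.0:8080 \
         --workers 2 \
         --access-logfile /home/svarog/logs/gunicorn-access.log \
         --error-logfile /home/svarog/logs/gunicorn-error.log \
         --keyfile /etc/letsencrypt/live/svarog.klub.com.pl/privkey.pem \
         --certfile /etc/letsencrypt/live/svarog.klub.com.pl/fullchain.pem \
         app:app
```

If you prefer to terminate TLS in nginx instead (as in section 4B), drop the --keyfile/--certfile options and let gunicorn listen only on a local port.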
138 | -------------------------------------------------------------------------------- /server/app/authorize_station.py: -------------------------------------------------------------------------------- 1 | from app.repository import Repository 2 | 3 | from functools import wraps 4 | 5 | from typing import Optional, Tuple 6 | from flask import abort, request 7 | import logging 8 | 9 | from app import app 10 | from app.hmac_token import parse_token, validate_token, AUTHORIZATION_ALGORITHM 11 | 12 | 13 | def _normalize_request_dict(dict_): 14 | '''Accepts multi-dict. If entry has multiple values then handle it as pair 15 | with single key and array of values. Else handle entry as key-value pair''' 16 | res = {} 17 | for key, values in dict_.lists(): 18 | value = None 19 | if len(values) == 1: 20 | value = values[0] 21 | elif len(values) > 1: 22 | value = values 23 | res[key] = value 24 | return res 25 | 26 | 27 | def _get_body(request): 28 | ''' 29 | Return dict with request arguments.. 30 | ''' 31 | body = {} 32 | body.update(_normalize_request_dict(request.form)) 33 | body.update(_normalize_request_dict(request.files)) 34 | return body 35 | 36 | 37 | def _get_secret(station_id) -> bytes: 38 | ''' 39 | Fetch station secret from database 40 | 41 | ToDo: Returned value should be cached to avoid DDoS and 42 | DB call before authorization 43 | ''' 44 | repository = Repository() 45 | return repository.read_station_secret(station_id) 46 | 47 | 48 | def _verify_request() -> Tuple[Optional[str], Optional[str]]: 49 | '''Verify Authorization header in current request. 50 | 51 | Returns 52 | ------- 53 | bool 54 | None if successful, error message otherwise. 55 | str 56 | station_id (if successful) or None (if unsuccessful) 57 | ''' 58 | header = request.headers.get("Authorization") # type: ignore 59 | if header is None: 60 | return "Missing Authorization header", None 61 | algorithm, token = header.split() 62 | if algorithm != AUTHORIZATION_ALGORITHM: 63 | return "Wrong authorization method", None 64 | 65 | station_id, *_ = parse_token(token) 66 | secret = _get_secret(station_id) 67 | 68 | if secret is None: 69 | return "Secret not set", station_id 70 | 71 | body = _get_body(request) 72 | 73 | return validate_token(token, secret, body) 74 | 75 | 76 | def authorize_station(f): 77 | ''' 78 | Security decorator for authorize ground station 79 | using HMAC signature. 80 | 81 | Returns 82 | ======= 83 | Station id is passed as argument to your action. 84 | If authorization failed then request is abort with HTTP 401. 85 | 86 | Notes 87 | ===== 88 | 89 | Authorization data must be included as "Authorize" HTTP header 90 | with value in format: 91 | HMAC-SHA256 [id],[timestamp],[sig] 92 | where: 93 | [id] - station id, corespoding to value in DB 94 | [timestamp] - timestamp in ISO 8601 format with second 95 | precision in UTC time zone. For example: 96 | "2020-02-20T18:59:59" (without quotes) 97 | Timestamp may be max 2.5 minute older or newer 98 | then now. It means that max token lifetime 99 | is 5 minutes. 100 | [sig] - HMAC signature in hex format. See details below. 101 | Example: 102 | Authorization: HMAC-SHA256 1,2020-02-21T20:36:57,b9b64a880293cd6007c0f69a06cea6efd463782eb86ddb47f2686aa9294ff4ec 103 | 104 | Signature is created using HMAC algorithm with secred shared 105 | with content server on station id, timestamp and request body. 106 | First you need create a correct body. 107 | 1. Get all your parameters (form data) as key-value pairs. 108 | Key and value should be treat as strings. 
109 | If you attach any file then as value use SHA-1 hash 110 | from content (digest, hex format). 111 | If your value is array of strings or files then you create pair 112 | for each value (with the same key) 113 | 2. Sort your pairs alphabetically by key (keep order for pairs with 114 | the same key, order must be the same as in sended request) 115 | 3. Concat key and value using equal sign as hyphen 116 | For example if your key is named "foo" and value is "bar" 117 | then your pair representation is "foo=bar" 118 | 4. Join all pair representation using "&" as delimiter 119 | For example: For pairs: ('foo', '1'), ('bar', '2'), ('baz', '3') 120 | you should use: "bar=2&baz=3&foo=1" 121 | Now you can create basestring for signature: 122 | 5. Save your timestamp in ISO 8601 format with second precision. 123 | It must be the same timestamp as you used in header-level 124 | 6. Concat station id, timestamp and basestring using colons 125 | For example for id equals "1", create date 2020-02-20 18:59:59 UTC, 126 | and body { 'foo': 1, 'bar': 2 } you should get: 127 | 1:2020-02-20T18:59:59:bar=2&foo=1 128 | Next you should create signature: 129 | 7. Use HMAC algorithm with your secret on basestring 130 | 131 | Secret must be at least 16 cryptographic-safe random bytes. 132 | 133 | Example 134 | ======= 135 | @app.route('/api') 136 | @authorize_station 137 | def api(station_id: str): 138 | pass 139 | ''' 140 | @wraps(f) 141 | def decorated_function(*args, **kws): 142 | logging.debug("Verifying request.") 143 | err, id_ = _verify_request() 144 | if err is not None: 145 | if app.config["security"].get("ignore_hmac_validation_errors", "false").lower() != "true": 146 | logging.info("Authorization failed %s" % err) 147 | abort(401, description="Authorization failed: %s" % (err,)) 148 | return f(id_, *args, **kws) 149 | return decorated_function 150 | -------------------------------------------------------------------------------- /server/app/tle_diagrams.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | import io 3 | import math 4 | from collections import namedtuple 5 | from typing import List, Sequence, Tuple 6 | 7 | from matplotlib import pyplot as plt 8 | from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas 9 | from orbit_predictor.sources import get_predictor_from_tle_lines 10 | from orbit_predictor.locations import Location as PredictLocation 11 | 12 | Location = namedtuple("Location", ("latitude", "longitude", "elevation")) 13 | 14 | 15 | def _calculate_series(location: Location, tle: Sequence[str], 16 | aos: datetime, los: datetime, 17 | time_step: timedelta) -> Tuple[Sequence[datetime], 18 | Sequence[float], Sequence[float]]: 19 | '''Calculate data for plot diagrams''' 20 | date_series: List[datetime] = [] 21 | azimuth_series: List[float] = [] 22 | elevation_series: List[float] = [] 23 | 24 | location = PredictLocation("server", *location) 25 | predictor = get_predictor_from_tle_lines(tle) 26 | 27 | date: datetime = aos 28 | while (date <= los): 29 | position = predictor.get_position(date) 30 | az, el = location.get_azimuth_elev_deg(position) 31 | 32 | date_series.append(date) 33 | azimuth_series.append(az) 34 | elevation_series.append(el) 35 | 36 | date += time_step 37 | 38 | return date_series, azimuth_series, elevation_series 39 | 40 | 41 | def _produce_azimuth_elevation_by_time_figure(dates: Sequence[datetime], 42 | azimuths: Sequence[float], elevations: Sequence[float]) -> 
plt.Figure: 43 | '''Return figure with azimuth/elevation plot by time. X axis contains dates. 44 | Plot has two Y axis. Colors are compatible with gPredict.''' 45 | fig: plt.Figure = plt.figure() 46 | ax1: plt.Axes = fig.add_subplot() 47 | 48 | ax1.plot(dates, azimuths, 'b') 49 | ax1.set_ylim(0, 360) 50 | ax1.set_ylabel("Azimuth [deg]") 51 | ax1.set_xlabel("Time") 52 | 53 | ax2 = ax1.twinx() 54 | ax2.plot(dates, elevations, 'r') 55 | ax2.set_ylim(0, 90) 56 | ax2.set_ylabel("Elevation [deg]") 57 | 58 | fig.legend(("Azimuth", "Elevation")) 59 | return fig 60 | 61 | 62 | def _produce_azimuth_elevation_polar_figure(dates: Sequence[datetime], 63 | azimuths: Sequence[float], elevations: Sequence[float], 64 | time_step: timedelta) -> plt.Figure: 65 | '''Returns figure with azimuth/elevation polar plot. @time_step define 66 | temporal distance between labels near trajectory.''' 67 | fig: plt.Figure = plt.figure() 68 | ax: plt.Axes = fig.add_subplot(projection="polar") 69 | ax.set_theta_zero_location('N') 70 | ax.set_theta_direction(-1) 71 | ax.set_ylim(90, 0) 72 | 73 | angles = [math.radians(az) for az in azimuths] 74 | 75 | ax.plot(angles, elevations) 76 | 77 | previous_date = None 78 | for date, angle, el in zip(dates, angles, elevations): 79 | if previous_date is not None and date - previous_date < time_step: 80 | continue 81 | ax.annotate(date.strftime("%H:%M"), (angle, el)) 82 | previous_date = date 83 | 84 | return fig 85 | 86 | 87 | def _save_to_png(figure: plt.Figure) -> io.BytesIO: 88 | '''Generate PNG from figure and return binary stream. 89 | @see https://stackoverflow.com/a/50728936''' 90 | output = io.BytesIO() 91 | FigureCanvas(figure).print_png(output) 92 | plt.close(figure) 93 | return output 94 | 95 | 96 | def generate_polar_plot_png(location: Location, tle: Sequence[str], 97 | aos: datetime, los: datetime, 98 | predict_time_step: timedelta = timedelta(seconds=30), 99 | polar_time_step: timedelta = timedelta(minutes=2, seconds=30)): 100 | ''' 101 | Return binary stream with azimuth/elevation polar plot in PNG file. 102 | 103 | Parameters 104 | ========== 105 | location: Location 106 | Location of ground station 107 | tle: two strings 108 | TLE data 109 | aos: datetime.datetime 110 | Acquisition of Signal 111 | los: datetime.datetime 112 | Loss of Signal 113 | predict_time_step: datetime.timedelta, optional 114 | Step between data samples to predict. Lower produces more accurate 115 | plot, but increase computation time 116 | polar_time_step: datetime.timedelta, optional 117 | Temporal distance between date labels on plot. Higher reduces 118 | readability. 119 | ''' 120 | series = _calculate_series(location, tle, aos, los, predict_time_step) 121 | # by_time_figure = _produce_azimuth_elevation_by_time_figure(*series) # type: ignore 122 | figure = _produce_azimuth_elevation_polar_figure(*series, polar_time_step) # type: ignore 123 | 124 | return _save_to_png(figure) 125 | 126 | 127 | def generate_by_time_plot_png(location: Location, tle: Sequence[str], 128 | aos: datetime, los: datetime, 129 | predict_time_step: timedelta = timedelta(seconds=30)): 130 | ''' 131 | Return binary stream with azimuth/elevation by time plot in PNG file. 132 | 133 | Parameters 134 | ========== 135 | location: Location 136 | Location of ground station 137 | tle: two strings 138 | TLE data 139 | aos: datetime.datetime 140 | Acquisition of Signal 141 | los: datetime.datetime 142 | Loss of Signal 143 | predict_time_step: datetime.timedelta, optional 144 | Step between data samples to predict. 
A lower value produces a more accurate 145 | plot but increases computation time 146 | ''' 147 | series = _calculate_series(location, tle, aos, los, predict_time_step) 148 | figure = _produce_azimuth_elevation_by_time_figure(*series) # type: ignore 149 | return _save_to_png(figure) 150 | -------------------------------------------------------------------------------- /server/app/hmac_token.py: -------------------------------------------------------------------------------- 1 | ''' 2 | HMAC-based token processing - creation, parsing and verification. 3 | This module is designed for use on both the server and the station side. 4 | It should have only standard library dependencies. 5 | ''' 6 | 7 | import datetime 8 | from dateutil.parser import isoparse 9 | import hashlib 10 | import hmac 11 | from typing import Dict, Union, Optional 12 | 13 | 14 | AUTHORIZATION_ALGORITHM = "HMAC-SHA256" 15 | SIG_LIFETIME = datetime.timedelta(minutes=2, seconds=30) 16 | 17 | 18 | def _is_file_like(obj) -> bool: 19 | '''Return True if @obj is file-like, otherwise False''' 20 | return hasattr(obj, "read") and hasattr(obj, "seek") 21 | 22 | 23 | def _hash_file(obj): 24 | '''Return the SHA-1 hex digest of a file-like object and rewind it''' 25 | hash_ = hashlib.sha1(obj.read()).hexdigest() 26 | obj.seek(0) 27 | return hash_ 28 | 29 | 30 | def _serialize_single_item(key, value): 31 | '''Serialize a key-value pair for a non-array value''' 32 | if _is_file_like(value): 33 | value = _hash_file(value) 34 | return "%s=%s" % (key, value) 35 | 36 | 37 | def _serialize_iterable_item(key, values): 38 | '''Serialize key-value pairs for an array value''' 39 | return "&".join(_serialize_single_item(key, v) for v in values) 40 | 41 | 42 | def _serialize_body(body: Dict) -> str: 43 | '''Serialize a dictionary to a string''' 44 | serialized_items = [] 45 | for key, value in sorted(body.items(), key=lambda p: p[0]): 46 | if isinstance(value, (list, tuple)): 47 | serialized_item = _serialize_iterable_item(key, value) 48 | else: 49 | serialized_item = _serialize_single_item(key, value) 50 | serialized_items.append(serialized_item) 51 | return "&".join(serialized_items) 52 | 53 | 54 | def _get_sig_basestring(id_: str, body: Dict, date: datetime.datetime): 55 | '''Create the basestring for the signature''' 56 | timestamp = date.isoformat(timespec='seconds') 57 | 58 | body_string = _serialize_body(body) 59 | sig_basestring = ("%s:%s:%s" % (id_, timestamp, body_string)).encode() 60 | return sig_basestring 61 | 62 | 63 | def _get_signature(secret: Union[bytes, bytearray], id_: str, body: Dict, date: datetime.datetime): 64 | '''Create the HMAC signature using the provided parameters.''' 65 | sig_basestring = _get_sig_basestring(id_, body, date) 66 | return hmac.new(secret, sig_basestring, digestmod=hashlib.sha256).hexdigest() 67 | 68 | 69 | def _verify_signature(sig: str, secret: bytes, id_: str, body: Dict, create_date: datetime.datetime): 70 | ''' 71 | Check whether the passed signature was created using the provided parameters. 72 | It does not check whether the timestamp lifetime has expired. 
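A minimal sketch (hypothetical values, assuming `secret` holds the station's secret bytes; _get_signature is the helper defined above):

    create_date = datetime.datetime(2020, 2, 20, 18, 59, 59)
    sig = _get_signature(secret, "1", {"foo": "bar"}, create_date)
    assert _verify_signature(sig, secret, "1", {"foo": "bar"}, create_date)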
73 | ''' 74 | computed_sig = _get_signature(secret, id_, body, create_date) 75 | return sig == computed_sig 76 | 77 | 78 | def get_token(id_: str, secret: Union[bytes, bytearray], body: Dict, date: datetime.datetime): 79 | '''Create an HMAC-based token using the provided parameters.''' 80 | sig = _get_signature(secret, id_, body, date) 81 | token = ",".join((id_, date.isoformat(timespec='seconds'), sig)) 82 | return token 83 | 84 | 85 | def get_authorization_header_value(id_: str, secret: Union[bytes, bytearray], body: Dict, date: Optional[datetime.datetime] = None): 86 | ''' 87 | Shorthand function for creating an Authorization header value 88 | 89 | Parameters 90 | ========== 91 | id_: str 92 | Station ID 93 | secret: bytes or bytearray 94 | Secret of the station 95 | body: dict 96 | Body of the request. Values should be: 97 | - strings 98 | - file-like objects 99 | - lists of strings or file-like objects 100 | date: datetime.datetime 101 | Datetime in UTC (may be naive). Date when the token should be valid. Default: now. 102 | 103 | Returns 104 | ======= 105 | Valid Authorization HTTP header value. 106 | ''' 107 | if date is None: 108 | date = datetime.datetime.now(datetime.timezone.utc) 109 | token = get_token(id_, secret, body, date) 110 | return "%s %s" % (AUTHORIZATION_ALGORITHM, token) 111 | 112 | 113 | def parse_token(token: str): 114 | '''Split a token into id, timestamp and signature''' 115 | id_, timestamp, sig = token.split(",") 116 | create_date = isoparse(timestamp) 117 | return id_, create_date, sig 118 | 119 | 120 | def validate_token(token: str, secret: bytes, body: Dict, check_date=None): 121 | ''' 122 | Check whether the passed token was created using the provided arguments 123 | 124 | Parameters 125 | ========== 126 | token: str 127 | HMAC-based token, created from the id, a timestamp in ISO 8601 format with second precision, 128 | and the HMAC signature, delimited by commas 129 | secret: bytes 130 | Cryptographically secure bytes used by the HMAC algorithm 131 | body: Dict 132 | Dictionary with the key-value pairs that are signed by the HMAC signature in the token. 133 | Keys and values should be strings (or at least primitive types) or file-like objects 134 | check_date: datetime.datetime 135 | The moment at which the token should be valid. If None is passed, now is used. 136 | 137 | Returns 138 | ======= 139 | Pair of validation result and id_ - (Optional[str], str). 140 | The validation result is None if the token verifies successfully, or a string with an error message otherwise. 141 | The id is returned even if validation fails, but you shouldn't trust that it is valid. 142 | 143 | Notes 144 | ===== 145 | The timestamp may be at most 2.5 minutes older or newer 146 | than now, which means that the maximum token lifetime is 5 minutes. 
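For reference, the matching token is produced on the station side with get_token, e.g. (hypothetical values, with `secret` and `body` as in the example below, and `creation_date` being the datetime used for the timestamp): token = get_token("1", secret, body, creation_date)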
147 | 148 | Examples 149 | ======== 150 | secret = b'\x9a\x19=\xfc\xd8\xe6\x13V4tD%\xf1\xfe\x1c\xdd' 151 | body = { 'foo': 4, 'bar':2 } 152 | token = '1,2020-02-21T21:09:45,2a6b59a8971f6c98bafa73abbfc8bc2809f31d205a3b081f8bc1b55a9970e778' 153 | error, id_ = validate_token(token, secret, body) 154 | if error is None: 155 | print("Authorized with %s id" % (id_,)) 156 | else: 157 | print("Unauthorized: %s" % (error,)) 158 | ''' 159 | id_, create_date, sig = parse_token(token) 160 | 161 | if check_date is None: 162 | check_date = datetime.datetime.now(datetime.timezone.utc) 163 | delta = abs(check_date - create_date) 164 | if delta > SIG_LIFETIME: 165 | return "Token expired", id_ 166 | 167 | if _verify_signature(sig, secret, id_, body, create_date): 168 | return None, id_ 169 | else: 170 | return "Invalid signature", id_ 171 | -------------------------------------------------------------------------------- /server/templates/obs.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% from "macros.jinja" import rating_icon %} 3 | 4 | {% block content %} 5 |

Observation {{ obs.obs_id }}

6 | 7 |
8 | 9 |
10 | 11 |

General parameters

12 |

13 |

14 |
15 | AOS 16 |
{{ obs.aos }}
17 | 18 |
19 | TCA 20 |
21 |
{{ obs.tca }}
22 | 23 |
LOS
{{ obs.los }}
24 | {% if obs.notes %} 25 |
Notes
{{ obs.notes }}
26 | {% endif %} 27 | {% if obs.rating %} 28 |
Rating
29 |
30 | {{ rating_icon(obs.rating, border=False) }} 31 |
32 | {% endif %} 33 |
Satellite
{{ sat_name }}, 34 | norad id {{ obs.sat_id }}
35 |
Ground station
{{station.name}}
36 |
37 |

38 |

Configuration

39 |

40 | {% if obs.config %} 41 | {% for c in obs.config %} 42 | {% if obs.config[c] %} 43 |

{{ c }}
{{ obs.config[c] }}
44 | {% endif %} 45 | {% endfor %} 46 | {% endif %} 47 |

48 | {% if obs.tle %} 49 |

Pass plots

50 | 51 | Azimuth/elevation by time 52 | Azimuth/elevation polar 53 | 54 |

Orbital parameters

55 |

56 |

57 |
Orbit overview
{{ orbit.overview }}
58 |
Inclination i
{{ orbit.inc }}
59 |
Major semi-axis a
{{ "%4.1f km" % (orbit.a) }}
60 |
Eccentricity e
{{ orbit.ecc }}
61 |
Apogee
{{ orbit.r_a}}
62 |
Perigee
{{ orbit.r_p}}
63 |
RAAN Ω 66 |
{{orbit.raan}}
67 |
Epoch
{{ orbit.epoch }}
68 |
Period
{{ orbit.period }}
69 | 70 | 71 |
TLE format
72 |
73 |
 74 |         
 75 |             
 76 |             {{ obs.tle[0][0] }}
 77 |             {{ obs.tle[0][2:7] }}{{ obs.tle[0][7] }}
 79 |             {{ obs.tle[0][9:11] }}{{ obs.tle[0][11:14] }}{{ obs.tle[0][14:17] }}
 82 |             {{ obs.tle[0][18:20] }}{{ obs.tle[0][20:32] }}
 84 |             {{ obs.tle[0][33:43] }}
 85 |             {{ obs.tle[0][44:52] }}
 86 |             {{ obs.tle[0][53:61] }}
 88 |             {{ obs.tle[0][62] }}
 90 |             {{ obs.tle[0][64:68] }}{{ obs.tle[0][68] }}
 93 | 
 94 |             
95 | 96 | {{ obs.tle[1][0] }} 97 | {{ obs.tle[1][2:7] }} 98 | {{ obs.tle[1][8:16] }} 99 | {{ obs.tle[1][17:25] }} 100 | {{ obs.tle[1][26:33] }} 101 | {{ obs.tle[1][34:42] }} 102 | {{ obs.tle[1][43:51] }} 103 | {{ obs.tle[1][52:63] }}{{ obs.tle[1][63:68] }}{{ obs.tle[1][68] }} 106 |
107 |
108 |
109 |
110 |

111 | 112 | {% endif %} 113 | 114 |

Products

115 |

Click on images to get full resolution.

116 | {% for file_ in files %} 117 |
118 | {% if file_.rating %} 119 | {{ rating_icon(file_.rating )}} 120 | {% endif %} 121 | 122 |
123 | {% endfor %} 124 | {% include 'pagination.html' %} 125 | 126 | {% if is_owner %} 127 |
128 |
129 | Admin Panel 130 | The following actions are available, because you're the owner of the station:
131 |
134 |
135 |
136 | {% endif %} 137 | 138 | {% endblock %} 139 | -------------------------------------------------------------------------------- /server/templates/obslist.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% from "macros.jinja" import rating_icon %} 3 | 4 | {% block content %} 5 |
6 |

Observations

7 | 8 |
9 |
10 | 11 | 13 |
14 |
15 | 16 | 18 |
19 |
20 | 21 | 32 |
33 |
34 | 35 | 46 |
47 |
48 | 53 |
54 |
55 | 56 | 57 |
58 |
59 | 60 |

61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | {% for obs in obslist %} 76 | 77 | 78 | 79 | 84 | 89 | 96 | 101 | 106 | 112 | 117 | 122 | 123 | {% else %} 124 | 125 | 126 | 127 | {% endfor %} 128 |
ID AOS TCA LOS Satellite TLE Rating Station Frequency Protocol Image
{{ obs.obs_id }}{{ obs.aos|datetime }}{{ obs.tca|datetime }} {% if obs.aos == obs.tca %} 80 | 81 | 82 |
({{ (obs.aos + (obs.los - obs.aos)/2)|datetime }}) 83 | {% endif %}
{{ obs.los|datetime }} 85 | 86 | {{ obs.sat_name }} 87 | 88 | 90 | {% if obs.tle %} 91 | 92 | {% else %} 93 | 94 | {% endif %} 95 | 97 | {% if obs.rating %} 98 | {{ rating_icon(obs.rating) }} 99 | {% endif %} 100 | 102 | 103 | {{ obs.station_name }} 104 | 105 | 107 | {% if obs.config.frequency is defined %} 108 | {{ "%.3f MHz"|format(obs.config.frequency | int/1000000) }} 109 | {% else %} ??? 110 | {% endif %} 111 | 113 | {% if obs.config.protocol is defined %} 114 | {{ obs.config.protocol }} 115 | {% endif %} 116 | 118 | 119 | 120 | 121 |
No data for provided criteria
129 |

130 | {% include 'pagination.html' %} 131 | 132 | {% endblock %} 133 | 134 | {% block addToHead %} 135 | 141 | {% endblock %} 142 | 143 | {% block scripts %} 144 | 223 | {% endblock %} -------------------------------------------------------------------------------- /server/app/controllers/obs.py: -------------------------------------------------------------------------------- 1 | from flask import abort, render_template 2 | 3 | from app import app 4 | from app.repository import ObservationId, Repository, Observation 5 | from app.pagination import use_pagination 6 | from math import floor 7 | from app.utils import strfdelta 8 | from flask_login import current_user 9 | 10 | # from tletools import TLE 11 | from pyorbital.orbital import Orbital, A as EARTH_RADIUS 12 | from astropy import units as u 13 | import os 14 | 15 | 16 | @app.route('/obs/') 17 | @use_pagination(5) 18 | def obs(obs_id: ObservationId = None, limit_and_offset=None): 19 | if obs_id is None: 20 | abort(300, description="ID is required") 21 | return 22 | 23 | repository = Repository() 24 | with repository.transaction(): 25 | observation = repository.read_observation(obs_id) 26 | 27 | orbit = None 28 | 29 | if observation is None: 30 | abort(404, "Observation not found") 31 | 32 | files = repository.read_observation_files(observation["obs_id"], 33 | **limit_and_offset) 34 | files_count = repository.count_observation_files(obs_id) 35 | satellite = repository.read_satellite(observation["sat_id"]) 36 | 37 | orbit = observation 38 | if observation['tle'] is not None: 39 | # observation['tle'] is always an array of exactly 2 strings. 40 | orbit = parse_tle(*observation['tle'], satellite["sat_name"]) 41 | 42 | station = repository.read_station(observation["station_id"]) 43 | 44 | # Now tweak some observation parameters to make them more human readable 45 | observation = human_readable_obs(observation) 46 | 47 | # Now determine if there is a logged user and if there is, if this user is the owner of this 48 | # station. If he is, we should show the admin panel. 49 | user_id = 0 50 | owner = False 51 | if current_user.is_authenticated: 52 | user_id = current_user.get_id() 53 | 54 | # Check if the current user is the owner of the station. 55 | station_id = station['station_id'] 56 | 57 | owner = repository.is_station_owner(user_id, station_id) 58 | 59 | return 'obs.html', dict(obs=observation, files=files, 60 | sat_name=satellite["sat_name"], item_count=files_count, orbit=orbit, station=station, is_owner=owner) 61 | 62 | 63 | def calculate_orbit_parameters(perigee: float, eccentricity: float, earth_radius: float = 0): 64 | """ 65 | Calculates the semi-major axis and apogee. 66 | 67 | Parameters: 68 | perigee (float): The perigee distance. If using altitude, pass the altitude. 69 | eccentricity (float): The orbit's eccentricity. 70 | earth_radius (float): (Optional) Earth's radius to add to the altitude. 71 | Defaults to 0 if perigee is already the distance from the center. 
72 | 73 | Returns: 74 | tuple: (semi_major_axis, apogee) 75 | """ 76 | # If perigee is provided as altitude, convert it to distance from the center 77 | r_p = perigee + earth_radius 78 | 79 | # Calculate the semi-major axis using r_p = a (1 - e) 80 | a = r_p / (1 - eccentricity) 81 | 82 | # Calculate the apogee: r_a = a (1 + e) 83 | r_a = a * (1 + eccentricity) 84 | 85 | return a, r_a 86 | 87 | 88 | def parse_tle(tle1: str, tle2: str, name: str) -> dict: 89 | """ Parses orbital data in TLE format and returns a dictionary with printable orbital elements 90 | and other parameters.""" 91 | 92 | # Create Orbital object from TLE data 93 | orb = Orbital(name, line1=tle1, line2=tle2) 94 | 95 | # Get orbital elements 96 | elements = orb.orbit_elements 97 | 98 | # Calculate period in minutes and seconds 99 | period_minutes = elements.period # Period is returned in minutes 100 | m = floor(period_minutes) 101 | s = (period_minutes - m) * 60 102 | 103 | # Calculate apogee and perigee (in km) 104 | perigee = float(elements.perigee) # Already in km above Earth's surface 105 | semi_major, apogee = calculate_orbit_parameters(perigee, elements.excentricity, EARTH_RADIUS) 106 | 107 | # Format the orbital parameters 108 | orb_dict = {} 109 | orb_dict["overview"] = f"Satellite {name} at {apogee:.1f}km x {perigee:.1f}km" 110 | orb_dict["inc"] = f"{elements.inclination:.1f} deg" 111 | orb_dict["ecc"] = elements.excentricity 112 | orb_dict["a"] = semi_major # in km above the center of the earth 113 | orb_dict["r_a"] = f"{(apogee - EARTH_RADIUS):.1f} km above surface" 114 | orb_dict["r_p"] = f"{(perigee):.1f} km above surface" 115 | orb_dict["raan"] = f"{elements.right_ascension:.1f} deg" 116 | orb_dict["period"] = f"{period_minutes:.1f} min ({m}m {int(s)}s)" 117 | orb_dict["epoch"] = str(elements.epoch) + " UTC" # orb.tle.epoch.strftime("%Y-%m-%d %H:%M:%S") + " UTC" 118 | 119 | return orb_dict 120 | 121 | 122 | def human_readable_obs(obs: Observation) -> Observation: 123 | """Gets an observation and formats some of its parameters to make it more human readable. 124 | Returns an observation.""" 125 | 126 | aos_los_duration = obs["los"] - obs["aos"] 127 | tca_correction = "" 128 | 129 | if obs["aos"] == obs["tca"]: 130 | obs["tca"] = obs["aos"] + aos_los_duration / 2 131 | tca_correction = " (corrected, the original observation record incorrectly says TCA = AOS)" 132 | 133 | aos_tca_duration = obs["tca"] - obs["aos"] 134 | 135 | if "config" in obs and obs['config'] and "recipe" in obs["config"] and "', methods=["GET", "POST"]) 145 | def obs_delete(obs_id: ObservationId = None): 146 | 147 | # First check if such an observation even exists. 148 | repository = Repository() 149 | observation = repository.read_observation(obs_id) 150 | if observation is None: 151 | return render_template('obs_delete.html', status=["There is no observation %s" % obs_id], obs_id=obs_id) 152 | 153 | # Second, check if the guy is logged in. 154 | if not current_user.is_authenticated: 155 | return render_template('obs_delete.html', status=["You are not logged in, you can't delete anything."], obs_id=obs_id) 156 | 157 | # Ok, at least this guy is logged in. Let's check who he is. 158 | user_id = current_user.get_id() 159 | 160 | # Check if the current user is the owner of the station. 
161 | station = repository.read_station(observation["station_id"]) 162 | station_id = station['station_id'] 163 | 164 | owner = repository.is_station_owner(user_id, station_id) 165 | 166 | if not owner: 167 | return render_template('obs_delete.html', status=["You are not the owner of station %s, you can't delete observation %s." 168 | % (station.name, obs_id)], obs_id=obs_id) 169 | 170 | # If you got that far, this means the guy is logged in, he's the owner and is deleting his own observation. 171 | 172 | status = obs_delete_db_and_disk(repository, obs_id) 173 | return render_template('obs_delete.html', status=status, obs_id=obs_id) 174 | 175 | 176 | def obs_delete_db_and_disk(repository: Repository, obs_id: ObservationId): 177 | 178 | app.logger.info("About to delete observation %s and all its files" % obs_id) 179 | 180 | # Step 1: Create a list of files to be deleted. There may be several products. 181 | products = repository.read_observation_files(obs_id) 182 | obs = repository.read_observation(obs_id) 183 | files = [[f['filename'], "product"] for f in products] 184 | 185 | # Step 2: thumbnail is stored with the observation. There's at most one thumbnail. 186 | files.append([os.path.join("thumbs", obs['thumbnail']), "thumbnail"]) 187 | 188 | # Step 3: And there are two charts: alt-az pass chart and polar pass chart. 189 | files.append([os.path.join("charts", "by_time-%s.png" % obs_id), "pass chart"]) 190 | files.append([os.path.join("charts", "polar-%s.png" % obs_id), "polar pass chart"]) 191 | 192 | # All those files are stored in this dir 193 | root = app.config["storage"]['image_root'] 194 | 195 | status = [] 196 | for f in files: 197 | path = os.path.join(root, f[0]) 198 | app.logger.info("Trying to delete [%s]" % path) 199 | try: 200 | os.remove(path) 201 | status.append("Deleted %s file %s." % (f[1], f[0])) 202 | except Exception as ex: 203 | status.append("Failed to delete %s file: %s, reason: %s" % (f[1], path, repr(ex))) 204 | 205 | # Step 4: delete entries in the db 206 | repository.delete_observation(obs_id) 207 | status.append("DB removal complete.") 208 | 209 | return status 210 | -------------------------------------------------------------------------------- /server/tests/test_web_interface.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import os.path 4 | import shutil 5 | import unittest 6 | import logging 7 | from logging import FileHandler 8 | 9 | from app import app 10 | from app.hmac_token import get_authorization_header_value 11 | from app.repository import Repository 12 | from tests.utils import check_output 13 | from tests.dbtest import setup_database_test_case 14 | 15 | 16 | IMAGE_ROOT = "tests/images" 17 | LOG_FILE = "test.log" 18 | 19 | # Setting the secret is necessary for storing login details is a session. 
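# A note on the header format (illustrative, based on app/hmac_token.py): the tests below
# build their Authorization headers with get_authorization_header_value(), which yields a
# value of the form "HMAC-SHA256 <station id>,<ISO 8601 timestamp>,<hex HMAC-SHA256 signature>",
# signed over the request body as described in app/authorize_station.py.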
20 | app.config["SECRET_KEY"] = "test secret" 21 | 22 | 23 | class BasicTests(unittest.TestCase): 24 | def setUp(self): 25 | self.db = setup_database_test_case() 26 | database_config = self.db.__enter__() 27 | app.config['TESTING'] = True 28 | app.config['WTF_CSRF_ENABLED'] = False 29 | app.config['DEBUG'] = False 30 | app.config["database"] = database_config 31 | app.config["storage"] = {} 32 | app.config["storage"]["image_root"] = IMAGE_ROOT 33 | os.makedirs(os.path.join(IMAGE_ROOT, "thumbs"), exist_ok=True) 34 | os.makedirs(os.path.join(IMAGE_ROOT, "charts"), exist_ok=True) 35 | 36 | app.config["view"] = {} 37 | app.config["view"]["items_per_page"] = 100 38 | 39 | self.app = app.test_client() 40 | 41 | # This is a test. Log EVERYTHING. 42 | logHandler = FileHandler(LOG_FILE) 43 | logHandler.setLevel(logging.DEBUG) 44 | app.logger.setLevel(logging.DEBUG) 45 | app.logger.addHandler(logHandler) 46 | 47 | def tearDown(self): 48 | self.db.__exit__(None, None, None) 49 | shutil.rmtree(IMAGE_ROOT, ignore_errors=True) 50 | os.remove(LOG_FILE) 51 | 52 | def test_main_page(self): 53 | response = self.app.get('/', follow_redirects=True) 54 | self.assertEqual(response.status_code, 200) 55 | # todo: check actual contents of the response 56 | 57 | def test_obslist(self): 58 | response = self.app.get('/obslist', follow_redirects=True) 59 | self.assertEqual(response.status_code, 200) 60 | 61 | def test_obs(self): 62 | response = self.app.get('/obs/750', follow_redirects=True) 63 | self.assertEqual(response.status_code, 200) 64 | 65 | def test_obs_missing(self): 66 | response = self.app.get('/obs/1', follow_redirects=True) 67 | self.assertEqual(response.status_code, 404) 68 | 69 | def test_rating_missing_for_observation(self): 70 | response = self.app.get('/obs/1276', follow_redirects=True) 71 | self.assertEqual(response.status_code, 200) 72 | 73 | def test_stations(self): 74 | response = self.app.get('/stations', follow_redirects=True) 75 | self.assertEqual(response.status_code, 200) 76 | 77 | def test_station(self): 78 | response = self.app.get('/station/1', follow_redirects=True) 79 | self.assertEqual(response.status_code, 200) 80 | 81 | def test_receive_obs(self): 82 | repository = Repository() 83 | station_id = 1 84 | secret = repository.read_station_secret(station_id) 85 | 86 | data = { 87 | 'aos': datetime.datetime(2020, 3, 28, 12, 00), 88 | 'tca': datetime.datetime(2020, 3, 28, 12, 15), 89 | 'los': datetime.datetime(2020, 3, 28, 12, 30), 90 | 'sat': 'NOAA 15', 91 | 'config': '{"text":"note text"}', 92 | "file0": open("tests/x.png", 'rb'), 93 | "file1": open("tests/x.png", 'rb'), 94 | "tle": [ 95 | # Include trailling character 96 | "1 25544U 98067A 08264.51782528 -.00002182 00000-0 -11606-4 0 2927 ", 97 | "2 25544 51.6416 247.4627 0006703 130.5360 325.0288 15.72125391563537" 98 | ], 99 | "rating": 0.75 100 | } 101 | 102 | header_value = get_authorization_header_value(str(station_id), secret, 103 | data) 104 | headers = { 105 | 'Authorization': header_value 106 | } 107 | response = self.app.post('/receive', data=data, headers=headers) 108 | self.assertEqual(response.status_code, 201) 109 | 110 | def file_count(dir_): return len( 111 | [f for f in os.listdir(dir_) 112 | if os.path.isfile(os.path.join(dir_, f))] 113 | ) 114 | self.assertEqual(file_count(IMAGE_ROOT), 2) 115 | self.assertEqual(file_count(os.path.join(IMAGE_ROOT, "thumbs")), 1) 116 | chart_dir = os.path.join(IMAGE_ROOT, "charts") 117 | self.assertEqual(file_count(chart_dir), 2) 118 | chart_files = sorted(os.listdir(chart_dir)) 119 | 
self.assertEqual(chart_files, ["by_time-1.png", "polar-1.png"]) 120 | # Todo: Need to check if the DB entries have been added. 121 | 122 | # Check if there are appropriate entries in the log file. 123 | self.check_log(["0-tests_x.png written to tests/images", 124 | "1-tests_x.png written to tests/images"]) 125 | 126 | def check_log(self, strings): 127 | """Checks if there are specific strings present in the log file""" 128 | 129 | # Check that the log is there 130 | self.assertTrue(os.path.isfile(LOG_FILE)) 131 | with open(LOG_FILE, 'r') as log_file: 132 | log = log_file.read() 133 | check_output(self, log, strings) 134 | 135 | def test_receive_obs_error(self): 136 | """Test error handling in the receive routine.""" 137 | 138 | repository = Repository() 139 | station_id = 1 140 | 141 | # Check what happens if the path is misconfigured (or the server is not able to write file) 142 | self.app.root = app.config["storage"]['image_root'] = '/nonexistent/path' 143 | secret = repository.read_station_secret(station_id) 144 | 145 | data = { 146 | 'aos': datetime.datetime(2020, 3, 28, 12, 00), 147 | 'tca': datetime.datetime(2020, 3, 28, 12, 15), 148 | 'los': datetime.datetime(2020, 3, 28, 12, 30), 149 | 'sat': 'NOAA 15', 150 | # 'notes': optional, 151 | "file0": open("tests/x.png", 'rb'), 152 | "file1": open("tests/x.png", 'rb') 153 | } 154 | 155 | header_value = get_authorization_header_value(str(station_id), secret, 156 | data) 157 | headers = { 158 | 'Authorization': header_value 159 | } 160 | response = self.app.post('/receive', data=data, headers=headers) 161 | 162 | self.assertEqual(response.status_code, 503) 163 | 164 | # Check if there's appropriate entry in the log file. 165 | self.check_log(["Failed to write /nonexistent/path/", "tests_x.png (image_root=/nonexistent/path)"]) 166 | 167 | def test_login(self): 168 | """Tests login mechanism (invalid username, password, disabled account, successful login).""" 169 | 170 | # CASE 1 (not logged in): Make sure the page contains a form field. 171 | response = self.app.post('/login', follow_redirects=True) 172 | self.assertEqual(response.status_code, 200) 173 | check_output(self, response.data, ['