├── migrations ├── README ├── script.py.mako ├── alembic.ini ├── versions │ └── a432eb7353a3_users_table.py └── env.py ├── README.md ├── .dockerignore ├── Dockerfile ├── boot.sh ├── .travis.yml ├── tox.ini ├── .gitignore ├── requirements.txt ├── config.py ├── LICENSE ├── tests.py └── app.py /migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | MicroFlack's Users Service 2 | ========================== 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .* 2 | venv 3 | tests* 4 | tox.ini 5 | __pycache__ 6 | Dockerfile 7 | build.sh 8 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6-alpine 2 | ARG SERVICE_NAME 3 | ARG SERVICE_VERSION 4 | ENV SERVICE_NAME $SERVICE_NAME 5 | ENV SERVICE_VERSION $SERVICE_VERSION 6 | RUN mkdir /app 7 | COPY . 
/app 8 | RUN touch /app/.env 9 | RUN pip install --find-links /app/wheels -r /app/requirements.txt 10 | RUN pip install pymysql gunicorn 11 | WORKDIR /app 12 | EXPOSE 5000 13 | CMD ["./boot.sh"] 14 | -------------------------------------------------------------------------------- /boot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | # sync database to latest migration 4 | FLASK_APP=app.py flask db upgrade 5 | 6 | # start service discovery task in the background 7 | if [ "$SERVICE_URL" != "" ]; then 8 | python -c "from microflack_common.container import register; register()" & 9 | fi 10 | 11 | # run web server 12 | exec gunicorn -b 0.0.0.0:5000 --access-logfile - --error-logfile - app:app 13 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | env: 3 | global: 4 | - WHEELHOUSE=$PWD/../wheels 5 | matrix: 6 | include: 7 | - python: 3.4 8 | - python: 3.5 9 | - python: 3.6 10 | install: 11 | - git clone https://github.com/miguelgrinberg/microflack_common 12 | - mkdir $WHEELHOUSE 13 | - pip install --upgrade pip wheel tox 14 | - cd microflack_common; ./mkwheel all; cd .. 
15 | - pip install --find-links $WHEELHOUSE -r requirements.txt 16 | script: 17 | - tox 18 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist=flake8,tests 3 | skipsdist=true 4 | 5 | [testenv] 6 | basepython=python3 7 | setenv= 8 | PIP_FIND_LINKS={env:WHEELHOUSE} 9 | 10 | [testenv:tests] 11 | deps= 12 | mock 13 | coverage 14 | commands= 15 | pip install -r requirements.txt 16 | coverage erase 17 | coverage run --branch --include=app.py ./tests.py 18 | coverage report --show-missing 19 | 20 | [testenv:flake8] 21 | deps= 22 | flake8 23 | commands= 24 | flake8 --ignore=E402 app.py tests.py 25 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Packages 7 | *.egg 8 | *.egg-info 9 | dist 10 | build 11 | eggs 12 | parts 13 | bin 14 | var 15 | sdist 16 | develop-eggs 17 | .installed.cfg 18 | lib 19 | lib64 20 | __pycache__ 21 | 22 | # Installer logs 23 | pip-log.txt 24 | 25 | # Unit test / coverage reports 26 | .coverage 27 | .tox 28 | nosetests.xml 29 | 30 | # Translations 31 | *.mo 32 | 33 | # Mr Developer 34 | .mr.developer.cfg 35 | .project 36 | .pydevproject 37 | 38 | # SQLite databases 39 | *.sqlite 40 | 41 | # Virtual environment 42 | .python-version 43 | venv 44 | 45 | # Redis 46 | dump.rdb 47 | 48 | # environment 49 | .env 50 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==0.8.10 2 | cachetools==2.0.0 3 | click==6.7 4 | dnspython==1.15.0 5 | docker==2.1.0 6 | docker-pycreds==0.2.1 7 | Flask==0.12.1 8 | Flask-HTTPAuth==3.2.2 9 | Flask-Migrate==2.0.3 10 | Flask-Script==2.0.5 11 | Flask-SocketIO==2.8.6 12 | Flask-SQLAlchemy==2.1 13 | itsdangerous==0.24 14 | Jinja2==2.9.6 15 | Mako==1.0.6 16 | MarkupSafe==1.0 17 | MicroFlack-Common==0.3 18 | PyJWT==1.4.2 19 | python-dotenv==0.6.4 20 | python-editor==1.0.3 21 | python-engineio==3.8.2.post1 22 | python-etcd==0.4.5 23 | python-socketio==1.7.4 24 | redis==2.10.5 25 | requests==2.20.0 26 | six==1.10.0 27 | SQLAlchemy==1.1.5 28 | urllib3==1.24.2 29 | websocket-client==0.40.0 30 
| Werkzeug==0.15.3 31 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from dotenv import load_dotenv 4 | 5 | basedir = os.path.abspath(os.path.dirname(__file__)) 6 | env = os.path.join(basedir, '.env') 7 | if os.path.exists(env): 8 | load_dotenv(env) 9 | else: 10 | print('Warning: .env file not found') 11 | 12 | 13 | class Config(object): 14 | DEBUG = False 15 | TESTING = False 16 | SECRET_KEY = os.environ.get('SECRET_KEY', 17 | '51f52814-0071-11e6-a247-000ec6c2372c') 18 | JWT_SECRET_KEY = os.environ.get('JWT_SECRET_KEY', SECRET_KEY) 19 | SQLALCHEMY_DATABASE_URI = os.environ.get( 20 | 'DATABASE_URL', 'sqlite:///' + os.path.join(basedir, 'users.sqlite')) 21 | SQLALCHEMY_TRACK_MODIFICATIONS = False 22 | 23 | 24 | class DevConfig(Config): 25 | DEBUG = True 26 | 27 | 28 | class TestConfig(Config): 29 | TESTING = True 30 | SQLALCHEMY_DATABASE_URI = 'sqlite://' 31 | 32 | 33 | class ProdConfig(Config): 34 | pass 35 | -------------------------------------------------------------------------------- /migrations/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 
2 | 3 | [alembic] 4 | # template used to generate migration files 5 | # file_template = %%(rev)s_%%(slug)s 6 | 7 | # set to 'true' to run the environment during 8 | # the 'revision' command, regardless of autogenerate 9 | # revision_environment = false 10 | 11 | 12 | # Logging configuration 13 | [loggers] 14 | keys = root,sqlalchemy,alembic 15 | 16 | [handlers] 17 | keys = console 18 | 19 | [formatters] 20 | keys = generic 21 | 22 | [logger_root] 23 | level = WARN 24 | handlers = console 25 | qualname = 26 | 27 | [logger_sqlalchemy] 28 | level = WARN 29 | handlers = 30 | qualname = sqlalchemy.engine 31 | 32 | [logger_alembic] 33 | level = INFO 34 | handlers = 35 | qualname = alembic 36 | 37 | [handler_console] 38 | class = StreamHandler 39 | args = (sys.stderr,) 40 | level = NOTSET 41 | formatter = generic 42 | 43 | [formatter_generic] 44 | format = %(levelname)-5.5s [%(name)s] %(message)s 45 | datefmt = %H:%M:%S 46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Miguel Grinberg 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /migrations/versions/a432eb7353a3_users_table.py: -------------------------------------------------------------------------------- 1 | """users table 2 | 3 | Revision ID: a432eb7353a3 4 | Revises: 5 | Create Date: 2017-02-25 23:40:17.946256 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'a432eb7353a3' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('users', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('created_at', sa.Integer(), nullable=True), 24 | sa.Column('updated_at', sa.Integer(), nullable=True), 25 | sa.Column('last_seen_at', sa.Integer(), nullable=True), 26 | sa.Column('nickname', sa.String(length=32), nullable=False), 27 | sa.Column('password_hash', sa.String(length=256), nullable=False), 28 | sa.Column('token', sa.String(length=64), nullable=True), 29 | sa.Column('online', sa.Boolean(), nullable=True), 30 | sa.PrimaryKeyConstraint('id'), 31 | sa.UniqueConstraint('nickname'), 32 | sa.UniqueConstraint('token') 33 | ) 34 | # ### end Alembic commands ### 35 | 36 | 37 | def downgrade(): 38 | # ### commands auto generated by Alembic - please adjust! 
### 39 | op.drop_table('users') 40 | # ### end Alembic commands ### 41 | -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | from __future__ import with_statement 2 | from alembic import context 3 | from sqlalchemy import engine_from_config, pool 4 | from logging.config import fileConfig 5 | import logging 6 | 7 | # this is the Alembic Config object, which provides 8 | # access to the values within the .ini file in use. 9 | config = context.config 10 | 11 | # Interpret the config file for Python logging. 12 | # This line sets up loggers basically. 13 | fileConfig(config.config_file_name) 14 | logger = logging.getLogger('alembic.env') 15 | 16 | # add your model's MetaData object here 17 | # for 'autogenerate' support 18 | # from myapp import mymodel 19 | # target_metadata = mymodel.Base.metadata 20 | from flask import current_app 21 | config.set_main_option('sqlalchemy.url', 22 | current_app.config.get('SQLALCHEMY_DATABASE_URI')) 23 | target_metadata = current_app.extensions['migrate'].db.metadata 24 | 25 | # other values from the config, defined by the needs of env.py, 26 | # can be acquired: 27 | # my_important_option = config.get_main_option("my_important_option") 28 | # ... etc. 29 | 30 | 31 | def run_migrations_offline(): 32 | """Run migrations in 'offline' mode. 33 | 34 | This configures the context with just a URL 35 | and not an Engine, though an Engine is acceptable 36 | here as well. By skipping the Engine creation 37 | we don't even need a DBAPI to be available. 38 | 39 | Calls to context.execute() here emit the given string to the 40 | script output. 41 | 42 | """ 43 | url = config.get_main_option("sqlalchemy.url") 44 | context.configure(url=url) 45 | 46 | with context.begin_transaction(): 47 | context.run_migrations() 48 | 49 | 50 | def run_migrations_online(): 51 | """Run migrations in 'online' mode. 
52 | 53 | In this scenario we need to create an Engine 54 | and associate a connection with the context. 55 | 56 | """ 57 | 58 | # this callback is used to prevent an auto-migration from being generated 59 | # when there are no changes to the schema 60 | # reference: http://alembic.readthedocs.org/en/latest/cookbook.html 61 | def process_revision_directives(context, revision, directives): 62 | if getattr(config.cmd_opts, 'autogenerate', False): 63 | script = directives[0] 64 | if script.upgrade_ops.is_empty(): 65 | directives[:] = [] 66 | logger.info('No changes in schema detected.') 67 | 68 | engine = engine_from_config(config.get_section(config.config_ini_section), 69 | prefix='sqlalchemy.', 70 | poolclass=pool.NullPool) 71 | 72 | connection = engine.connect() 73 | context.configure(connection=connection, 74 | target_metadata=target_metadata, 75 | process_revision_directives=process_revision_directives, 76 | **current_app.extensions['migrate'].configure_args) 77 | 78 | try: 79 | with context.begin_transaction(): 80 | context.run_migrations() 81 | finally: 82 | connection.close() 83 | 84 | if context.is_offline_mode(): 85 | run_migrations_offline() 86 | else: 87 | run_migrations_online() 88 | -------------------------------------------------------------------------------- /tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | os.environ['FLASK_CONFIG'] = 'test' 4 | 5 | import mock 6 | import time 7 | import unittest 8 | 9 | from microflack_common.auth import generate_token 10 | from microflack_common.test import FlackTestCase 11 | 12 | import app 13 | app.socketio = mock.MagicMock() 14 | from app import app, db, User, socketio 15 | 16 | 17 | class UserTests(FlackTestCase): 18 | def setUp(self): 19 | self.ctx = app.app_context() 20 | self.ctx.push() 21 | db.drop_all() # just in case 22 | db.create_all() 23 | self.client = app.test_client() 24 | 25 | def tearDown(self): 26 | 
db.drop_all() 27 | self.ctx.pop() 28 | 29 | def test_user(self): 30 | # get users without auth 31 | r, s, h = self.get('/api/users') 32 | self.assertEqual(s, 200) 33 | 34 | # get users with bad auth 35 | r, s, h = self.get('/api/users', token_auth='bad-token') 36 | self.assertEqual(s, 401) 37 | 38 | # create a new user 39 | r, s, h = self.post('/api/users', data={'nickname': 'foo', 40 | 'password': 'bar'}) 41 | self.assertEqual(s, 201) 42 | self.assertEqual(socketio.emit.call_args[0][0], 'updated_model') 43 | self.assertEqual(socketio.emit.call_args[0][1]['class'], 'User') 44 | self.assertEqual(socketio.emit.call_args[0][1]['model']['nickname'], 45 | 'foo') 46 | url = h['Location'] 47 | 48 | # create a duplicate user 49 | r, s, h = self.post('/api/users', data={'nickname': 'foo', 50 | 'password': 'baz'}) 51 | self.assertEqual(s, 400) 52 | 53 | # create an incomplete user 54 | r, s, h = self.post('/api/users', data={'nickname': 'foo1'}) 55 | self.assertEqual(s, 400) 56 | 57 | # request a token 58 | token = generate_token(1) 59 | 60 | # get user 61 | r, s, h = self.get(url) 62 | self.assertEqual(s, 200) 63 | self.assertEqual(r['nickname'], 'foo') 64 | self.assertEqual('http://localhost' + r['_links']['self'], url) 65 | self.assertEqual(r['_links']['tokens'], '/api/tokens') 66 | 67 | # modify nickname 68 | r, s, h = self.put(url, data={'nickname': 'foo2'}, token_auth=token) 69 | self.assertEqual(s, 204) 70 | self.assertEqual(socketio.emit.call_args[0][0], 'updated_model') 71 | self.assertEqual(socketio.emit.call_args[0][1]['class'], 'User') 72 | self.assertEqual(socketio.emit.call_args[0][1]['model']['nickname'], 73 | 'foo2') 74 | 75 | # create second user 76 | r, s, h = self.post('/api/users', data={'nickname': 'bar', 77 | 'password': 'baz'}) 78 | self.assertEqual(s, 201) 79 | url2 = h['Location'] 80 | 81 | # edit second user with first user token 82 | r, s, h = self.put(url2, data={'nickname': 'bar2'}, token_auth=token) 83 | self.assertEqual(s, 403) 84 | 85 | # 
check new nickname 86 | r, s, h = self.get(url) 87 | self.assertEqual(r['nickname'], 'foo2') 88 | 89 | # get list of users 90 | r, s, h = self.get('/api/users') 91 | self.assertEqual(s, 200) 92 | self.assertEqual(len(r['users']), 2) 93 | 94 | def test_user_online_offline(self): 95 | # create a couple of users and a token 96 | r, s, h = self.post('/api/users', data={'nickname': 'foo', 97 | 'password': 'foo'}) 98 | self.assertEqual(s, 201) 99 | r, s, h = self.post('/api/users', data={'nickname': 'bar', 100 | 'password': 'bar'}) 101 | self.assertEqual(s, 201) 102 | r, s, h = self.get('/api/users/me', basic_auth='foo:foo') 103 | self.assertEqual(s, 200) 104 | token = generate_token(1) 105 | 106 | # update online status 107 | User.find_offline_users() 108 | 109 | # get list of offline users 110 | r, s, h = self.get('/api/users?online=0', token_auth=token) 111 | self.assertEqual(s, 200) 112 | self.assertEqual(len(r['users']), 1) 113 | self.assertEqual(r['users'][0]['nickname'], 'bar') 114 | 115 | # get list of online users 116 | r, s, h = self.get('/api/users?online=1', token_auth=token) 117 | self.assertEqual(s, 200) 118 | self.assertEqual(len(r['users']), 1) 119 | self.assertEqual(r['users'][0]['nickname'], 'foo') 120 | 121 | # alter last seen time of the two users 122 | user = User.query.filter_by(nickname='foo').first() 123 | user.last_seen_at = int(time.time()) - 65 124 | db.session.add(user) 125 | user = User.query.filter_by(nickname='bar').first() 126 | user.last_seen_at = int(time.time()) - 1000 127 | db.session.add(user) 128 | db.session.commit() 129 | 130 | # update online status 131 | User.find_offline_users() 132 | 133 | # get list of offline users 134 | r, s, h = self.get('/api/users?online=0', token_auth=token) 135 | self.assertEqual(s, 200) 136 | self.assertEqual(len(r['users']), 1) 137 | self.assertEqual(r['users'][0]['nickname'], 'bar') 138 | 139 | # get list of online users (only foo, who owns the token) 140 | r, s, h = self.get('/api/users?online=1', 
token_auth=token) 141 | self.assertEqual(s, 200) 142 | self.assertEqual(len(r['users']), 1) 143 | self.assertEqual(r['users'][0]['nickname'], 'foo') 144 | 145 | # get users updated since a timestamp 146 | since = r['users'][0]['updated_at'] 147 | with mock.patch('app.time.time', return_value=since + 10): 148 | r, s, h = self.get('/api/users?updated_since=' + str(since + 2), 149 | token_auth=token) 150 | self.assertEqual(s, 200) 151 | self.assertEqual(len(r['users']), 1) 152 | self.assertEqual(r['users'][0]['nickname'], 'foo') 153 | 154 | # update the other user 155 | user = User.query.filter_by(nickname='bar').first() 156 | user.password = 'bar2' 157 | db.session.add(user) 158 | db.session.commit() 159 | 160 | # get updated users again 161 | with mock.patch('app.time.time', return_value=since + 10): 162 | r, s, h = self.get('/api/users?updated_since=' + str(since - 1), 163 | token_auth=token) 164 | self.assertEqual(s, 200) 165 | self.assertEqual(len(r['users']), 2) 166 | self.assertEqual(r['users'][0]['nickname'], 'bar') 167 | self.assertEqual(r['users'][1]['nickname'], 'foo') 168 | 169 | # set one user offline 170 | r, s, h = self.delete('/api/users/me', token_auth=token) 171 | self.assertEqual(s, 204) 172 | user = User.query.filter_by(nickname='foo').first() 173 | self.assertFalse(user.online) 174 | 175 | # set one user online 176 | r, s, h = self.put('/api/users/me', token_auth=token) 177 | self.assertEqual(s, 204) 178 | user = User.query.filter_by(nickname='foo').first() 179 | self.assertTrue(user.online) 180 | 181 | 182 | if __name__ == '__main__': 183 | unittest.main(verbosity=2) 184 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | import time 4 | 5 | from flask import Flask, jsonify, request, abort, g 6 | from flask_httpauth import HTTPBasicAuth 7 | from flask_migrate import Migrate 8 | from 
flask_sqlalchemy import SQLAlchemy 9 | from flask_socketio import SocketIO 10 | from werkzeug.security import generate_password_hash, check_password_hash 11 | 12 | import config 13 | from microflack_common.auth import token_auth, token_optional_auth 14 | from microflack_common.utils import timestamp, url_for 15 | 16 | app = Flask(__name__) 17 | config_name = os.environ.get('FLASK_CONFIG', 'dev') 18 | app.config.from_object(getattr(config, config_name.title() + 'Config')) 19 | 20 | db = SQLAlchemy(app) 21 | migrate = Migrate(app, db) 22 | basic_auth = HTTPBasicAuth() 23 | 24 | message_queue = 'redis://' + os.environ['REDIS'] if 'REDIS' in os.environ \ 25 | else None 26 | if message_queue: 27 | socketio = SocketIO(message_queue=message_queue) 28 | else: 29 | socketio = None 30 | 31 | 32 | @basic_auth.verify_password 33 | def verify_password(nickname, password): 34 | """Password verification callback.""" 35 | if not nickname or not password: 36 | return False 37 | user = User.query.filter_by(nickname=nickname).first() 38 | if user is None or not user.verify_password(password): 39 | return False 40 | user.ping() 41 | db.session.commit() 42 | g.current_user = user 43 | return True 44 | 45 | 46 | @basic_auth.error_handler 47 | def password_error(): 48 | """Return a 401 error to the client.""" 49 | # To avoid login prompts in the browser, use the "Bearer" realm. 
50 | return (jsonify({'error': 'authentication required'}), 401, 51 | {'WWW-Authenticate': 'Bearer realm="Authentication Required"'}) 52 | 53 | 54 | class User(db.Model): 55 | """The User model.""" 56 | __tablename__ = 'users' 57 | id = db.Column(db.Integer, primary_key=True) 58 | created_at = db.Column(db.Integer, default=timestamp) 59 | updated_at = db.Column(db.Integer, default=timestamp, onupdate=timestamp) 60 | last_seen_at = db.Column(db.Integer, default=timestamp) 61 | nickname = db.Column(db.String(32), nullable=False, unique=True) 62 | password_hash = db.Column(db.String(256), nullable=False) 63 | online = db.Column(db.Boolean, default=False) 64 | 65 | @property 66 | def password(self): 67 | raise AttributeError('password is not a readable attribute') 68 | 69 | @password.setter 70 | def password(self, password): 71 | self.password_hash = generate_password_hash(password) 72 | 73 | def verify_password(self, password): 74 | return check_password_hash(self.password_hash, password) 75 | 76 | def ping(self): 77 | """Marks the user as recently seen and online.""" 78 | self.last_seen_at = timestamp() 79 | self.online = True 80 | 81 | @staticmethod 82 | def create(data): 83 | """Create a new user.""" 84 | user = User() 85 | user.from_dict(data, partial_update=False) 86 | return user 87 | 88 | def from_dict(self, data, partial_update=True): 89 | """Import user data from a dictionary.""" 90 | for field in ['nickname', 'password']: 91 | try: 92 | setattr(self, field, data[field]) 93 | except KeyError: 94 | if not partial_update: 95 | abort(400) 96 | 97 | def to_dict(self): 98 | """Export user to a dictionary.""" 99 | return { 100 | 'id': self.id, 101 | 'created_at': self.created_at, 102 | 'updated_at': self.updated_at, 103 | 'nickname': self.nickname, 104 | 'last_seen_at': self.last_seen_at, 105 | 'online': self.online, 106 | '_links': { 107 | 'self': url_for('get_user', id=self.id), 108 | 'messages': '/api/messages/{}'.format(self.id), 109 | 'tokens': '/api/tokens' 
110 | } 111 | } 112 | 113 | @staticmethod 114 | def find_offline_users(): 115 | """Find users that haven't been active and mark them as offline.""" 116 | users = User.query.filter(User.last_seen_at < timestamp() - 60, 117 | User.online == True).all() # noqa 118 | for user in users: 119 | user.online = False 120 | db.session.add(user) 121 | db.session.commit() 122 | 123 | 124 | @db.event.listens_for(User, 'after_insert') 125 | @db.event.listens_for(User, 'after_update') 126 | def after_user_update(mapper, connection, target): 127 | if socketio is not None: 128 | socketio.emit('updated_model', {'class': target.__class__.__name__, 129 | 'model': target.to_dict()}) 130 | 131 | 132 | @app.before_first_request 133 | def before_first_request(): 134 | """Start a background thread that looks for users that leave.""" 135 | def find_offline_users(): 136 | with app.app_context(): 137 | while True: 138 | User.find_offline_users() 139 | db.session.remove() 140 | time.sleep(5) 141 | 142 | if not app.config['TESTING']: 143 | thread = threading.Thread(target=find_offline_users) 144 | thread.start() 145 | 146 | 147 | @app.before_request 148 | def before_request(): 149 | if hasattr(g, 'jwt_claims') and 'user_id' in g.jwt_claims: 150 | user = User.query.get(g.jwt_claims['user_id']) 151 | if user is None: 152 | abort(500) 153 | user.ping() 154 | db.session.add(user) 155 | db.session.commit() 156 | 157 | 158 | @app.route('/api/users', methods=['POST']) 159 | def new_user(): 160 | """ 161 | Register a new user. 162 | This endpoint is publicly available. 
163 |     """
164 |     user = User.create(request.get_json() or {})
165 |     if User.query.filter_by(nickname=user.nickname).first() is not None:
166 |         abort(400)
167 |     db.session.add(user)
168 |     db.session.commit()
169 |     r = jsonify(user.to_dict())
170 |     r.status_code = 201
171 |     r.headers['Location'] = url_for('get_user', id=user.id)
172 |     return r
173 | 
174 | 
175 | @app.route('/api/users', methods=['GET'])
176 | @token_optional_auth.login_required
177 | def get_users():
178 |     """
179 |     Return list of users.
180 |     This endpoint is publicly available, but if the client has a token it
181 |     should send it, as that indicates to the server that the user is online.
182 |     """
183 |     users = User.query.order_by(User.updated_at.asc(), User.nickname.asc())
184 |     if request.args.get('online'):
185 |         users = users.filter_by(online=(request.args.get('online') != '0'))
186 |     if request.args.get('updated_since'):
187 |         users = users.filter(
188 |             User.updated_at >= int(request.args.get('updated_since')))
189 |     return jsonify({'users': [user.to_dict() for user in users.all()]})
190 | 
191 | 
192 | @app.route('/api/users/<id>', methods=['GET'])
193 | @token_optional_auth.login_required
194 | def get_user(id):
195 |     """
196 |     Return a user.
197 |     This endpoint is publicly available, but if the client has a token it
198 |     should send it, as that indicates to the server that the user is online.
199 |     """
200 |     return jsonify(User.query.get_or_404(id).to_dict())
201 | 
202 | 
203 | @app.route('/api/users/<id>', methods=['PUT'])
204 | @token_auth.login_required
205 | def edit_user(id):
206 |     """
207 |     Modify an existing user.
208 |     This endpoint requires a valid user token.
209 |     Note: users are only allowed to modify themselves. 
210 | """ 211 | user = User.query.get_or_404(id) 212 | if user.id != g.jwt_claims['user_id']: 213 | abort(403) 214 | user.from_dict(request.get_json() or {}) 215 | db.session.add(user) 216 | db.session.commit() 217 | return '', 204 218 | 219 | 220 | @app.route('/api/users/me', methods=['GET']) 221 | @basic_auth.login_required 222 | def get_me_user(): 223 | """ 224 | Return the authenticated user. 225 | This endpoint requires basic auth with nickname and password. 226 | """ 227 | return jsonify(g.current_user.to_dict()) 228 | 229 | 230 | @app.route('/api/users/me', methods=['PUT']) 231 | @token_auth.login_required 232 | def set_user_online(): 233 | """Set the user that owns the token online.""" 234 | user = User.query.get(g.jwt_claims['user_id']) 235 | if user is not None: 236 | user.ping() 237 | db.session.commit() 238 | return '', 204 239 | 240 | 241 | @app.route('/api/users/me', methods=['DELETE']) 242 | @token_auth.login_required 243 | def set_user_offline(): 244 | """Set the user that owns the token offline.""" 245 | user = User.query.get(g.jwt_claims['user_id']) 246 | if user is not None: 247 | user.online = False 248 | db.session.commit() 249 | return '', 204 250 | 251 | 252 | if __name__ == '__main__': 253 | app.run() 254 | --------------------------------------------------------------------------------