├── gengine
├── app
│ ├── __init__.py
│ ├── tests
│ │ ├── __init__.py
│ │ ├── db.py
│ │ ├── runner.py
│ │ └── base.py
│ ├── alembic
│ │ ├── versions
│ │ │ ├── DUMMY
│ │ │ ├── 3512efb5496d_additional_public_data.py
│ │ │ ├── 62026366cd60_evaluation_timezone.py
│ │ │ ├── 8e65de1ed535_force_password_change.py
│ │ │ ├── e41a1c7304bb_grouptype_relevance2.py
│ │ │ ├── 2012674516fc_has_been_pushed.py
│ │ │ ├── 87dfedb58883_goal_triggers_achievement_date.py
│ │ │ ├── d4a70083f72e_add_user_language.py
│ │ │ ├── e8799ed00992_grouptype_relevance.py
│ │ │ ├── 65c7a32b7322_achievement_date_unique.py
│ │ │ ├── a90076b18837_tasks_and_shift.py
│ │ │ └── 46e4971dc46f_extended_groups.py
│ │ ├── README
│ │ ├── script.py.mako
│ │ └── env.py
│ ├── tests_old
│ │ ├── __init__.py
│ │ ├── test_variable.py
│ │ ├── db.py
│ │ ├── test_value.py
│ │ ├── test_device.py
│ │ ├── runner.py
│ │ ├── base.py
│ │ ├── test_auth.py
│ │ ├── test_groups.py
│ │ └── test_eval_types_and_rewards.py
│ ├── jsscripts
│ │ ├── .gitignore
│ │ ├── src
│ │ │ ├── storeState
│ │ │ │ ├── ui
│ │ │ │ │ ├── persist.js
│ │ │ │ │ ├── constants.js
│ │ │ │ │ ├── actions.js
│ │ │ │ │ ├── selectors.js
│ │ │ │ │ ├── index.js
│ │ │ │ │ └── reducer.js
│ │ │ │ ├── apiConfig.js
│ │ │ │ └── index.js
│ │ │ ├── config.js
│ │ │ ├── locales
│ │ │ │ ├── de.js
│ │ │ │ ├── en.js
│ │ │ │ └── index.js
│ │ │ ├── index.css
│ │ │ ├── App.css
│ │ │ ├── App.scss
│ │ │ ├── service
│ │ │ │ ├── ApiService.js
│ │ │ │ ├── SecurityService.js
│ │ │ │ ├── HistoryService.js
│ │ │ │ ├── InputValidators.js
│ │ │ │ ├── LocaleService.js
│ │ │ │ ├── LayoutService.js
│ │ │ │ ├── FetchService.js
│ │ │ │ └── URLService.js
│ │ │ ├── App.test.js
│ │ │ ├── lib
│ │ │ │ ├── swagger
│ │ │ │ │ ├── index.js
│ │ │ │ │ └── provider.js
│ │ │ │ └── persistence
│ │ │ │ │ └── index.js
│ │ │ ├── initLocales.js
│ │ │ ├── components
│ │ │ │ └── views
│ │ │ │ │ ├── leaderboard-creation
│ │ │ │ │ │ ├── leaderboardCreation.scss
│ │ │ │ │ │ └── leaderboardCreation.css
│ │ │ │ │ └── group-assignment
│ │ │ │ │ │ ├── groupAssignment.scss
│ │ │ │ │ │ └── groupAssignment.css
│ │ │ ├── style
│ │ │ │ └── index.js
│ │ │ ├── App.js
│ │ │ ├── index.js
│ │ │ └── logo.svg
│ │ ├── build
│ │ │ ├── favicon.ico
│ │ │ ├── asset-manifest.json
│ │ │ ├── manifest.json
│ │ │ ├── index.html
│ │ │ └── service-worker.js
│ │ ├── public
│ │ │ ├── favicon.ico
│ │ │ ├── manifest.json
│ │ │ └── index.html
│ │ ├── __init__.py
│ │ └── package.json
│ ├── templates
│ │ ├── leaderboard_wizard.html
│ │ ├── error.html
│ │ ├── jscomponent.html
│ │ ├── admin_maintenance.html
│ │ ├── admin_layout.html
│ │ └── admin
│ │ │ └── layout.html
│ ├── static
│ │ ├── admin_layout.css
│ │ ├── admin.js
│ │ └── api.js
│ ├── tasks.py
│ ├── route.py
│ ├── registries.py
│ ├── cache.py
│ ├── permissions.py
│ ├── tasksystem.py
│ ├── api
│ │ └── schemas.py
│ └── leaderboard.py
├── base
│ ├── __init__.py
│ ├── settings.py
│ ├── monkeypatch_flaskadmin.py
│ ├── context.py
│ ├── errors.py
│ ├── cache.py
│ ├── util.py
│ └── model.py
├── maintenance
│ ├── __init__.py
│ └── scripts
│ │ ├── __init__.py
│ │ ├── generate_erd.py
│ │ ├── push_messages.py
│ │ ├── generate_revision.py
│ │ ├── scheduler_worker.py
│ │ └── scheduler_beat.py
├── wsgiutil.py
├── metadata.py
└── __init__.py
├── .coveragerc
├── erd.png
├── .dockerignore
├── README.txt
├── docs
├── internals
│ └── index.rst
├── upgrading
│ └── index.rst
├── roadmap.rst
├── index.rst
├── installing
│ └── index.rst
└── rest
│ └── index.rst
├── docker-files
├── uwsgi.ini
├── production.uwsgi
├── development.uwsgi
├── nginx-dev.conf
├── nginx-prod.conf
└── production.ini
├── optional-requirements.txt
├── MANIFEST.in
├── paster-script.py
├── Dockerfile
├── CHANGES.txt
├── .travis.yml
├── .gitignore
├── requirements.txt
├── LICENSE
├── init.sh
├── docker-compose.yml
├── docker-compose.production.yml
├── setup.py
├── development.ini
├── README.md
└── wait-for-it.sh
/gengine/app/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/gengine/base/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/gengine/app/tests/__init__.py:
--------------------------------------------------------------------------------
1 | pass
--------------------------------------------------------------------------------
/gengine/maintenance/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/DUMMY:
--------------------------------------------------------------------------------
1 | ...
--------------------------------------------------------------------------------
/gengine/app/tests_old/__init__.py:
--------------------------------------------------------------------------------
1 | pass
--------------------------------------------------------------------------------
/gengine/app/tests_old/test_variable.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = gengine
3 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 |
--------------------------------------------------------------------------------
/gengine/maintenance/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | # package
2 |
--------------------------------------------------------------------------------
/gengine/app/alembic/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/erd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ActiDoo/gamification-engine/HEAD/erd.png
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | Dockerfile
2 | docker-compose*.yml
3 | gengine/app/jsscripts/node_modules
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/ui/persist.js:
--------------------------------------------------------------------------------
1 | export default [
2 | ["locale"],
3 | ];
4 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/config.js:
--------------------------------------------------------------------------------
1 | export default {
2 | apiUrl: window.ADMIN_API_BASE_URL
3 | };
4 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/locales/de.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | welcome: 'Hallo in Deutsch',
3 | };
4 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/locales/en.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | welcome: 'Hello in English',
3 | };
4 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/index.css:
--------------------------------------------------------------------------------
1 | body {
2 | margin: 0;
3 | padding: 0;
4 | font-family: sans-serif;
5 | }
6 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/App.css:
--------------------------------------------------------------------------------
1 | @import url(https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css);
2 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/App.scss:
--------------------------------------------------------------------------------
1 | @import url(https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css);
2 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/build/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ActiDoo/gamification-engine/HEAD/gengine/app/jsscripts/build/favicon.ico
--------------------------------------------------------------------------------
/gengine/app/jsscripts/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ActiDoo/gamification-engine/HEAD/gengine/app/jsscripts/public/favicon.ico
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/ui/constants.js:
--------------------------------------------------------------------------------
1 | export const PREFIX = "UI_";
2 |
3 | export const SET_LOCALE = PREFIX + "SET_LOCALE";
4 |
--------------------------------------------------------------------------------
/README.txt:
--------------------------------------------------------------------------------
1 | gengine README
2 | ==================
3 |
4 | Getting Started
5 | ---------------
6 |
7 | - see https://github.com/ActiDoo/gamification-engine
8 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/locales/index.js:
--------------------------------------------------------------------------------
1 | import en from './en';
2 | import de from './de';
3 |
4 | export default {
5 | en: en,
6 | de: de,
7 | };
8 |
--------------------------------------------------------------------------------
/docs/internals/index.rst:
--------------------------------------------------------------------------------
1 | :title: internals
2 | :description: module documentation
3 |
4 | Modules
5 | -------
6 |
7 | .. image:: /_static/2017-03-28-erm.svg
8 | :width: 1000
9 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/ApiService.js:
--------------------------------------------------------------------------------
1 | import config from '../config';
2 |
3 | export function getApiUrl(url) {
4 | return `${config.apiUrl}/${url ? url : ''}`;
5 | };
6 |
--------------------------------------------------------------------------------
/gengine/base/settings.py:
--------------------------------------------------------------------------------
1 | _settings = None
2 |
3 | def set_settings(settings):
4 | global _settings
5 | _settings = settings
6 |
7 | def get_settings():
8 | global _settings
9 | return _settings
--------------------------------------------------------------------------------
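
A minimal usage sketch of the settings holder above, assuming the settings dict is the one parsed from the .ini file at startup (the keys and values below are placeholders):

    from gengine.base.settings import set_settings, get_settings

    # once, at application startup (placeholder settings)
    set_settings({"sqlalchemy.url": "postgresql://gengine:secret@localhost/gengine"})

    # anywhere else in the code base
    db_url = get_settings().get("sqlalchemy.url")

--------------------------------------------------------------------------------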
/gengine/app/jsscripts/src/service/SecurityService.js:
--------------------------------------------------------------------------------
1 | import uuidV4 from 'uuid/v4';
2 |
3 | module.exports = {
4 |
5 | getRandomID: function () {
6 | return uuidV4();
7 | },
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/HistoryService.js:
--------------------------------------------------------------------------------
1 | import { browserHistory, hashHistory } from 'react-router';
2 |
3 | export function getHistory() {
4 | let history = browserHistory;
5 | return history;
6 | }
7 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/ui/actions.js:
--------------------------------------------------------------------------------
1 | import * as types from './constants';
2 |
3 | export function setLocale(locale, initial) {
4 | return { type: types.SET_LOCALE, payload: { locale: locale } };
5 | }
6 |
--------------------------------------------------------------------------------
/docker-files/uwsgi.ini:
--------------------------------------------------------------------------------
1 | [uwsgi]
2 | socket = 127.0.0.1:8585
3 | master = true
4 | processes = 1
5 | module = activements
6 | home = /home/cello/workspace/activements
7 | paste = config:/home/cello/git/gamification/development.ini
8 | plugins = python
9 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/ui/selectors.js:
--------------------------------------------------------------------------------
1 | import { createSelector } from 'reselect'
2 |
3 | export const getState = (state) => state.ui;
4 |
5 | export const getLocale = createSelector(
6 | getState,
7 | (ui) => ui.locale
8 | );
9 |
--------------------------------------------------------------------------------
/gengine/base/monkeypatch_flaskadmin.py:
--------------------------------------------------------------------------------
1 | def do_monkeypatch():
2 | def get_url(self):
3 | return self._view.get_url('%s.%s' % (self._view.endpoint, self._view._default_view))
4 |
5 | import flask_admin.menu
6 | flask_admin.menu.MenuView.get_url = get_url
--------------------------------------------------------------------------------
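
A short, assumed usage sketch: the patch above replaces flask_admin.menu.MenuView.get_url so that menu entries link to the view's default endpoint; it has to run before the Flask-Admin instance builds its menu.

    from flask import Flask
    from flask_admin import Admin

    from gengine.base.monkeypatch_flaskadmin import do_monkeypatch

    do_monkeypatch()                 # patch MenuView.get_url before any menus are built

    flask_app = Flask(__name__)      # hypothetical app/admin wiring, for illustration only
    admin = Admin(flask_app, name="gengine admin")

--------------------------------------------------------------------------------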
/gengine/app/jsscripts/src/App.test.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 |
5 | it('renders without crashing', () => {
6 | const div = document.createElement('div');
7 | ReactDOM.render(<App />, div);
8 | });
9 |
--------------------------------------------------------------------------------
/optional-requirements.txt:
--------------------------------------------------------------------------------
1 | argon2==0.1.10
2 | names==0.3.0
3 | pbr==2.0.0
4 | pg8000==1.10.6
5 | python-gcm==0.4
6 | redis==2.10.5
7 | requests==2.22.0
8 | tapns3==3.0.0
9 | testing.common.database==2.0.0
10 | testing.postgresql==1.3.0
11 | testing.redis==1.1.1
12 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/lib/swagger/index.js:
--------------------------------------------------------------------------------
1 | import {buildSwaggerApi, generateID} from './builder';
2 | import {DynamicProvider, connectDynamic} from './provider';
3 |
4 | export {
5 | buildSwaggerApi,
6 | DynamicProvider,
7 | connectDynamic,
8 | generateID
9 | }
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/initLocales.js:
--------------------------------------------------------------------------------
1 | import {IntlProvider, defineMessages, addLocaleData} from 'react-intl';
2 |
3 | import en from 'react-intl/locale-data/en';
4 | import de from 'react-intl/locale-data/de';
5 |
6 | addLocaleData([
7 | ...en,
8 | ...de,
9 | ]);
10 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include *.txt *.ini *.cfg *.rst *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml *.py README DUMMY LICENSE
2 | recursive-include gengine *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml README DUMMY app/alembic app/alembic/versions/*.py app/alembic/env.py
3 |
--------------------------------------------------------------------------------
/docker-files/production.uwsgi:
--------------------------------------------------------------------------------
1 | [uwsgi]
2 | uid=gengine
3 | gid=gengine
4 | listen = 30
5 | uwsgi-socket = /run/uwsgi/uwsgi.socket
6 | chmod-socket = 666
7 | master = true
8 | processes = 4
9 | die-on-term = true
10 | paste = config:/usr/src/app/production.ini
11 | memory-report = true
12 | hook-accepting1 = exec:touch /tmp/app-initialized
13 |
--------------------------------------------------------------------------------
/docker-files/development.uwsgi:
--------------------------------------------------------------------------------
1 | [uwsgi]
2 | uid=gengine
3 | gid=gengine
4 | listen = 30
5 | uwsgi-socket = /run/uwsgi/uwsgi.socket
6 | chmod-socket = 666
7 | master = true
8 | processes = 4
9 | die-on-term = true
10 | paste = config:/usr/src/app/production.ini
11 | memory-report = true
12 | hook-accepting1 = exec:touch /tmp/app-initialized
13 |
--------------------------------------------------------------------------------
/gengine/base/context.py:
--------------------------------------------------------------------------------
1 | import threading
2 |
3 | from gengine.base.util import DictObjectProxy
4 |
5 | _local = threading.local()
6 |
7 | def get_context():
8 | if not hasattr(_local, "context"):
9 | _local.context = DictObjectProxy()
10 | return _local.context
11 |
12 | def reset_context():
13 | _local.context = DictObjectProxy()
--------------------------------------------------------------------------------
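
A rough usage sketch, assuming DictObjectProxy (imported from gengine.base.util, not shown in this dump) allows arbitrary attribute reads and writes; each thread gets its own context, and reset_context() would typically be called at the start of a request or job:

    from gengine.base.context import get_context, reset_context

    reset_context()                  # e.g. at the beginning of a request / background job
    get_context().subject_id = 42    # assumes DictObjectProxy accepts attribute writes

    # later, in another function running in the same thread
    print(get_context().subject_id)  # -> 42

--------------------------------------------------------------------------------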
/gengine/app/jsscripts/build/asset-manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "main.css": "static/css/main.a5a0edab.css",
3 | "main.css.map": "static/css/main.a5a0edab.css.map",
4 | "main.js": "static/js/main.4589375e.js",
5 | "main.js.map": "static/js/main.4589375e.js.map",
6 | "static/js/0.f7d7d8b6.chunk.js": "static/js/0.f7d7d8b6.chunk.js",
7 | "static/js/0.f7d7d8b6.chunk.js.map": "static/js/0.f7d7d8b6.chunk.js.map"
8 | }
--------------------------------------------------------------------------------
/gengine/app/jsscripts/build/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "React App",
3 | "name": "Create React App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "192x192",
8 | "type": "image/png"
9 | }
10 | ],
11 | "start_url": "./index.html",
12 | "display": "standalone",
13 | "theme_color": "#000000",
14 | "background_color": "#ffffff"
15 | }
16 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "React App",
3 | "name": "Create React App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "192x192",
8 | "type": "image/png"
9 | }
10 | ],
11 | "start_url": "./index.html",
12 | "display": "standalone",
13 | "theme_color": "#000000",
14 | "background_color": "#ffffff"
15 | }
16 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/ui/index.js:
--------------------------------------------------------------------------------
1 | import reducer from './reducer';
2 | import * as actions from './actions';
3 | import * as constants from './constants';
4 | import * as selectors from './selectors';
5 | import persist from './persist';
6 |
7 | export default {
8 | reducer: reducer,
9 | actions: actions,
10 | constants: constants,
11 | selectors: selectors,
12 | persist: persist
13 | };
14 |
--------------------------------------------------------------------------------
/gengine/app/templates/leaderboard_wizard.html:
--------------------------------------------------------------------------------
1 | {% extends 'admin/master.html' %}
2 | {% import 'admin/lib.html' as lib with context %}
3 |
4 | {% macro extra() %}
5 |
6 | {% endmacro %}
7 |
8 | {% block head %}
9 | {{ super() }}
10 | {% endblock %}
11 |
12 | {% block body %}
13 |
14 |
15 |
16 | {% endblock %}
17 |
18 | {% block tail %}
19 | {{ super() }}
20 | {% endblock %}
--------------------------------------------------------------------------------
/paster-script.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | try:
6 | here = __file__
7 | except NameError:
8 | # Python 2.2
9 | here = sys.argv[0]
10 |
11 | relative_paste = os.path.join(
12 | os.path.dirname(os.path.dirname(os.path.abspath(here))), 'paste')
13 |
14 | if os.path.exists(relative_paste):
15 | sys.path.insert(0, os.path.dirname(relative_paste))
16 |
17 | from paste.script import command
18 | command.run()
19 |
--------------------------------------------------------------------------------
/gengine/app/static/admin_layout.css:
--------------------------------------------------------------------------------
1 | input, textarea, .uneditable-input {
2 | width: 412px;
3 | }
4 |
5 | /* json syntax highlighting */
6 | pre {outline: 1px solid #ccc; padding: 5px; margin: 5px; }
7 | .string { color: green; }
8 | .number { color: darkorange; }
9 | .boolean { color: blue; }
10 | .null { color: magenta; }
11 | .key { color: red; }
12 |
13 | .auth_active {
14 | color:green;
15 | font-weight:bold;
16 | }
17 |
18 | .auth_inactive {
19 | color:red;
20 | font-weight:bold;
21 | }
--------------------------------------------------------------------------------
/gengine/app/templates/error.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Error
6 |
7 |
8 |
9 |
10 |
11 |
12 | ${message}
13 |
14 |
15 | ${description}
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/gengine/wsgiutil.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | force_https = False
4 |
5 | def init_reverse_proxy(settings_force_https,settings_prefix):
6 | global force_https,prefix
7 | force_https = settings_force_https
8 | prefix = settings_prefix
9 |
10 | class HTTPSProxied(object):
11 | def __init__(self, app):
12 | self.app = app
13 |
14 | def __call__(self, environ, start_response):
15 | if force_https:
16 | environ['wsgi.url_scheme'] = "https"
17 | return self.app(environ, start_response)
18 |
--------------------------------------------------------------------------------
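
A minimal wiring sketch for the reverse-proxy helper above (the settings keys are assumptions; the real names come from the .ini file): configure the module-level flags, then wrap the WSGI app so the URL scheme is rewritten to https when force_https is set.

    from gengine.wsgiutil import HTTPSProxied, init_reverse_proxy

    def make_wrapped_app(wsgi_app, settings):
        init_reverse_proxy(
            settings_force_https=settings.get("force_https", False),
            settings_prefix=settings.get("url_prefix", ""),
        )
        return HTTPSProxied(wsgi_app)

--------------------------------------------------------------------------------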
/gengine/app/jsscripts/build/index.html:
--------------------------------------------------------------------------------
1 | React App
--------------------------------------------------------------------------------
/gengine/app/templates/jscomponent.html:
--------------------------------------------------------------------------------
1 | {% extends 'admin/master.html' %}
2 | {% import 'admin/lib.html' as lib with context %}
3 |
4 | {% macro extra() %}
5 |
6 | {% endmacro %}
7 |
8 | {% block head %}
9 | {{ super() }}
10 | {% endblock %}
11 |
12 | {% block body %}
13 |
14 |
18 | {% endblock %}
19 |
20 | {% block tail %}
21 | {{ super() }}
22 | {% endblock %}
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/InputValidators.js:
--------------------------------------------------------------------------------
1 | import validator from 'validator';
2 |
3 | export function isEmpty({value}){
4 | if(!value || value==""){
5 | return 'empty';
6 | }
7 | return false;
8 | }
9 |
10 | export function isInvalidEmail({value}){
11 | let isInvalidEmail = !validator.isEmail(value);
12 | if(isInvalidEmail){
13 | return 'invalidEmail';
14 | }
15 | return false;
16 | }
17 |
18 | export function isInvalidPassword({value}) {
19 |
20 | if(value.toString().length < 8) {
21 | return 'invalidPassword';
22 | }
23 | return false;
24 | }
--------------------------------------------------------------------------------
/gengine/app/alembic/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = ${repr(up_revision)}
11 | down_revision = ${repr(down_revision)}
12 | branch_labels = ${repr(branch_labels)}
13 | depends_on = ${repr(depends_on)}
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 | ${imports if imports else ""}
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.6-alpine3.10
2 | WORKDIR /usr/src/app
3 | RUN apk add --no-cache postgresql-dev build-base musl-dev libffi libffi-dev bash
4 | RUN addgroup -S gengine
5 | RUN adduser -S -D -h /usr/src/app gengine gengine
6 | RUN mkdir /run/uwsgi
7 | RUN chown -R gengine:gengine /run/uwsgi
8 | COPY requirements.txt ./
9 | COPY optional-requirements.txt ./
10 | COPY docker-files/* ./
11 | RUN cat optional-requirements.txt >> requirements.txt && pip install -r requirements.txt
12 | COPY . .
13 | RUN pip install -e . && touch /tmp/nginx.socket
14 | RUN chown -R gengine:gengine /usr/src/app
15 | CMD [ "/bin/sh", "/usr/src/app/init.sh" ]
16 |
--------------------------------------------------------------------------------
/CHANGES.txt:
--------------------------------------------------------------------------------
1 | 0.4.0
2 | * introduce docker support
3 | * fix tests
4 | * fixed some bugs
5 | * updated dependencies
6 |
7 | 0.3.0
8 | * task scheduler
9 | * extended group features
10 | * several further minor improvements and fixes
11 |
12 | 0.2.2
13 | * Fix several test cases
14 |
15 | 0.2.1
16 | * Fix of PyPI packaging
17 |
18 | 0.2.0
19 | * Implement new rule syntax
20 | * Add time-aware / recurring achievements
21 | * Add optional authentication & authorization
22 | * Introduce goal triggers
23 | * Introduce mobile pushes
24 | * Introduce messages
25 | * Lots of bugfixes
26 | * Remove Python 2.x support
27 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/ui/reducer.js:
--------------------------------------------------------------------------------
1 | import * as types from "./constants";
2 | import {getInitialLocale} from '../../service/LocaleService';
3 |
4 | let initState = {
5 | locale: getInitialLocale(),
6 | };
7 |
8 | export default function reducer(state = initState, action = "") {
9 |
10 | if(action.type.startsWith(types.PREFIX)){
11 |
12 | switch (action.type) {
13 | case types.SET_LOCALE:
14 | return {
15 | ...state,
16 | locale: action.payload.locale,
17 | }
18 | break;
19 | default:
20 | return state
21 | break;
22 | }
23 | }
24 |
25 | return state;
26 | }
27 |
--------------------------------------------------------------------------------
/gengine/app/templates/admin_maintenance.html:
--------------------------------------------------------------------------------
1 | {% extends 'admin/master.html' %}
2 | {% import 'admin/lib.html' as lib with context %}
3 |
4 | {% macro extra() %}
5 |
6 | {% endmacro %}
7 |
8 | {% block head %}
9 | {{ super() }}
10 | {{ lib.form_css() }}
11 | {% endblock %}
12 |
13 | {% block body %}
14 |
15 | {% for msg in msgs %}
16 | - {{ msg }}
17 | {% endfor %}
18 |
19 |
20 | {{ lib.render_form(clear_caches_form) }}
21 | {% endblock %}
22 |
23 | {% block tail %}
24 | {{ super() }}
25 | {{ lib.form_js() }}
26 | {% endblock %}
27 |
--------------------------------------------------------------------------------
/gengine/app/tests_old/db.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import logging
4 | log = logging.getLogger(__name__)
5 |
6 | try:
7 | import testing.postgresql
8 | except ImportError as e:
9 | log.info("testing.postgresql not installed")
10 |
11 | db = None
12 |
13 | def setupDB():
14 | # Generate Postgresql class which shares the generated database
15 | global db
16 | db = testing.postgresql.PostgresqlFactory(
17 | postgres=os.environ.get("TEST_POSTGRES",None),
18 | initdb=os.environ.get("TEST_INITDB",None),
19 | cache_initialized_db=False,
20 | auto_start=0
21 | )
22 |
23 | def unsetupDB():
24 | # clear cached database at end of tests
25 | global db
26 | db.clear_cache()
27 |
--------------------------------------------------------------------------------
/gengine/app/tests/db.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import logging
4 | log = logging.getLogger(__name__)
5 |
6 | try:
7 | import testing.postgresql
8 | except ImportError as e:
9 | log.info("testing.postgresql not installed")
10 |
11 | db = None
12 |
13 | def setupDB():
14 | # Generate Postgresql class which shares the generated database
15 | global db
16 | db = testing.postgresql.PostgresqlFactory(
17 | postgres=os.environ.get("TEST_POSTGRES",None),
18 | initdb=os.environ.get("TEST_INITDB",None),
19 | cache_initialized_db=True,
20 | base_dir="/tmp/test_pgdata"
21 | )
22 |
23 | def unsetupDB():
24 | # clear cached database at end of tests
25 | global db
26 | db.clear_cache()
27 |
--------------------------------------------------------------------------------
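
For orientation, a sketch of how this factory is typically driven with testing.postgresql; the test runner and base classes belong to the repository but are not reproduced here, so the exact call sites are assumptions:

    from gengine.app.tests import db as testdb

    testdb.setupDB()            # build the PostgresqlFactory once per test session
    postgresql = testdb.db()    # start a throw-away PostgreSQL instance
    try:
        dsn = postgresql.url()  # e.g. postgresql://postgres@127.0.0.1:<port>/test
        # ... create the engine and run the migrations against dsn ...
    finally:
        postgresql.stop()
        testdb.unsetupDB()      # drop the cached, initialized database

--------------------------------------------------------------------------------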
/docs/upgrading/index.rst:
--------------------------------------------------------------------------------
1 | :title: gamification-engine upgrade
2 | :description: upgrading the gamification-engine
3 |
4 | Upgrading
5 | ------------
6 |
7 | From 0.1 to 0.2
8 | ===============
9 |
10 | In version 0.2 we have introduced **breaking changes** that make it impossible to do an automatic upgrade. If you are happy with 0.1, there is no need to upgrade. Furthermore, we have switched to Python 3.x as our main target environment.
11 | To perform a manual upgrade, the following steps are required:
12 |
13 | - Install a new instance of 0.2
14 | - Recreate all settings / achievements manually using the new goal condition syntax
15 | - Recreate users
16 | - Copy values data (see the sketch below)
17 |
18 | *For future updates we will try to keep the goal condition syntax backwards compatible.*
19 |
--------------------------------------------------------------------------------
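
The "Copy values data" step is not automated; the following is only a rough sketch of bulk-copying the values table with SQLAlchemy reflection. The connection URLs are placeholders, the table name follows the roadmap's mention of the values table, and it assumes the referenced users were recreated with the same ids in the new instance.

    from sqlalchemy import MetaData, Table, create_engine

    old_engine = create_engine("postgresql://user:pw@localhost/gengine_v01")  # placeholder
    new_engine = create_engine("postgresql://user:pw@localhost/gengine_v02")  # placeholder

    src = Table("values", MetaData(), autoload=True, autoload_with=old_engine)
    dst = Table("values", MetaData(), autoload=True, autoload_with=new_engine)

    with old_engine.connect() as read_conn, new_engine.begin() as write_conn:
        rows = [dict(row) for row in read_conn.execute(src.select())]
        if rows:
            write_conn.execute(dst.insert(), rows)

--------------------------------------------------------------------------------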
/gengine/app/alembic/versions/3512efb5496d_additional_public_data.py:
--------------------------------------------------------------------------------
1 | """additional_public_data
2 |
3 | Revision ID: 3512efb5496d
4 | Revises:
5 | Create Date: 2016-07-21 13:30:45.257569
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '3512efb5496d'
11 | down_revision = None
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('users', sa.Column('additional_public_data', sa.JSON(), nullable=True))
22 | ### end Alembic commands ###
23 |
24 |
25 | def downgrade():
26 | ### commands auto generated by Alembic - please adjust! ###
27 | op.drop_column('users', 'additional_public_data')
28 | ### end Alembic commands ###
29 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/apiConfig.js:
--------------------------------------------------------------------------------
1 | import {getApiUrl} from '../service/ApiService';
2 |
3 | export default {
4 | id: "api",
5 | url: getApiUrl("_swagger"), //http://127.0.0.1:6543/api/_swagger
6 | apis: {
7 | /*auth: {
8 | register: {
9 | responseReducer: ({state, data, requestParams, is_error}) => {
10 | console.log("state.AUTH_REGISTER",state.AUTH_REGISTER)
11 | let registrationState = state.authRegister || {};
12 | registrationState[requestParams.requestID] = data;
13 | return {
14 | ...state,
15 | authRegister: registrationState
16 | };
17 | }
18 | }
19 | }*/
20 | },
21 | persist: [
22 | ]
23 | }
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/62026366cd60_evaluation_timezone.py:
--------------------------------------------------------------------------------
1 | """evaluation_timezone
2 |
3 | Revision ID: 62026366cd60
4 | Revises: 87dfedb58883
5 | Create Date: 2017-02-17 13:54:40.545893
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '62026366cd60'
11 | down_revision = '87dfedb58883'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('achievements', sa.Column('evaluation_timezone', sa.String(), nullable=True))
22 | ### end Alembic commands ###
23 |
24 |
25 | def downgrade():
26 | ### commands auto generated by Alembic - please adjust! ###
27 | op.drop_column('achievements', 'evaluation_timezone')
28 | ### end Alembic commands ###
29 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/8e65de1ed535_force_password_change.py:
--------------------------------------------------------------------------------
1 | """force password change
2 |
3 | Revision ID: 8e65de1ed535
4 | Revises: 2012674516fc
5 | Create Date: 2017-05-04 14:39:30.110799
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '8e65de1ed535'
11 | down_revision = '2012674516fc'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('auth_users', sa.Column('force_password_change', sa.Boolean(), server_default='0', nullable=False))
22 | ### end Alembic commands ###
23 |
24 |
25 | def downgrade():
26 | ### commands auto generated by Alembic - please adjust! ###
27 | op.drop_column('auth_users', 'force_password_change')
28 | ### end Alembic commands ###
29 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/LocaleService.js:
--------------------------------------------------------------------------------
1 | import messages from '../locales';
2 |
3 | export const getUserLocale = () => {
4 |
5 | let userLocale = 'en';
6 | var language = navigator.languages && navigator.languages[0] ||
7 | navigator.language ||
8 | navigator.userLanguage;
9 | if(messages[language] != null){
10 | userLocale = language;
11 | }else if(messages[language.split('-')[0]] != null){
12 | userLocale = language.split('-')[0];
13 | }
14 |
15 | return userLocale;
16 | }
17 |
18 | export const getInitialLocale = () => {
19 |
20 | //check window locale
21 | if(window && window.location && window.location.pathname){
22 | const urlParts = window.location.pathname.split('/');
23 | if(urlParts.length > 1){
24 | if(messages[urlParts[1]]){
25 | return urlParts[1];
26 | }
27 | }
28 | }
29 |
30 | return getUserLocale();
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/components/views/leaderboard-creation/leaderboardCreation.scss:
--------------------------------------------------------------------------------
1 | .leaderboard-creation {
2 | min-width:300px;
3 | position: relative;
4 | vertical-align: top;
5 |
6 | > *, > * > * {
7 | vertical-align: top;
8 | }
9 |
10 | .form-row {
11 | margin: 30px 0px;
12 | }
13 |
14 | .form-row-label {
15 | display: inline-block;
16 | width: 200px;
17 | font-weight: bold;
18 | }
19 |
20 | .form-row-field {
21 | display:inline-block;
22 | width: calc(100% - 200px);
23 |
24 | }
25 | .form-row-description {
26 | margin-left: 200px;
27 | font-size: 0.9em;
28 | color: #666666;
29 | }
30 | .im-btn {
31 | display: none !important;
32 | }
33 | .timespan-cb {
34 | margin-bottom: 30px;
35 | width: 100%;
36 | > span {
37 | display: inline-block;
38 | padding: 0px 0px 0px 10px;
39 | top: 3px;
40 | position: relative;
41 | }
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/docs/roadmap.rst:
--------------------------------------------------------------------------------
1 | :title: roadmap
2 | :description: features in future versions
3 |
4 | Roadmap
5 | -------
6 |
7 | Anyone is invited to work on new features, even if they are not listed here.
8 | Features which might influence the overall performance or cause larger changes should be discussed in a feature request beforehand.
9 |
10 | At ActiDoo.com we implement new functions as we need them and push them as soon as they are somewhat stable.
11 |
12 | Todo
13 | ==============
14 | - Review and improve tests
15 | - Improve Caching
16 |
17 | Future Features
18 | ===============
19 | - possibility to store events (values table) in NoSQL systems
20 | - implement callback for time-aware achievements
21 | - nicer admin UI
22 | - statistics
23 | - maybe a possibility to plug in authentication/authorization to allow users to directly push events to the engine
24 | - this still needs to be discussed from an architectural point of view
25 | - this would also introduce the need for security constraints to detect cheaters
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/e41a1c7304bb_grouptype_relevance2.py:
--------------------------------------------------------------------------------
1 | """grouptype_relevance2
2 |
3 | Revision ID: e41a1c7304bb
4 | Revises: e8799ed00992
5 | Create Date: 2017-05-30 12:16:00.979845
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = 'e41a1c7304bb'
11 | down_revision = 'e8799ed00992'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.execute("COMMIT")
22 | op.execute("ALTER TYPE relevance_types ADD value 'groups' after 'global'")
23 | # ### end Alembic commands ###
24 |
25 |
26 | def downgrade():
27 | # ### commands auto generated by Alembic - please adjust! ###
28 | op.execute("""
29 | DELETE FROM pg_enum
30 | WHERE enumlabel = 'groups'
31 | AND enumtypid = (
32 | SELECT oid FROM pg_type WHERE typname = 'relevance_types'
33 | )
34 | """)
35 | # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | dist: bionic
3 | python:
4 | - "3.6"
5 | sudo: required
6 | global:
7 | - TEST_POSTGRES=/usr/lib/postgresql/10/bin/postgres
8 | - TEST_INITDB=/usr/lib/postgresql/10/bin/initdb
9 | # command to install dependencies
10 | install:
11 | - sudo rm -rf /var/lib/postgresql
12 | - sudo DEBIAN_FRONTEND=noninteractive apt-get install -qq postgresql-10 postgresql-contrib-10 postgis
13 | - pip install coveralls
14 | - "pip install --upgrade -r requirements.txt"
15 | - "pip install --upgrade -r optional-requirements.txt"
16 | - pip install -e .
17 |
18 | # command to run tests
19 | script:
20 | - coverage run --source=gengine gengine/app/tests/runner.py
21 |
22 | after_success:
23 | coveralls
24 |
25 | # deploy to pypi
26 | deploy:
27 | provider: pypi
28 | user: $PYPI_USER
29 | password: $PYPI_PASSWORD
30 | on:
31 | tags: true
32 |
33 | notifications:
34 | email:
35 | recipients:
36 | - $ADMIN_EMAIL
37 | on_success: always # default: change
38 | on_failure: always # default: always
39 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/components/views/leaderboard-creation/leaderboardCreation.css:
--------------------------------------------------------------------------------
1 | .leaderboard-creation {
2 | min-width: 300px;
3 | position: relative;
4 | vertical-align: top; }
5 | .leaderboard-creation > *, .leaderboard-creation > * > * {
6 | vertical-align: top; }
7 | .leaderboard-creation .form-row {
8 | margin: 30px 0px; }
9 | .leaderboard-creation .form-row-label {
10 | display: inline-block;
11 | width: 200px;
12 | font-weight: bold; }
13 | .leaderboard-creation .form-row-field {
14 | display: inline-block;
15 | width: calc(100% - 200px); }
16 | .leaderboard-creation .form-row-description {
17 | margin-left: 200px;
18 | font-size: 0.9em;
19 | color: #666666; }
20 | .leaderboard-creation .im-btn {
21 | display: none !important; }
22 | .leaderboard-creation .timespan-cb {
23 | margin-bottom: 30px;
24 | width: 100%; }
25 | .leaderboard-creation .timespan-cb > span {
26 | display: inline-block;
27 | padding: 0px 0px 0px 10px;
28 | top: 3px;
29 | position: relative; }
30 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/2012674516fc_has_been_pushed.py:
--------------------------------------------------------------------------------
1 | """has_been_pushed
2 |
3 | Revision ID: 2012674516fc
4 | Revises: 62026366cd60
5 | Create Date: 2017-03-08 17:44:02.214248
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '2012674516fc'
11 | down_revision = '62026366cd60'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('user_messages', sa.Column('has_been_pushed', sa.Boolean(), nullable=False, server_default='1'))
22 | op.create_index(op.f('ix_user_messages_has_been_pushed'), 'user_messages', ['has_been_pushed'], unique=False)
23 | ### end Alembic commands ###
24 |
25 |
26 | def downgrade():
27 | ### commands auto generated by Alembic - please adjust! ###
28 | op.drop_index(op.f('ix_user_messages_has_been_pushed'), table_name='user_messages')
29 | op.drop_column('user_messages', 'has_been_pushed')
30 | ### end Alembic commands ###
31 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/style/index.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | let _export = {};
4 |
5 | _export.colors = {
6 | "primary1" : "#f1f1f1",
7 | "primary2": "#989b9d",
8 | "secondary1": "#FFFFFF",
9 | "secondary2": "#FFFFFF",
10 | "white" : "#FFFFFF",
11 | "black" : "#000000",
12 | }
13 | // Let's make all colors available in the root (i.e. $primary1)
14 | for( let key in _export.colors) {
15 | _export[key] = _export.colors[key];
16 | }
17 |
18 | _export.layout = {
19 | "mobile-max" : "(max-width : 980px)",
20 | "mobile-min" : "(min-width : 981px)"
21 | }
22 | // Let's make all layout settings available in the root (i.e. $mobile-max)
23 | for( let key in _export.layout) {
24 | _export[key] = _export.layout[key];
25 | }
26 |
27 | _export.fontsizes = {
28 | "fontsize-small" : "10px",
29 | "fontsize-normal" : "12px",
30 | "fontsize-big" : "14px",
31 | }
32 | // Let's make all fontsizes available in the root (i.e. $fontsize-small)
33 | for( let key in _export.fontsizes) {
34 | _export[key] = _export.fontsizes[key];
35 | }
36 |
37 |
38 | module.exports = _export;
39 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. gamification-engine documentation master file, created by
2 | sphinx-quickstart on Mon Mar 2 13:23:03 2015.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | :title: gamification-engine is an open source software for integrating any kinds of gamification features into your product
7 | :description: Welcome to gamification-engine, a flexible open source gamification solution
8 |
9 |
10 |
11 |
12 | Welcome to gamification-engine's documentation!
13 | ===============================================
14 |
15 | gamification-engine is a flexible open source gamification solution that allows you to easily integrate gamification features into your own products.
16 |
17 | Contents:
18 |
19 | .. toctree::
20 | :maxdepth: 2
21 |
22 | installing/index
23 | upgrading/index
24 | concepts/index
25 | rest/index
26 | internals/index
27 | roadmap
28 |
29 |
30 | Indices and tables
31 | ==================
32 |
33 | * :ref:`genindex`
34 | * :ref:`modindex`
35 | * :ref:`search`
36 |
37 |
38 |
--------------------------------------------------------------------------------
/gengine/app/templates/admin_layout.html:
--------------------------------------------------------------------------------
1 | {% import 'admin/layout.html' as layout with context -%}
2 | {% extends 'admin/base.html' %}
3 |
4 | {% block head_tail %}
5 | {{ super() }}
6 |
7 |
11 |
12 |
13 | {% endblock %}
14 |
15 | {% block tail %}
16 | {{ super() }}
17 |
18 |
19 |
20 |
21 | {% endblock %}
22 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | develop-eggs/
12 | dist/
13 | downloads/
14 | eggs/
15 | parts/
16 | sdist/
17 | var/
18 | *.egg-info/
19 | .installed.cfg
20 | *.egg
21 |
22 | # PyInstaller
23 | # Usually these files are written by a python script from a template
24 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
25 | *.manifest
26 | *.spec
27 |
28 | # Installer logs
29 | pip-log.txt
30 | pip-delete-this-directory.txt
31 |
32 | # Unit test / coverage reports
33 | htmlcov/
34 | .tox/
35 | .coverage
36 | .cache
37 | nosetests.xml
38 | coverage.xml
39 |
40 | # Translations
41 | *.mo
42 | *.pot
43 |
44 | # Django stuff:
45 | *.log
46 |
47 | # Sphinx documentation
48 | docs/_build/
49 |
50 | # PyBuilder
51 | target/
52 |
53 | # PyDev
54 | .project
55 | .metadata
56 | .settings/
57 | *.pydevproject
58 | /*.dbm
59 |
60 | venv/
61 | # Elastic Beanstalk Files
62 | .elasticbeanstalk/*
63 | !.elasticbeanstalk/*.cfg.yml
64 | !.elasticbeanstalk/*.global.yml
65 |
66 | .idea
67 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/87dfedb58883_goal_triggers_achievement_date.py:
--------------------------------------------------------------------------------
1 | """goal_triggers_achievement_date
2 |
3 | Revision ID: 87dfedb58883
4 | Revises: 65c7a32b7322
5 | Create Date: 2017-02-08 15:59:53.780748
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '87dfedb58883'
11 | down_revision = '65c7a32b7322'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('goal_trigger_executions', sa.Column('achievement_date', sa.DateTime(), nullable=True))
22 | op.create_index(op.f('ix_goal_trigger_executions_achievement_date'), 'goal_trigger_executions', ['achievement_date'], unique=False)
23 | ### end Alembic commands ###
24 |
25 |
26 | def downgrade():
27 | ### commands auto generated by Alembic - please adjust! ###
28 | op.drop_index(op.f('ix_goal_trigger_executions_achievement_date'), table_name='goal_trigger_executions')
29 | op.drop_column('goal_trigger_executions', 'achievement_date')
30 | ### end Alembic commands ###
31 |
--------------------------------------------------------------------------------
/gengine/maintenance/scripts/generate_erd.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import os
3 | import sys
4 | import transaction
5 |
6 | from sqlalchemy import engine_from_config
7 |
8 | from pyramid.paster import (
9 | get_appsettings,
10 | setup_logging,
11 | )
12 |
13 | from pyramid.scripts.common import parse_vars
14 | import pyramid_dogpile_cache
15 | from pyramid.config import Configurator
16 |
17 | def usage(argv):
18 | cmd = os.path.basename(argv[0])
19 | print('usage: %s <config_uri> <output_file>\n'
20 | '(example: "%s development.ini erd.png")' % (cmd, cmd))
21 | sys.exit(1)
22 |
23 |
24 | def main(argv=sys.argv):
25 | if len(argv) < 3:
26 | usage(argv)
27 | config_uri = argv[1]
28 | output_file = argv[2]
29 | options = parse_vars(argv[3:])
30 | setup_logging(config_uri)
31 | settings = get_appsettings(config_uri, options=options)
32 |
33 | durl = os.environ.get("DATABASE_URL") #heroku
34 | if durl:
35 | settings['sqlalchemy.url']=durl
36 |
37 | from eralchemy import render_er
38 | ## Draw from SQLAlchemy base
39 | render_er(settings.get("sqlalchemy.url"), output_file)
40 |
--------------------------------------------------------------------------------
/gengine/base/errors.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from pyramid.view import view_config
3 | from pyramid.response import Response
4 | import json
5 | from pyramid.renderers import render_to_response
6 |
7 | class APIError(Exception):
8 | def __init__(self, code, status, message):
9 | self.code = code
10 | self.status = status
11 | self.message = message
12 |
13 | class HTMLError(Exception):
14 | def __init__(self, code, message, description):
15 | self.code = code
16 | self.message = message
17 | self.description = description
18 |
19 | @view_config(context=APIError)
20 | def json_exception_view(exc, request):
21 | s = json.dumps({
22 | "status": exc.status,
23 | "message": exc.message,
24 | })
25 | response = Response(s)
26 | response.content_type = "application/json"
27 | response.status_int = exc.code
28 | return response
29 |
30 |
31 | @view_config(context=HTMLError)
32 | def html_exception_view(exc, request):
33 | response = render_to_response("../templates/error.html", {
34 | "description": exc.description,
35 | "message": exc.message,
36 | }, request)
37 | response.status_int = exc.code
38 | return response
39 |
--------------------------------------------------------------------------------
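
A brief usage sketch (the view, route and messages are made up): views raise APIError to produce the JSON error response rendered by json_exception_view above, or HTMLError to render the error template.

    from gengine.base.errors import APIError

    def get_progress_view(request):
        subject_id = request.matchdict.get("subject_id")
        if subject_id is None:
            # returned as {"status": ..., "message": ...} with HTTP status 404
            raise APIError(404, "subject_not_found", "There is no subject with this id.")
        return {"subject_id": subject_id}

--------------------------------------------------------------------------------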
/requirements.txt:
--------------------------------------------------------------------------------
1 | alembic==0.9.1
2 | appdirs==1.4.3
3 | certifi==2019.6.16
4 | Chameleon==3.1
5 | chardet==3.0.4
6 | Click==7.0
7 | contextlib2==0.5.5
8 | croniter==0.3.16
9 | dogpile.cache==0.6.2
10 | Flask==1.1.1
11 | Flask-Admin==1.5.3
12 | hupper==0.4.4
13 | idna==2.8
14 | itsdangerous==1.1.0
15 | Jinja2==2.10.1
16 | jsl==0.2.4
17 | jsonschema==2.6.0
18 | Mako==1.0.6
19 | MarkupSafe==1.1.1
20 | mock==2.0.0
21 | packaging==16.8
22 | PasteDeploy==1.5.2
23 | plaster==1.0
24 | plaster-pastedeploy==0.6
25 | psycopg2==2.8.3
26 | Pygments==2.2.0
27 | pymemcache==1.4.2
28 | pyparsing==2.2.0
29 | pyramid==1.9.1
30 | pyramid-chameleon==0.3
31 | pyramid-debugtoolbar==3.0.5
32 | pyramid-dogpile-cache==0.0.4
33 | pyramid-jinja2==2.6.2
34 | pyramid-mako==1.0.2
35 | pyramid-swagger-spec==0.0.7
36 | pyramid-tm==1.1.1
37 | python-crontab==2.2.2
38 | python-dateutil==2.5.3
39 | python-editor==1.0.3
40 | pytz==2016.10
41 | raven==6.0.0
42 | repoze.lru==0.6
43 | six==1.10.0
44 | SQLAlchemy==1.3.0
45 | transaction==2.1.2
46 | translationstring==1.3
47 | urllib3==1.25.3
48 | venusian==1.0
49 | waitress==1.4.2
50 | WebOb==1.7.2
51 | Werkzeug==0.16.0
52 | WTForms==2.2.1
53 | zope.deprecation==4.2.0
54 | zope.interface==4.4.2
55 | zope.sqlalchemy==0.7.7
56 | uWSGI==2.0.18
57 |
--------------------------------------------------------------------------------
/gengine/app/tasks.py:
--------------------------------------------------------------------------------
1 | from .tasksystem import EngineTask
2 |
3 | def includeme(config):
4 |
5 | # @EngineTask(
6 | # name="demo_task",
7 | # description="print sth",
8 | # config_scheme=None,
9 | # default_config=None,
10 | # default_cron="* * * * *",
11 | # default_activated=True
12 | # )
13 | # def demo_task(config):
14 | # return {
15 | # 'log': None,
16 | # 'success': True
17 | # }
18 |
19 |
20 | from gengine.app.registries import get_task_registration_points_registry
21 | get_task_registration_points_registry().run_extensions()
22 |
23 |
24 | @EngineTask(
25 | name="import_test_data",
26 | description="import the test demo data",
27 | config_scheme=None,
28 | default_config=None,
29 | default_cron="0 0 5 31 2", # 31st feb will never exec
30 | default_activated=True
31 | )
32 | def demo_task(config):
33 |
34 | from gengine.app.tests.helpers import create_subjecttypes, create_subjects, create_achievements, create_variables
35 |
36 | create_subjecttypes()
37 | create_subjects()
38 | create_variables()
39 | create_achievements()
40 |
41 | return {
42 | 'log': None,
43 | 'success': True
44 | }
45 |
--------------------------------------------------------------------------------
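
The commented-out demo_task above already documents the decorator's parameters; for clarity, a hedged sketch of registering a custom task the same way (the task name, cron expression and body are invented):

    from gengine.app.tasksystem import EngineTask

    # in the repository such registrations live inside an includeme(config) hook
    @EngineTask(
        name="nightly_cleanup",            # invented example name
        description="delete stale rows",
        config_scheme=None,
        default_config=None,
        default_cron="0 3 * * *",          # every night at 03:00
        default_activated=False
    )
    def nightly_cleanup(config):
        # a task returns a log and a success flag, mirroring import_test_data above
        return {"log": "nothing to do", "success": True}

--------------------------------------------------------------------------------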
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 Marcel Sander (marcel@gamification-software.com), Jens Janiuk (jens@gamification-software.com)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
22 |
--------------------------------------------------------------------------------
/gengine/app/route.py:
--------------------------------------------------------------------------------
1 | from pyramid_swagger_spec.namespace import create_api_namespace
2 | from pyramid_swagger_spec.swagger import create_swagger_view
3 |
4 | api_route = create_api_namespace(namespace="api")
5 |
6 | def config_routes(config):
7 | config.add_route('get_progress', '/progress/{subject_id}')
8 | config.add_route('increase_value', '/increase_value/{variable_name}/{subject_id}')
9 | config.add_route('increase_value_with_key', '/increase_value/{variable_name}/{subject_id}/{key}')
10 | config.add_route('increase_multi_values', '/increase_multi_values')
11 | config.add_route('add_or_update_subject', '/add_or_update_subject/{subject_id}')
12 | config.add_route('delete_subject', '/delete_subject/{subject_id}')
13 | config.add_route('get_achievement_level', '/achievement/{achievement_id}/level/{level}')
14 |
15 | config.add_route('auth_login', '/auth/login')
16 | config.add_route('change_password', '/auth/change_password')
17 |
18 | config.add_route('register_device', '/register_device/{subject_id}')
19 | config.add_route('get_messages', '/messages/{subject_id}')
20 | config.add_route('read_messages', '/read_messages/{subject_id}')
21 |
22 | create_swagger_view(config, namespace="api", title="Admin Api", version="0.1")
23 |
--------------------------------------------------------------------------------
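
For orientation, a hedged client-side sketch of calling two of the routes above with the requests library; the base URL, HTTP methods and payload layout are assumptions, not taken from the view code (which is outside this dump). The admin UI points at http://127.0.0.1:6543/api/_swagger, so the routes may be mounted under a prefix in a real deployment.

    import requests

    BASE = "http://127.0.0.1:6543"  # placeholder host/port

    # read a subject's progress (route: /progress/{subject_id})
    progress = requests.get(BASE + "/progress/42").json()

    # increase a variable for a subject (route: /increase_value/{variable_name}/{subject_id});
    # the {"value": 1} body is an assumed payload shape
    resp = requests.post(BASE + "/increase_value/participate/42", json={"value": 1})
    resp.raise_for_status()

--------------------------------------------------------------------------------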
/gengine/app/tests_old/test_value.py:
--------------------------------------------------------------------------------
1 | from gengine.app.tests.base import BaseDBTest
2 | from gengine.app.tests.helpers import create_user, create_variable,create_value
3 | from gengine.app.model import Value
4 |
5 |
6 | class TestValue(BaseDBTest):
7 | def test_increase_value(self):
8 | user = create_user()
9 | variable = create_variable(variable_name="participate", variable_group="day")
10 |
11 | value1 = Value.increase_value(variable.name, user, value=3, key="5")
12 | value2 = Value.increase_value(variable.name, user, value=3, key="5")
13 | value3 = Value.increase_value(variable.name, user, value=6, key="7")
14 |
15 | # Correct cases
16 | self.assertGreater(value2, value1)
17 | self.assertEqual(value3, value2)
18 |
19 | # Doesn't work when given variable_group = None, i.e. the current_datetime check differs between two successive calls
20 | # Increase value is being called only in evaluate_achievement function and not in evaluate_goal
21 |
22 | def test_increase_value_null_key(self):
23 | user = create_user()
24 | variable = create_variable(variable_name="login", variable_group="day")
25 | value1 = Value.increase_value(variable.name, user, value=1, key=None)
  26 |         self.assertEqual(value1, 1)
27 |
--------------------------------------------------------------------------------
/init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | cd /usr/src/app/
4 |
5 | if [[ ! $APP_DB_HOST ]]; then
6 | echo "\$APP_DB_HOST is unset";
7 | exit 1;
8 | fi
9 |
10 | if [[ ! $APP_DB_PORT ]]; then
11 | echo "\$APP_DB_PORT is unset";
12 | exit 1;
13 | fi
14 |
15 | if [[ ! $APP_DB_USER ]]; then
16 | echo "\$APP_DB_USER is unset";
17 | exit 1;
18 | fi
19 |
20 | if [[ ! $APP_DB_PASSWORD ]]; then
21 | echo "\$APP_DB_PASSWORD is unset";
22 | exit 1;
23 | fi
24 |
25 | if [[ ! $APP_DB_NAME ]]; then
26 | echo "\$APP_DB_NAME is unset";
27 | exit 1;
28 | fi
29 |
30 | if [[ ! $APP_DB_DRIVER ]]; then
31 | export APP_DB_DRIVER="postgres";
32 | echo "\$APP_DB_DRIVER is unset, assuming postgres";
33 | fi
34 |
35 | export DATABASE_URL="${APP_DB_DRIVER}://${APP_DB_USER}:${APP_DB_PASSWORD}@${APP_DB_HOST}:${APP_DB_PORT}/${APP_DB_NAME}"
36 |
37 | ./wait-for-it.sh -t 30 -s ${APP_DB_HOST}:${APP_DB_PORT}
38 |
39 | sleep 10
40 |
41 | if [ "$MODE" == "dev" ]
42 | then
43 | initialize_gengine_db development.ini admin_password=$ADMIN_PASSWORD admin_user=$ADMIN_USER
44 | uwsgi --lazy-apps --ini-paste development.uwsgi --py-autoreload=1
45 | else
46 | initialize_gengine_db production.ini admin_password=$ADMIN_PASSWORD admin_user=$ADMIN_USER
47 | uwsgi --lazy-apps --ini-paste production.uwsgi
48 | fi
49 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/d4a70083f72e_add_user_language.py:
--------------------------------------------------------------------------------
1 | """Add User Language
2 |
3 | Revision ID: d4a70083f72e
4 | Revises: 3512efb5496d
5 | Create Date: 2016-07-22 14:04:43.900826
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = 'd4a70083f72e'
11 | down_revision = '3512efb5496d'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.create_index(op.f('ix_languages_name'), 'languages', ['name'], unique=False)
22 | op.create_index(op.f('ix_translationvariables_name'), 'translationvariables', ['name'], unique=False)
23 | op.add_column('users', sa.Column('language_id', sa.Integer(), nullable=True))
24 | op.create_foreign_key(op.f('fk_users_language_id_languages'), 'users', 'languages', ['language_id'], ['id'])
25 | ### end Alembic commands ###
26 |
27 |
28 | def downgrade():
29 | ### commands auto generated by Alembic - please adjust! ###
30 | op.drop_constraint(op.f('fk_users_language_id_languages'), 'users', type_='foreignkey')
31 | op.drop_column('users', 'language_id')
32 | op.drop_index(op.f('ix_translationvariables_name'), table_name='translationvariables')
33 | op.drop_index(op.f('ix_languages_name'), table_name='languages')
34 | ### end Alembic commands ###
35 |
--------------------------------------------------------------------------------
/gengine/app/registries.py:
--------------------------------------------------------------------------------
1 | registries = {}
2 |
3 | class HookPointRegistry:
4 | def __init__(self, single_execution=False):
5 | self.registrations = {}
6 | self.single_execution = single_execution
7 | self.has_been_executed = set()
8 |
9 | def register(self, id, fun):
10 | self.registrations[id]=fun
11 |
12 | def run_extensions(self, **kw):
13 | for id in self.registrations.keys():
14 | if (not self.single_execution) or (id not in self.has_been_executed):
15 | x = self.registrations[id]
16 | x(**kw)
17 | self.has_been_executed.add(id)
18 |
19 |
20 | def get_task_registration_points_registry():
21 | global registries
22 | if "tasks_registration_points" not in registries:
23 | registries["tasks_registration_points"] = HookPointRegistry(single_execution=True)
24 | return registries["tasks_registration_points"]
25 |
26 |
27 | def get_task_registry():
28 | global registries
29 | if "tasks" not in registries:
30 | from gengine.app.tasksystem import TaskRegistry
31 | registries["tasks"] = TaskRegistry()
32 | return registries["tasks"]
33 |
34 |
35 | def get_admin_extension_registry():
36 | global registries
37 | if "admin_extensions" not in registries:
38 | registries["admin_extensions"] = HookPointRegistry(single_execution=True)
39 | return registries["admin_extensions"]
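  40 | 
  41 | 
  42 | # Illustrative usage sketch (not part of the original module): how an extension
  43 | # point is registered and executed exactly once. The hook id and the keyword
  44 | # argument below are hypothetical.
  45 | if __name__ == "__main__":
  46 |     reg = get_admin_extension_registry()
  47 |     reg.register("example_hook", lambda **kw: print("admin extension called with", kw))
  48 |     reg.run_extensions(config=None)  # runs the hook
  49 |     reg.run_extensions(config=None)  # no-op, because single_execution=True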
--------------------------------------------------------------------------------
/gengine/app/jsscripts/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import json
4 |
5 | from gengine.base.settings import get_settings
6 | from gengine.base.util import lstrip_word
7 | from pyramid.settings import asbool
8 |
9 | def includeme(config):
10 | config.add_static_view(name='admin/jsstatic', path='gengine:app/jsscripts/build/static')
11 |
12 | def get_jsmain():
13 | debug = asbool(get_settings().get("load_from_webpack_dev_server", False))
14 | if debug:
15 | return "http://localhost:3000/static/js/bundle.js"
16 | else:
17 | modpath = os.path.dirname(sys.modules[__name__].__file__)
18 |
19 | buildpath = os.path.join(modpath, "build")
20 | with open(os.path.join(buildpath, "asset-manifest.json"), "r") as f:
21 | manifest = json.load(f)
22 | return "/admin/jsstatic/"+lstrip_word(manifest["main.js"], "static/")
23 |
24 | return None
25 |
26 | def get_cssmain():
27 | debug = asbool(get_settings().get("load_from_webpack_dev_server", False))
28 | if debug:
29 | return "http://localhost:3000/static/css/bundle.css"
30 | else:
31 | modpath = os.path.dirname(sys.modules[__name__].__file__)
32 |
33 | buildpath = os.path.join(modpath, "build")
34 | with open(os.path.join(buildpath, "asset-manifest.json"), "r") as f:
35 | manifest = json.load(f)
36 | return "/admin/jsstatic/"+lstrip_word(manifest["main.css"],"static/")
37 |
38 | return None
39 |
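  40 | 
  41 | # Illustrative note (not part of the original module): with an asset-manifest.json
  42 | # entry such as {"main.js": "static/js/main.0a1b2c.js"} (hash hypothetical),
  43 | # get_jsmain() strips the "static/" prefix via lstrip_word and returns
  44 | # "/admin/jsstatic/js/main.0a1b2c.js", which is served by the static view
  45 | # registered in includeme().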
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.1'
2 | services:
3 | python:
4 | image: gamification-engine:dev
5 | build:
6 | context: .
7 | expose:
8 | - 6543
9 | volumes:
10 | - ./gengine:/usr/src/app/gengine
11 | - uwsgi_socket:/run/uwsgi/
12 | depends_on:
13 | - postgres
14 | - memcached
15 | networks:
16 | - backend
17 | environment:
18 | - ADMIN_PASSWORD=admin@gamification-software.com
19 | - ADMIN_USER=admin@gamification-software.com
20 | - APP_DB_HOST=postgres
21 | - APP_DB_PORT=5432
22 | - APP_DB_USER=postgres
23 | - APP_DB_PASSWORD=password
24 | - APP_DB_NAME=gengine
25 | - MODE=dev
26 |
27 | nginx:
28 | image: nginx:alpine
29 | depends_on:
30 | - python
31 | volumes:
32 | - ./docker-files/nginx-dev.conf:/etc/nginx/nginx.conf
33 | - uwsgi_socket:/run/uwsgi/
34 | ports:
35 | - 9000:80
36 | networks:
37 | - backend
38 |
39 | postgres:
40 | image: postgres:11-alpine
41 | networks:
42 | - backend
43 | environment:
44 | - POSTGRES_PASSWORD=password
45 | - POSTGRES_DB=gengine
46 | - PGDATA=/pgdata
47 | volumes:
48 | - ./pgdata:/pgdata
49 |
50 | memcached:
51 | image: memcached:alpine
52 | networks:
53 | - backend
54 |
55 | adminer:
56 | image: adminer
57 | ports:
58 | - 6060:8080
59 | networks:
60 | - backend
61 |
62 | networks:
63 | backend:
64 |
65 | volumes:
66 | uwsgi_socket:
--------------------------------------------------------------------------------
/docker-compose.production.yml:
--------------------------------------------------------------------------------
1 | version: '2.1'
2 | services:
3 | python:
4 | image: gamification-engine:dev
5 | build:
6 | context: .
7 | expose:
8 | - 6543
9 | volumes:
10 | - ./gengine:/usr/src/app/gengine
11 | - uwsgi_socket:/run/uwsgi/
12 | depends_on:
13 | - postgres
14 | - memcached
15 | networks:
16 | - backend
17 | environment:
18 | - ADMIN_PASSWORD=admin@gamification-software.com
19 | - ADMIN_USER=admin@gamification-software.com
20 | - APP_DB_HOST=postgres
21 | - APP_DB_PORT=5432
22 | - APP_DB_USER=postgres
23 | - APP_DB_PASSWORD=password
24 | - APP_DB_NAME=gengine
25 | - MODE=prod
26 |
27 | nginx:
28 | image: nginx:alpine
29 | depends_on:
30 | - python
31 | volumes:
32 | - ./docker-files/nginx-prod.conf:/etc/nginx/nginx.conf
33 | - uwsgi_socket:/run/uwsgi/
34 | ports:
35 | - 9000:80
36 | networks:
37 | - backend
38 |
39 | postgres:
40 | image: postgres:11-alpine
41 | networks:
42 | - backend
43 | environment:
44 | - POSTGRES_PASSWORD=password
45 | - POSTGRES_DB=gengine
46 | - PGDATA=/pgdata
47 | volumes:
48 | - ./pgdata:/pgdata
49 |
50 | memcached:
51 | image: memcached:alpine
52 | networks:
53 | - backend
54 |
55 | adminer:
56 | image: adminer
57 | ports:
58 | - 6060:8080
59 | networks:
60 | - backend
61 |
62 | networks:
63 | backend:
64 |
65 | volumes:
66 | uwsgi_socket:
--------------------------------------------------------------------------------
/gengine/app/jsscripts/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "gengine",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "font-awesome": "^4.7.0",
7 | "input-moment": "^0.3.4",
8 | "intl": "^1.2.5",
9 | "lodash": "^4.17.13",
10 | "mobile-detect": "^1.3.6",
11 | "moment": "^2.18.1",
12 | "react": "^15.5.4",
13 | "react-dom": "^15.5.4",
14 | "react-fontawesome": "^1.6.1",
15 | "react-intl": "^2.3.0",
16 | "react-redux": "^5.0.5",
17 | "react-router": "^3.0.5",
18 | "react-select": "^1.0.0-rc.5",
19 | "redux": "^3.6.0",
20 | "redux-saga": "^0.15.3",
21 | "reselect": "^3.0.1",
22 | "swagger-client": "^3.0.12",
23 | "urijs": "^1.18.10",
24 | "uuid": "^3.0.1"
25 | },
26 | "devDependencies": {
27 | "node-sass-chokidar": "0.0.1",
28 | "nodemon": "^1.11.0",
29 | "npm-run-all": "^4.0.2",
30 | "react-scripts": "1.0.3",
31 | "remote-redux-devtools": "^0.5.11"
32 | },
33 | "scripts": {
34 | "test": "react-scripts test --env=jsdom",
35 | "eject": "react-scripts eject",
36 | "watch": "nodemon -w src -w public -x \"npm run build\"",
37 | "build-css": "node-sass-chokidar --include-path ./src --include-path ./node_modules src/ -o src/",
38 | "watch-css": "npm run build-css && node-sass-chokidar --include-path ./src --include-path ./node_modules src/ -o src/ --watch --recursive",
39 | "start-js": "react-scripts start",
40 | "start": "npm-run-all -p watch-css start-js",
41 | "build": "npm run build-css && react-scripts build"
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/gengine/app/tests_old/test_device.py:
--------------------------------------------------------------------------------
1 | from gengine.app.tests.base import BaseDBTest
2 | from gengine.app.tests.helpers import create_user, create_device, update_device
3 |
4 |
5 | class TestUserDevice(BaseDBTest):
6 |
7 | def test_create_user_device(self):
8 |
9 | user = create_user()
10 |
11 | device = create_device(
12 | device_id='3424',
13 | user_id=user.id,
14 | device_os='Android',
15 | push_id='1234',
16 | app_version='1.1'
17 | )
18 |
19 | self.assertTrue(device.device_id == '3424')
20 | self.assertTrue(device.user_id == user.id)
21 | self.assertTrue(device.device_os == 'Android')
22 | self.assertTrue(device.push_id == '1234')
23 | self.assertTrue(device.app_version == '1.1')
24 |
25 | def test_update_user_device(self):
26 |
27 | user = create_user()
28 | create_device(user_id=user.id)
29 |
30 | device = update_device(
31 | user_id=user.id,
32 | device_id='1256',
33 | push_id='5126',
34 | device_os='iOS',
35 | app_version='1.2'
36 | )
37 |
38 | # Correct cases
39 | self.assertTrue(device.device_id == '1256')
40 | self.assertTrue(device.user_id == user.id)
41 | self.assertTrue(device.push_id == '5126')
42 | self.assertTrue(device.app_version == '1.2')
43 |
44 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/lib/persistence/index.js:
--------------------------------------------------------------------------------
1 | import {
2 | getLocalPersistenceMiddleware,
3 | enableLocalStoragePersistenceMiddleware,
4 | localPersistenceReducer,
5 | loadReduxStoreFromLocalStorage,
6 | LOAD_REDUX_STORE_FROM_LOCAL_STORAGE
7 | } from './persistence';
8 |
9 | export {
10 | // The Middleware to save all store changes to the persistence store
11 | getLocalPersistenceMiddleware,
12 | // The reducer loads all persisted element into the store
13 | // And enables the middleware
14 | localPersistenceReducer,
15 | // Action Creator to enable the Middleware
16 | enableLocalStoragePersistenceMiddleware,
17 | /*
18 | const store = createStore(rootReducer, composeWithDevTools(
19 | applyMiddleware(sagaMiddleware, getLocalPersistenceMiddleware([
20 | // Store paths, that should be persisted
21 | // e.g. [["auth","user"],["auth","token"]]
22 | //...data.localPersistence.map(x => ["data",...x]),
23 | ])),
24 | ));
25 | */
26 | // The action creator to trigger the loading
27 | loadReduxStoreFromLocalStorage,
28 | // The action type for loading is needed to create the RootReducer, as it has to work on the full state
29 | /*
30 |
31 | const rootReducer = (state,action) => {
32 | if(action.type == LOAD_REDUX_STORE_FROM_LOCAL_STORAGE) {
33 | return localPersistenceReducer(state, action);
34 | } else {
35 | return combinedReducer(state, action);
36 | }
37 | }
38 |
39 | */
40 | LOAD_REDUX_STORE_FROM_LOCAL_STORAGE
41 | }
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/App.js:
--------------------------------------------------------------------------------
1 | import React, { Component, PropTypes } from 'react';
2 | import { Provider } from 'react-redux';
3 | import { IntlProvider } from 'react-intl';
4 | import { DynamicProvider } from './lib/swagger';
5 | import { bindActionCreators } from "redux";
6 | import { connect } from "react-redux";
7 | import * as UIActions from './storeState/ui/actions';
8 | import { getLocale } from './storeState/ui/selectors';
9 | import messages from './locales';
10 | import './App.css'
11 |
12 | class App extends Component {
13 |
14 | componentWillMount = () => {
15 | let lang = this.props.lang;
16 | if(!messages[lang]){
17 | lang = "en";
18 | }
19 | this.props.uiActions.setLocale(lang);
20 | }
21 |
22 | getIntlProviderData = () => {
23 | const lang = this.props.lang;
24 | return {
25 | locale: lang,
26 | messages: messages[lang],
27 | };
28 | }
29 |
30 | render() {
31 | return (
  32 |       {/* JSX wrapper markup lost in extraction; the imports above suggest
  33 |          IntlProvider / DynamicProvider elements wrapping the children. */}
  34 |       {this.props.children}
39 | );
40 | }
41 | }
42 |
43 | function mapStateToProps(state, props) {
44 | return {
45 | locale: getLocale(state),
46 | }
47 | }
48 |
49 | function mapDispatchToProps(dispatch) {
50 | return {
51 | uiActions: bindActionCreators(UIActions, dispatch),
52 | }
53 | }
54 |
55 | export default connect(
56 | mapStateToProps,
57 | mapDispatchToProps
58 | )(App)
59 |
--------------------------------------------------------------------------------
/gengine/base/cache.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from dogpile.cache import make_region
3 | from pyramid_dogpile_cache import get_region
4 |
5 | force_redis = None
6 |
7 | def setup_redis_cache(host,port,db):
8 | """ This is used to override all caching settings in the ini file. Needed for Testing. """
9 | global force_redis
10 | force_redis = {
11 | 'host': host,
12 | 'port': port,
13 | 'db': db,
14 | 'redis_expiration_time': 60 * 60 * 2, # 2 hours
15 | 'distributed_lock': True
16 | }
17 |
18 |
19 | def my_key_mangler(prefix):
20 | def s(o):
21 | if type(o) == dict:
22 | return "_".join(["%s=%s" % (str(k), str(v)) for k, v in o.items()])
23 | if type(o) == tuple:
24 | return "_".join([str(v) for v in o])
25 | if type(o) == list:
26 | return "_".join([str(v) for v in o])
27 | else:
28 | return str(o)
29 |
30 | def generate_key(key):
31 | ret = ""
32 | ret += prefix + s(key).replace(" ", "")
33 | return ret
34 |
35 | return generate_key
36 |
37 |
38 | def create_cache(name):
39 | ch = None
40 |
41 | if force_redis:
42 | ch = make_region().configure(
43 | 'dogpile.cache.redis',
44 | arguments=force_redis
45 | )
46 | else:
47 | try:
48 | ch = get_region(name)
49 | except:
50 | ch = make_region().configure('dogpile.cache.memory')
51 | warnings.warn("Warning: cache objects are in memory, are you creating docs?")
52 |
53 | ch.key_mangler = my_key_mangler(name)
54 |
55 | return ch
56 |
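  57 | 
  58 | # Illustrative usage sketch (not part of the original module). Without a
  59 | # configured pyramid_dogpile_cache region this falls back to the in-memory
  60 | # backend; the region name "example" and the key below are hypothetical.
  61 | if __name__ == "__main__":
  62 |     region = create_cache("example")
  63 |     region.set({"subject": 1, "group": "day"}, 42)      # dict keys are flattened by my_key_mangler
  64 |     print(region.get({"subject": 1, "group": "day"}))   # -> 42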
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/e8799ed00992_grouptype_relevance.py:
--------------------------------------------------------------------------------
1 | """grouptype_relevance
2 |
3 | Revision ID: e8799ed00992
4 | Revises: 46e4971dc46f
5 | Create Date: 2017-05-30 11:55:48.345622
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = 'e8799ed00992'
11 | down_revision = '46e4971dc46f'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('achievements', sa.Column('relevant_grouptype_id', sa.Integer(), nullable=True))
22 | op.create_index(op.f('ix_achievements_relevant_grouptype_id'), 'achievements', ['relevant_grouptype_id'], unique=False)
23 | op.create_foreign_key(op.f('fk_achievements_relevant_grouptype_id_grouptypes'), 'achievements', 'grouptypes', ['relevant_grouptype_id'], ['id'], ondelete='RESTRICT')
24 |
25 | op.execute("COMMIT")
26 | op.execute("""
27 | DELETE FROM pg_enum
28 | WHERE enumlabel = 'city'
29 | AND enumtypid = (
30 | SELECT oid FROM pg_type WHERE typname = 'relevance_types'
31 | )
32 | """)
33 | # ### end Alembic commands ###
34 |
35 |
36 | def downgrade():
37 | # ### commands auto generated by Alembic - please adjust! ###
38 | op.drop_constraint(op.f('fk_achievements_relevant_grouptype_id_grouptypes'), 'achievements', type_='foreignkey')
39 | op.drop_index(op.f('ix_achievements_relevant_grouptype_id'), table_name='achievements')
40 | op.drop_column('achievements', 'relevant_grouptype_id')
41 |
42 | op.execute("ALTER TYPE relevance_types ADD value 'city' after 'friends'")
43 | # ### end Alembic commands ###
44 |
--------------------------------------------------------------------------------
/gengine/app/cache.py:
--------------------------------------------------------------------------------
1 | from gengine.base.cache import create_cache
2 |
3 | caches = {}
4 |
5 | cache_general = None
6 | cache_goal_evaluation = None
7 | cache_achievement_eval = None
8 | cache_achievements_subjects_levels = None
9 | cache_achievements_by_subject_for_today = None
10 | #cache_goal_statements = None
11 | cache_translations = None
12 |
13 | def init_caches():
14 | global cache_general
15 | cache_general = create_cache("general")
16 |
17 | global cache_achievement_eval
18 | cache_achievement_eval = create_cache("achievement_eval")
19 |
20 | global cache_achievements_by_subject_for_today
21 | cache_achievements_by_subject_for_today = create_cache("achievements_by_subject_for_today")
22 |
23 | global cache_achievements_subjects_levels
24 | cache_achievements_subjects_levels = create_cache("achievements_subjects_levels")
25 |
26 | global cache_translations
27 | cache_translations = create_cache("translations")
28 |
29 | # The Goal evaluation Cache is implemented as a two-level cache (persistent in db, non-persistent as dogpile)
30 | global cache_goal_evaluation
31 | cache_goal_evaluation = create_cache("goal_evaluation")
32 |
33 | #global cache_goal_statements
34 | #cache_goal_statements = create_memory_cache("goal_statements")
35 |
36 |
37 | def clear_all_caches():
38 | cache_general.invalidate(hard=True)
39 | cache_achievement_eval.invalidate(hard=True)
40 | cache_achievements_by_subject_for_today.invalidate(hard=True)
41 | cache_achievements_subjects_levels.invalidate(hard=True)
42 | cache_translations.invalidate(hard=True)
43 | cache_goal_evaluation.invalidate(hard=True)
44 | #cache_goal_statements.invalidate(hard=True)
45 |
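  46 | 
  47 | # Illustrative usage sketch (not part of the original module); the key and the
  48 | # loader below are hypothetical. Call sites must run init_caches() first, since
  49 | # the module-level regions start out as None.
  50 | if __name__ == "__main__":
  51 |     init_caches()
  52 |     value = cache_general.get_or_create("example_key", lambda: 42)  # computed once, then cached
  53 |     print(value, cache_general.get("example_key"))
  54 |     clear_all_caches()  # hard-invalidates every region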
--------------------------------------------------------------------------------
/gengine/app/jsscripts/public/index.html:
--------------------------------------------------------------------------------
   [HTML markup lost in extraction: apparently the standard create-react-app public/index.html shell; the only recoverable text is the document title "React App".]
--------------------------------------------------------------------------------
/gengine/app/tests/runner.py:
--------------------------------------------------------------------------------
1 | from gengine.app.tests import db as db
2 | from gengine.metadata import init_declarative_base, init_session
3 | import unittest
4 | import os
5 | import pkgutil
6 | import logging
7 | import sys
8 |
9 | log = logging.getLogger(__name__)
10 |
11 | try:
12 | import testing.redis
13 | except ImportError as e:
14 | log.info("testing.redis not installed")
15 |
16 | init_session()
17 | init_declarative_base()
18 |
19 | __path__ = [x[0] for x in os.walk(os.path.dirname(__file__))]
20 |
21 | def create_test_suite():
22 | suite = unittest.TestSuite()
23 | for imp, modname, _ in pkgutil.walk_packages(__path__):
24 | #if modname in ('test_achievement_integration_tests',):
25 | mod = imp.find_module(modname).load_module(modname)
26 | for test in unittest.defaultTestLoader.loadTestsFromModule(mod):
27 | suite.addTests(test)
28 | return suite
29 |
30 | if __name__=="__main__":
31 | exit = 1
32 | try:
33 | redis = testing.redis.RedisServer()
34 |
35 | from gengine.base.cache import setup_redis_cache
36 | dsn = redis.dsn()
37 | setup_redis_cache(dsn["host"], dsn["port"], dsn["db"])
38 |
39 | from gengine.app.cache import init_caches
40 | init_caches()
41 |
42 | db.setupDB()
43 | testSuite = create_test_suite()
44 | text_runner = unittest.TextTestRunner(failfast=True).run(testSuite)
45 | if text_runner.wasSuccessful():
46 | exit = 0
47 | finally:
48 | try:
49 | db.unsetupDB()
50 | except:
51 | log.exception("unsetup db failed")
52 | try:
53 | redis.stop()
54 | except:
55 | log.exception("stopping redis failed")
56 | sys.exit(exit)
57 |
--------------------------------------------------------------------------------
/gengine/app/tests_old/runner.py:
--------------------------------------------------------------------------------
1 | from gengine.app.tests import db as db
2 | from gengine.metadata import init_declarative_base, init_session
3 | import unittest
4 | import os
5 | import pkgutil
6 | import logging
7 | import sys
8 |
9 | log = logging.getLogger(__name__)
10 |
11 | try:
12 | import testing.redis
13 | except ImportError as e:
14 | log.info("testing.redis not installed")
15 |
16 | init_session()
17 | init_declarative_base()
18 |
19 | __path__ = [x[0] for x in os.walk(os.path.dirname(__file__))]
20 |
21 | def create_test_suite():
22 | suite = unittest.TestSuite()
23 | for imp, modname, _ in pkgutil.walk_packages(__path__):
24 | #if modname in ('test_achievement_integration_tests',):
25 | mod = imp.find_module(modname).load_module(modname)
26 | for test in unittest.defaultTestLoader.loadTestsFromModule(mod):
27 | suite.addTests(test)
28 | return suite
29 |
30 | if __name__=="__main__":
31 | exit = 1
32 | try:
33 | redis = testing.redis.RedisServer()
34 |
35 | from gengine.base.cache import setup_redis_cache
36 | dsn = redis.dsn()
37 | setup_redis_cache(dsn["host"], dsn["port"], dsn["db"])
38 |
39 | from gengine.app.cache import init_caches
40 | init_caches()
41 |
42 | db.setupDB()
43 | testSuite = create_test_suite()
44 | text_runner = unittest.TextTestRunner(failfast=True).run(testSuite)
45 | if text_runner.wasSuccessful():
46 | exit = 0
47 | finally:
48 | try:
49 | db.unsetupDB()
50 | except:
51 | log.exception("unsetup db failed")
52 | try:
53 | redis.stop()
54 | except:
55 | log.exception("stopping redis failed")
56 | sys.exit(exit)
57 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/LayoutService.js:
--------------------------------------------------------------------------------
1 |
2 | import MobileDetect from 'mobile-detect';
3 | const md = new MobileDetect(window.navigator.userAgent);
4 | const mobile = md.mobile();
5 | const os = md.os();
6 |
7 | export const getDimensions = () => {
8 |
9 | let width = window.innerWidth
10 | || document.documentElement.clientWidth
11 | || document.body.clientWidth;
12 |
13 | let height = window.innerHeight
14 | || document.documentElement.clientHeight
15 | || document.body.clientHeight;
16 |
17 | return {width, height};
18 | };
19 |
20 | export const isMobile = () => {
21 | return mobile
22 | };
23 |
24 | export const isAndroid = () => {
25 | return os=='AndroidOS';
26 | };
27 |
28 | export const isIos = () => {
29 | return os=='iOS';
30 | };
31 |
32 | export const isIpad = () => {
33 | return md.is('iPad');
34 | };
35 |
36 | export const isBlackBerry = () => {
37 | return os=="BlackBerryOS";
38 | }
39 |
40 | export const isFF = () => {
41 | return navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
42 | }
43 |
44 | export const isIE = () => {
45 | var ua = window.navigator.userAgent;
46 |
47 | var msie = ua.indexOf('MSIE ');
48 | if (msie > 0) {
49 | // IE 10 or older => return version number
50 | return parseInt(ua.substring(msie + 5, ua.indexOf('.', msie)), 10);
51 | }
52 |
53 | var trident = ua.indexOf('Trident/');
54 | if (trident > 0) {
55 | // IE 11 => return version number
56 | var rv = ua.indexOf('rv:');
57 | return parseInt(ua.substring(rv + 3, ua.indexOf('.', rv)), 10);
58 | }
59 |
60 | var edge = ua.indexOf('Edge/');
61 | if (edge > 0) {
62 | // Edge (IE 12+) => return version number
63 | return parseInt(ua.substring(edge + 5, ua.indexOf('.', edge)), 10);
64 | }
65 |
66 | // other browser
67 | return false;
68 | }
69 |
--------------------------------------------------------------------------------
/gengine/base/util.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import pytz
3 |
4 | class DictObjectProxy:
5 |
   6 |     def __init__(self, obj=None):
   7 |         super().__setattr__("obj", obj if obj is not None else {})
8 |
9 | def __getattr__(self, name):
10 | if not name in super().__getattribute__("obj"):
11 | raise AttributeError
12 | return super().__getattribute__("obj")[name]
13 |
14 | def __setattr__(self, key, value):
15 | super().__getattribute__("obj")[key] = value
16 |
17 |
18 | class Proxy(object):
19 | def __init__(self):
20 | self.target = None
21 |
22 | def __getattr__(self, name):
23 | return getattr(self.target, name)
24 |
25 | def __setattr__(self, name, value):
26 | if name == "target":
27 | return object.__setattr__(self, name, value)
28 | else:
29 | setattr(self.target, name, value)
30 |
31 | def __call__(self, *args, **kwargs):
32 | return self.target(*args, **kwargs)
33 |
34 |
35 | def dt_now():
36 | return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
37 |
38 | def dt_ago(**kw):
39 | return datetime.datetime.utcnow().replace(tzinfo=pytz.utc) - datetime.timedelta(**kw)
40 |
41 | def dt_in(**kw):
42 | return datetime.datetime.utcnow().replace(tzinfo=pytz.utc) + datetime.timedelta(**kw)
43 |
44 | def seconds_until_end_of_day(timezone):
45 | tzobj = pytz.timezone(timezone)
46 | now = datetime.datetime.now(tzobj)
47 | today = now.replace(hour=0, minute=0, second=0, microsecond=0)
48 | tomorrow = today + datetime.timedelta(days=1)
  49 |     return int((tomorrow - now).total_seconds())
50 |
51 | def normalize_key(key):
52 | return '' if key is None else str(key)
53 |
54 | def rowproxy2dict(rp):
55 | return {k: v for k, v in dict(rp).items() if not str(k).startswith("_")}
56 |
57 | def lstrip_word(text, word):
58 | return text[len(word):] if text[:len(word)] == word else text
59 |
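  60 | 
  61 | # Illustrative usage sketch (not part of the original module); the values are
  62 | # hypothetical.
  63 | if __name__ == "__main__":
  64 |     proxy = DictObjectProxy({"name": "levelup"})
  65 |     print(proxy.name)                                    # -> levelup
  66 |     print(lstrip_word("static/js/main.js", "static/"))   # -> js/main.js (only a real prefix is removed)
  67 |     print(normalize_key(None), normalize_key(7))         # None becomes '', everything else str()
  68 |     print(dt_ago(hours=1) < dt_now() < dt_in(hours=1))   # -> True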
--------------------------------------------------------------------------------
/gengine/app/alembic/env.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | from logging.config import fileConfig
4 |
5 | import os
6 | from alembic import context
7 |
8 | # this is the Alembic Config object, which provides
9 | # access to the values within the .ini file in use.
10 |
11 | config = context.config
12 |
13 | # Interpret the config file for Python logging.
14 | # This line sets up loggers basically.
15 | if config.config_file_name:
16 | fileConfig(config.config_file_name)
17 |
18 | overrides = {}
19 |
20 | durl = os.environ.get("DATABASE_URL") #heroku
21 | if durl:
22 | config.set_main_option('sqlalchemy.url',durl)
23 |
24 | # add your model's MetaData object here
25 | # for 'autogenerate' support
26 | from gengine.metadata import init_session,init_declarative_base
27 | init_session()
28 | init_declarative_base()
29 |
30 | from gengine.metadata import Base
31 | target_metadata = Base.metadata
32 |
33 | # target_metadata = None
34 |
35 | from gengine.app.model import *
36 |
37 | # other values from the config, defined by the needs of env.py,
38 | # can be acquired:
39 | # my_important_option = config.get_main_option("my_important_option")
40 | # ... etc.
41 |
42 | def run_migrations_online():
43 | """Run migrations in 'online' mode.
44 |
45 | In this scenario we need to create an Engine
46 | and associate a connection with the context.
47 |
48 | """
49 |
50 | engine = config.attributes["engine"]
51 |
52 | schema = config.attributes["schema"]
53 |
54 | #connectable = create_engine(url, poolclass=pool.NullPool)
55 |
56 | with engine.connect() as connection:
57 |
58 | connection.execute("SET search_path TO "+schema)
59 |
60 | context.configure(
61 | connection=connection,
62 | target_metadata=target_metadata
63 | )
64 |
65 | with context.begin_transaction():
66 | context.run_migrations()
67 |
68 | run_migrations_online()
69 |
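  70 | 
  71 | # Illustrative note (not part of the original file): this env.py expects the caller
  72 | # to pass the engine and schema through alembic's Config attributes, e.g. (as done
  73 | # in gengine/app/tests/base.py and the maintenance scripts):
  74 | #
  75 | #   alembic_cfg = Config(attributes={'engine': engine, 'schema': 'public'})
  76 | #   alembic_cfg.set_main_option("script_location", ".../app/alembic")
  77 | #   command.stamp(alembic_cfg, "head")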
--------------------------------------------------------------------------------
/gengine/metadata.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.orm.session import Session, sessionmaker
2 | import transaction
3 | from sqlalchemy.orm.scoping import scoped_session
4 | from sqlalchemy.sql.schema import MetaData
5 | from zope.sqlalchemy.datamanager import ZopeTransactionExtension
6 | from sqlalchemy.ext.declarative.api import declarative_base
7 |
8 | from gengine.base.util import Proxy
9 |
10 |
11 | class MySession(Session):
12 | """This allow us to use the flask-admin sqla extension, which uses DBSession.commit() rather than transaction.commit()"""
13 | def commit(self,*args,**kw):
14 | transaction.commit(*args,**kw)
15 |
16 | def rollback(self, *args, **kw):
17 | transaction.abort(*args,**kw)
18 |
19 | DBSession=Proxy()
20 |
21 | def get_sessionmaker(bind=None):
22 | return sessionmaker(
23 | extension=ZopeTransactionExtension(),
24 | class_=MySession,
25 | bind=bind
26 | )
27 |
28 | def init_session(override_session=None, replace=False):
29 | global DBSession
30 | if DBSession.target and not replace:
31 | return
32 | if override_session:
33 | DBSession.target = override_session
34 | else:
35 | DBSession.target = scoped_session(get_sessionmaker())
36 |
37 | Base=None
38 |
39 | def init_declarative_base(override_base=None):
40 | global Base
41 | if Base:
42 | return
43 | if override_base:
44 | Base = override_base
45 | else:
46 | convention = {
47 | "ix": 'ix_%(column_0_label)s',
48 | "uq": "uq_%(table_name)s_%(column_0_name)s",
49 | "ck": "ck_%(table_name)s_%(constraint_name)s",
50 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
51 | "pk": "pk_%(table_name)s"
52 | }
53 | metadata = MetaData(naming_convention=convention)
54 | Base = declarative_base(metadata = metadata)
55 |
56 | def init_db(engine):
57 | DBSession.configure(bind=engine)
58 | Base.metadata.bind = engine
59 |
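  60 | 
  61 | # Illustrative bootstrap sketch (not part of the original module), mirroring the
  62 | # order used by the maintenance scripts; the connection URL is hypothetical:
  63 | #
  64 | #   from sqlalchemy import create_engine
  65 | #   init_session()              # installs a scoped session behind the DBSession proxy
  66 | #   init_declarative_base()     # creates Base with the naming convention above
  67 | #   init_db(create_engine("postgresql://user@localhost:5432/gengine"))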
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 | import GroupAssignment from './components/views/group-assignment/GroupAssignment';
5 | import LeaderboardCreation from './components/views/leaderboard-creation/LeaderboardCreation';
6 |
7 | import { initStoreState } from './storeState';
8 | import { enableLocalStoragePersistenceMiddleware, loadReduxStoreFromLocalStorage } from './lib/persistence';
9 | import initStore from './storeState';
10 |
11 | import 'react-select/dist/react-select.css';
12 | import 'input-moment/dist/input-moment.css';
13 |
14 | const init = (component, domElement) => {
15 |
16 | const renderApp = () => {
17 | initStore().then(({store, dynamic}) => {
18 | ReactDOM.render(
  19 |         {component}, /* enclosing JSX wrapper element(s) lost in extraction */
20 | domElement
21 | );
22 | store.dispatch(enableLocalStoragePersistenceMiddleware());
23 | let loadAction = loadReduxStoreFromLocalStorage();
24 | if(loadAction) {
25 | store.dispatch(loadAction);
26 | }
27 | console.log("dynamic", dynamic);
28 | }).catch(function(e) {
29 | console.error(e);
30 | });
31 | }
32 |
33 | function runMyApp() {
34 | renderApp();
35 | }
36 |
37 | if (!global.Intl) {
38 | require.ensure([
39 | 'intl',
40 | 'intl/locale-data/jsonp/en.js',
41 | 'intl/locale-data/jsonp/de.js'
42 | ], function (require) {
43 | require('intl');
44 | require('intl/locale-data/jsonp/en.js');
45 | require('intl/locale-data/jsonp/de.js');
46 | runMyApp()
47 | });
48 | } else {
49 | runMyApp()
50 | }
51 | }
52 |
53 | window.gengine = {
54 | 'renderComponent': (domElement, component) => {
55 | if(component=="GroupAssignment") {
  56 |       init(<GroupAssignment/>, domElement);
57 | }
58 | if(component=="LeaderboardCreation") {
  59 |       init(<LeaderboardCreation/>, domElement);
60 | }
61 | }
62 | };
63 |
--------------------------------------------------------------------------------
/gengine/app/permissions.py:
--------------------------------------------------------------------------------
1 | # Used in new API
2 | perm_global_search_subjects = "global_search_subjects"
3 | desc_global_search_subjects = "(API) Can search subjects"
4 |
5 | # Used in new API
6 | perm_global_manage_subjects = "global_manage_subjects"
7 | desc_global_manage_subjects = "(API) Manage Subjects"
8 |
9 | # Used in new API
10 | perm_global_search_subjecttypes = "global_search_subjecttypes"
11 | desc_global_search_subjecttypes = "(API) Can search subjecttypes"
12 |
13 | # Used in new API
14 | perm_global_list_variables = "global_list_variables"
15 | desc_global_list_variables = "(API) List variables"
16 |
17 | # Used in new API
18 | perm_global_list_timezones = "global_list_timezones"
19 | desc_global_list_timezones = "(API) List timezones"
20 |
21 | # Used in new API
22 | perm_global_manage_achievements = "global_manage_achievements"
23 | desc_global_manage_achievements = "(API) Manage achievements"
24 |
25 | # Old Permissions
26 | perm_global_access_admin_ui = "global_access_admin_ui"
27 | desc_global_access_admin_ui = "(Admin) Can access Admin-UI"
28 |
29 | perm_own_update_subject_infos = "own_update_subject_infos"
30 | desc_own_update_subject_infos = "Update my own infos"
31 |
32 | perm_global_delete_subject = "global_delete_subject"
33 | desc_global_delete_subject = "(Admin) Delete all subjects"
34 |
35 | perm_own_delete_subject = "own_delete_subject"
36 | desc_own_delete_subject = "Delete myself"
37 |
38 | perm_global_increase_value = "global_increase_value"
39 | desc_global_increase_value = "(Admin) Increase every subject's values"
40 |
41 | perm_global_register_device = "global_register_device"
42 | desc_global_register_device = "(Admin) Register devices for any subject"
43 |
44 | perm_own_register_device = "own_register_device"
45 | desc_own_register_device = "Register devices for myself"
46 |
  47 | perm_global_read_messages = "global_read_messages"
48 | desc_global_read_messages = "(Admin) Read messages of all subjects"
49 |
50 | perm_own_read_messages = "own_read_messages"
51 | desc_own_read_messages = "Read own messages"
52 |
53 |
54 | def yield_all_perms():
55 | for k, v in globals().items():
56 | if k.startswith("perm_"):
  57 |             yield (v, globals().get("desc_" + k[len("perm_"):], k))  # str.lstrip would strip characters, not the prefix
58 |
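  59 | 
  60 | # Illustrative usage sketch (not part of the original module): listing every
  61 | # permission together with its description.
  62 | if __name__ == "__main__":
  63 |     all_perms = list(yield_all_perms())  # materialize first, so new module globals
  64 |                                          # don't change the dict while it is iterated
  65 |     for perm, description in all_perms:
  66 |         print(perm, "-", description)
  67 |     # e.g. "global_search_subjects - (API) Can search subjects"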
--------------------------------------------------------------------------------
/gengine/app/tests_old/base.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import os
3 | from sqlalchemy.engine import create_engine
4 | from sqlalchemy.sql.schema import Table
5 | from sqlalchemy.orm.scoping import scoped_session
6 | from gengine.metadata import init_session, get_sessionmaker
7 | from gengine.app.tests import db
8 |
9 | class BaseDBTest(unittest.TestCase):
10 |
11 | @classmethod
12 | def setUpClass(cls):
13 | if cls is BaseDBTest:
14 | raise unittest.SkipTest("Skip BaseTest tests, it's a base class")
15 | super(BaseDBTest, cls).setUpClass()
16 |
17 | def setUp(self):
18 | from gengine.app.cache import clear_all_caches
19 | clear_all_caches()
20 | self.db = db.db()
21 | dsn = self.db.dsn()
22 | self.engine = create_engine(
23 | "postgresql://%(user)s@%(host)s:%(port)s/%(database)s" % {
24 | "user": dsn["user"],
25 | "host": dsn["host"],
26 | "port": dsn["port"],
27 | "database": dsn["database"],
28 | }
29 | )
30 | init_session(override_session=scoped_session(get_sessionmaker(bind=self.engine)), replace=True)
31 | from gengine.metadata import Base
32 | Base.metadata.bind = self.engine
33 |
34 | Base.metadata.drop_all(self.engine)
35 | self.engine.execute("DROP SCHEMA IF EXISTS public CASCADE")
36 | self.engine.execute("CREATE SCHEMA IF NOT EXISTS public")
37 |
38 | from alembic.config import Config
39 | from alembic import command
40 |
41 | alembic_cfg = Config(attributes={
42 | 'engine': self.engine,
43 | 'schema': 'public'
44 | })
45 | script_location = os.path.join(
46 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
47 | 'app/alembic'
48 | )
49 | alembic_cfg.set_main_option("script_location", script_location)
50 |
51 | from gengine.app import model
52 |
53 | tables = [t for name, t in model.__dict__.items() if isinstance(t, Table)]
54 | Base.metadata.create_all(self.engine, tables=tables)
55 |
56 | command.stamp(alembic_cfg, "head")
57 |
58 | def tearDown(self):
59 | self.db.stop()
60 |
--------------------------------------------------------------------------------
/gengine/maintenance/scripts/push_messages.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import sys
3 | import logging
4 |
5 | from zope.sqlalchemy.datamanager import mark_changed
6 |
7 | from gengine.metadata import MySession
8 |
9 | log = logging.getLogger(__name__)
10 | log.addHandler(logging.StreamHandler())
11 |
12 | import os
13 | import pyramid_dogpile_cache
14 | import transaction
15 | from gengine.app.cache import init_caches
16 | from pyramid.config import Configurator
17 | from pyramid.paster import (
18 | get_appsettings,
19 | setup_logging,
20 | )
21 | from pyramid.scripts.common import parse_vars
22 | from sqlalchemy import engine_from_config
23 |
24 | def usage(argv):
25 | cmd = os.path.basename(argv[0])
  26 |     print('usage: %s <config_uri> [var=value]\n'
27 | '(example: "%s production.ini")' % (cmd, cmd))
28 | sys.exit(1)
29 |
30 |
31 | def main(argv=sys.argv):
32 | if len(argv) < 2:
33 | usage(argv)
34 | config_uri = argv[1]
35 | options = parse_vars(argv[2:])
36 | setup_logging(config_uri)
37 | settings = get_appsettings(config_uri, options=options)
38 |
39 | from gengine.base.settings import set_settings
40 | set_settings(settings)
41 |
42 | durl = os.environ.get("DATABASE_URL") # heroku
43 | if durl:
44 | settings['sqlalchemy.url'] = durl
45 |
46 | murl = os.environ.get("MEMCACHED_URL")
47 | if murl:
48 | settings['urlcache_url'] = murl
49 |
50 | engine = engine_from_config(settings, 'sqlalchemy.')
51 |
52 | config = Configurator(settings=settings)
53 | pyramid_dogpile_cache.includeme(config)
54 |
55 | from gengine.metadata import (
56 | init_session,
57 | init_declarative_base,
58 | init_db
59 | )
60 | init_session()
61 | init_declarative_base()
62 | init_db(engine)
63 | init_caches()
64 |
65 | from gengine.metadata import (
66 | DBSession
67 | )
68 | sess = DBSession()
69 | init_session(override_session=sess, replace=True)
70 |
71 | import gengine.app.model as m
72 | with transaction.manager:
73 | mark_changed(sess, transaction.manager, True)
74 |
75 | messages = sess.execute(m.t_user_messages.select().where(m.t_user_messages.c.has_been_pushed == False))
76 | for msg in messages:
77 | m.SubjectMessage.deliver(msg)
78 | sess.flush()
79 | sess.commit()
80 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/components/views/group-assignment/groupAssignment.scss:
--------------------------------------------------------------------------------
1 | .group-assignment {
2 | * {
3 | box-sizing: border-box;
4 | }
5 | width: 100%;
6 | margin-bottom: 50px;
7 |
8 | .side-header {
9 | font-size: 1.2em;
10 | font-weight: bold;
11 |
12 | }
13 |
14 | .users {
15 | width: calc(50% - 1px);
16 | padding-left: 30px;
17 | vertical-align: top;
18 | }
19 |
20 | .groups {
21 | width: 50%;
22 | vertical-align: top;
23 | padding-right: 30px;
24 | border-right: 1px solid #9b9b9b;
25 | }
26 |
27 | .groups-list-item {
28 | border-bottom: 1px solid #cccccc;
29 | padding: 5px 0px;
30 | color: blue;
31 | }
32 | .groups-list-item:last-child {
33 | border-bottom: 0px;
34 | }
35 | .groups-list-item:hover {
36 | cursor: pointer;
37 | color: #9c9c9c;
38 | text-decoration: underline;
39 | }
40 |
41 | .groups-list-item-selected {
42 | font-weight: bold;
43 | border-bottom: 2px solid #cccccc;
44 | }
45 |
46 | .search-field-wrapper {
47 | border-bottom: 1px solid #9b9b9b;
48 | margin-bottom: 10px;
49 | }
50 |
51 | .search-field {
52 | border-top: 0px;
53 | border-left: 0px;
54 | border-right: 0px;
55 | border-bottom: 0px;
56 | padding: 5px 5px 5px 5px;
57 | font-size: 16px;
58 | border-radius: 0px;
59 | width: calc(100% - 30px);
60 | height: 30px;
61 | outline: 0;
62 | box-shadow:none !important;
63 | display: inline-block;
64 | }
65 |
66 | .contained-user-list-header {
67 | font-weight: bold;
68 | }
69 | .not-contained-user-list-header {
70 | font-weight: bold;
71 | margin-top: 20px;
72 | }
73 |
74 | .users-list-item {
75 | border-bottom: 1px solid #cccccc;
76 | padding: 5px 0px;
77 | position: relative;
78 | }
79 | .users-list-item:last-child {
80 | border-bottom: 0px;
81 | }
82 | .users-list-item-toggle:hover {
83 | cursor: pointer;
84 | color: #9c9c9c;
85 | text-decoration: underline;
86 | }
87 |
88 | .users-list-item-toggle {
89 | float: right;
90 | color: blue;
91 | }
92 |
93 | .inherited-group-link {
94 | color: blue;
95 | }
96 | .inherited-group-link:hover {
97 | cursor: pointer;
98 | color: #9c9c9c;
99 | text-decoration: underline;
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/components/views/group-assignment/groupAssignment.css:
--------------------------------------------------------------------------------
1 | .group-assignment {
2 | width: 100%;
3 | margin-bottom: 50px; }
4 | .group-assignment * {
5 | box-sizing: border-box; }
6 | .group-assignment .side-header {
7 | font-size: 1.2em;
8 | font-weight: bold; }
9 | .group-assignment .users {
10 | width: calc(50% - 1px);
11 | padding-left: 30px;
12 | vertical-align: top; }
13 | .group-assignment .groups {
14 | width: 50%;
15 | vertical-align: top;
16 | padding-right: 30px;
17 | border-right: 1px solid #9b9b9b; }
18 | .group-assignment .groups-list-item {
19 | border-bottom: 1px solid #cccccc;
20 | padding: 5px 0px;
21 | color: blue; }
22 | .group-assignment .groups-list-item:last-child {
23 | border-bottom: 0px; }
24 | .group-assignment .groups-list-item:hover {
25 | cursor: pointer;
26 | color: #9c9c9c;
27 | text-decoration: underline; }
28 | .group-assignment .groups-list-item-selected {
29 | font-weight: bold;
30 | border-bottom: 2px solid #cccccc; }
31 | .group-assignment .search-field-wrapper {
32 | border-bottom: 1px solid #9b9b9b;
33 | margin-bottom: 10px; }
34 | .group-assignment .search-field {
35 | border-top: 0px;
36 | border-left: 0px;
37 | border-right: 0px;
38 | border-bottom: 0px;
39 | padding: 5px 5px 5px 5px;
40 | font-size: 16px;
41 | border-radius: 0px;
42 | width: calc(100% - 30px);
43 | height: 30px;
44 | outline: 0;
45 | box-shadow: none !important;
46 | display: inline-block; }
47 | .group-assignment .contained-user-list-header {
48 | font-weight: bold; }
49 | .group-assignment .not-contained-user-list-header {
50 | font-weight: bold;
51 | margin-top: 20px; }
52 | .group-assignment .users-list-item {
53 | border-bottom: 1px solid #cccccc;
54 | padding: 5px 0px;
55 | position: relative; }
56 | .group-assignment .users-list-item:last-child {
57 | border-bottom: 0px; }
58 | .group-assignment .users-list-item-toggle:hover {
59 | cursor: pointer;
60 | color: #9c9c9c;
61 | text-decoration: underline; }
62 | .group-assignment .users-list-item-toggle {
63 | float: right;
64 | color: blue; }
65 | .group-assignment .inherited-group-link {
66 | color: blue; }
67 | .group-assignment .inherited-group-link:hover {
68 | cursor: pointer;
69 | color: #9c9c9c;
70 | text-decoration: underline; }
71 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/FetchService.js:
--------------------------------------------------------------------------------
1 |
2 | const method = {
3 | GET: 'GET',
4 | PUT: 'PUT',
5 | POST: 'POST',
6 | DELETE: 'DELETE',
7 | HEAD: 'HEAD',
8 | }
9 |
10 | export default class FetchService {
11 |
12 | static get(url) {
13 | return FetchService.fetch({
14 | url: url,
15 | method: method.GET
16 | });
17 | }
18 |
19 | static put(url, data) {
20 | return FetchService.fetch({
21 | url: url,
22 | method: method.PUT,
23 | body: JSON.stringify(data),
24 | headers: FetchService.getJsonHeaders(),
25 | });
26 | }
27 |
28 | static post(url, data) {
29 |
30 | return FetchService.fetch({
31 | url: url,
32 | method: method.POST,
33 | body: JSON.stringify(data),
34 | headers: FetchService.getJsonHeaders(),
35 | });
36 | }
37 |
38 | static delete(url, data) {
39 | return FetchService.fetch({
40 | url: url,
41 | method: method.DELETE,
42 | body: JSON.stringify(data),
43 | headers: FetchService.getJsonHeaders(),
44 | });
45 | }
46 |
47 | static head(url) {
48 | return FetchService.fetch({
49 | url: url,
50 | method: method.HEAD,
51 | });
52 | }
53 |
54 | static getJsonHeaders(){
55 | var headers = new Headers();
56 | headers.append('Accept', 'application/json');
57 | headers.append('Content-Type', 'application/json');
58 | return headers;
59 | }
60 |
61 | static fetch(params) {
62 |
63 | return new Promise((resolve, reject) => {
64 | fetch(params.url, params)
65 | .then(
66 | function (response) {
67 |
68 | var contentType = response.headers.get("content-type");
69 | if (contentType && contentType.indexOf("application/json") !== -1) {
70 | response.json().then((jsonResult) => {
71 | resolve({
72 | response: response,
73 | data: jsonResult
74 | });
75 | }).catch(error => {
76 | console.error("Parse error", error);
77 | reject(error);
78 | });
79 | } else {
80 | resolve({
81 | response: response
82 | });
83 | }
84 | }
85 | )
86 | .catch(function (error) {
87 | console.error("Fetch error", error);
88 | reject(error);
89 | });
90 | });
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/gengine/app/tests/base.py:
--------------------------------------------------------------------------------
1 | import shutil
2 | import unittest
3 | import os
4 | from sqlalchemy.engine import create_engine
5 | from sqlalchemy.sql.schema import Table
6 | from sqlalchemy.orm.scoping import scoped_session
7 | from gengine.metadata import init_session, get_sessionmaker
8 | from gengine.app.tests import db as testDB
9 |
10 | class BaseDBTest(unittest.TestCase):
11 |
12 | @classmethod
13 | def setUpClass(cls):
14 | if cls is BaseDBTest:
15 | raise unittest.SkipTest("Skip BaseTest tests, it's a base class")
16 | super(BaseDBTest, cls).setUpClass()
17 |
18 | def setUp(self):
19 | from gengine.app.cache import clear_all_caches
20 | clear_all_caches()
21 |
22 | if os.path.exists("/tmp/test_pgdata"):
23 | shutil.rmtree("/tmp/test_pgdata")
24 |
25 | self.db = testDB.db()
26 | dsn = self.db.dsn()
27 | self.engine = create_engine(
28 | "postgresql://%(user)s@%(host)s:%(port)s/%(database)s" % {
29 | "user": dsn["user"],
30 | "host": dsn["host"],
31 | "port": dsn["port"],
32 | "database": dsn["database"],
33 | }
34 | )
35 | init_session(override_session=scoped_session(get_sessionmaker(bind=self.engine)), replace=True)
36 | from gengine.metadata import Base
37 | Base.metadata.bind = self.engine
38 |
39 | Base.metadata.drop_all(self.engine)
40 | self.engine.execute("DROP SCHEMA IF EXISTS public CASCADE")
41 | self.engine.execute("CREATE SCHEMA IF NOT EXISTS public")
42 |
43 | from alembic.config import Config
44 | from alembic import command
45 |
46 | alembic_cfg = Config(attributes={
47 | 'engine': self.engine,
48 | 'schema': 'public'
49 | })
50 | script_location = os.path.join(
51 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
52 | 'app/alembic'
53 | )
54 | alembic_cfg.set_main_option("script_location", script_location)
55 |
56 | from gengine.app import model
57 |
58 | tables = [t for name, t in model.__dict__.items() if isinstance(t, Table)]
59 | Base.metadata.create_all(self.engine, tables=tables)
60 |
61 | command.stamp(alembic_cfg, "head")
62 |
63 | def tearDown(self):
64 | self.db.stop()
65 | shutil.rmtree("/tmp/test_pgdata")
66 |
67 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/65c7a32b7322_achievement_date_unique.py:
--------------------------------------------------------------------------------
1 | """achievement_date_unique
2 |
3 | Revision ID: 65c7a32b7322
4 | Revises: d4a70083f72e
5 | Create Date: 2017-01-31 23:01:11.744725
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '65c7a32b7322'
11 | down_revision = 'd4a70083f72e'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.execute("ALTER TABLE achievements_users DROP CONSTRAINT pk_achievements_users;")
22 | op.execute("ALTER TABLE achievements_users ADD COLUMN id SERIAL;")
23 | op.execute("ALTER TABLE achievements_users ADD CONSTRAINT pk_achievements_users PRIMARY KEY(id);")
24 |
25 | op.execute("ALTER TABLE goal_evaluation_cache DROP CONSTRAINT pk_goal_evaluation_cache;")
26 | op.execute("ALTER TABLE goal_evaluation_cache ADD COLUMN id SERIAL;")
27 | op.execute("ALTER TABLE goal_evaluation_cache ADD CONSTRAINT pk_goal_evaluation_cache PRIMARY KEY(id);")
28 |
29 | op.create_index('idx_achievements_users_date_not_null_unique', 'achievements_users', ['user_id', 'achievement_id', 'achievement_date', 'level'], unique=True, postgresql_where=sa.text('achievement_date IS NOT NULL'))
30 | op.create_index('idx_achievements_users_date_null_unique', 'achievements_users', ['user_id', 'achievement_id', 'level'], unique=True, postgresql_where=sa.text('achievement_date IS NULL'))
31 | op.create_index(op.f('ix_achievements_users_achievement_id'), 'achievements_users', ['achievement_id'], unique=False)
32 | op.create_index(op.f('ix_achievements_users_level'), 'achievements_users', ['level'], unique=False)
33 |
34 | op.create_index('idx_goal_evaluation_cache_date_not_null_unique', 'goal_evaluation_cache', ['user_id', 'goal_id', 'achievement_date'], unique=True, postgresql_where=sa.text('achievement_date IS NOT NULL'))
35 | op.create_index('idx_goal_evaluation_cache_date_null_unique', 'goal_evaluation_cache', ['user_id', 'goal_id'], unique=True, postgresql_where=sa.text('achievement_date IS NULL'))
36 | op.create_index(op.f('ix_goal_evaluation_cache_goal_id'), 'goal_evaluation_cache', ['goal_id'], unique=False)
37 | op.create_index(op.f('ix_goal_evaluation_cache_user_id'), 'goal_evaluation_cache', ['user_id'], unique=False)
38 | ### end Alembic commands ###
39 |
40 |
41 | def downgrade():
42 | pass
43 | # not possible !
44 |
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/storeState/index.js:
--------------------------------------------------------------------------------
1 | import _ from 'lodash';
2 | import { combineReducers } from "redux";
3 | import { fork } from 'redux-saga/effects';
4 | import { createStore, applyMiddleware } from 'redux';
5 | import createSagaMiddleware from 'redux-saga'
6 | import { composeWithDevTools } from 'remote-redux-devtools';
7 |
8 | import {getLocalPersistenceMiddleware, localPersistenceReducer, LOAD_REDUX_STORE_FROM_LOCAL_STORAGE} from '../lib/persistence';
9 | import {buildSwaggerApi} from '../lib/swagger';
10 | import apiConfig from './apiConfig';
11 | import ui from './ui';
12 |
13 | //import data from "./data";
14 |
15 | export default function initStore() {
16 | return buildSwaggerApi(apiConfig).then(function(api) {
17 | console.log(api)
18 | const sagaMiddleware = createSagaMiddleware();
19 |
20 | const combinedReducer = combineReducers({
21 | api: api.reducer,
22 | ui: ui.reducer,
23 | localPersistence : localPersistenceReducer
24 | //data: data.reducer,
25 | });
26 |
27 | const rootReducer = (state,action) => {
28 | if(action.type == LOAD_REDUX_STORE_FROM_LOCAL_STORAGE) {
29 | return localPersistenceReducer(state, action);
30 | } else {
31 | return combinedReducer(state, action);
32 | }
33 | }
34 |
35 | const store = createStore(rootReducer, composeWithDevTools(
36 | applyMiddleware(sagaMiddleware, getLocalPersistenceMiddleware([
37 | //...data.localPersistence.map(x => ["data",...x]),
38 | ...api.persist.map(x => ["api",...x]),
39 | ...ui.persist.map(x => ["ui",...x])
40 | ])),
41 | ));
42 |
43 | var n_errors = 0
44 | const max_errors = 100;
45 |
46 | function* rootSaga () {
47 | try {
48 | yield [
49 | fork(api.saga),
50 | ];
51 | } catch (e) {
52 | n_errors++;
53 | console.log("Error While executing Saga", e);
54 | if(n_errors < max_errors) {
55 | yield [
56 | fork(rootSaga)
57 | ]
58 | }
59 | }
60 | }
61 |
62 | sagaMiddleware.run(rootSaga);
63 |
64 | return {
65 | store,
66 | dynamic: {
67 | api
68 | }
69 | };
70 | })
71 | }
72 |
--------------------------------------------------------------------------------
/gengine/maintenance/scripts/generate_revision.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import sys
3 |
4 | import os
5 | import pyramid_dogpile_cache
6 |
7 | from pyramid.config import Configurator
8 | from pyramid.paster import (
9 | get_appsettings,
10 | setup_logging,
11 | )
12 | from pyramid.scripts.common import parse_vars
13 | from sqlalchemy import engine_from_config
14 |
15 | def usage(argv):
16 | cmd = os.path.basename(argv[0])
  17 |     print('usage: %s <config_uri> <message> [var=value]\n'
18 | '(example: "%s production.ini new_table_xy_created")' % (cmd, cmd))
19 | sys.exit(1)
20 |
21 |
22 | def main(argv=sys.argv):
23 | if len(argv) < 3:
24 | usage(argv)
25 | config_uri = argv[1]
26 | message = argv[2]
27 | options = parse_vars(argv[3:])
28 | setup_logging(config_uri)
29 | settings = get_appsettings(config_uri, options=options)
30 |
31 | durl = os.environ.get("DATABASE_URL") # heroku
32 | if durl:
33 | settings['sqlalchemy.url'] = durl
34 |
35 | murl = os.environ.get("MEMCACHED_URL")
36 | if murl:
37 | settings['urlcache_url'] = murl
38 |
39 | revision(settings, message, options)
40 |
41 |
42 | def revision(settings, message, options):
43 | engine = engine_from_config(settings, 'sqlalchemy.')
44 |
45 | config = Configurator(settings=settings)
46 | pyramid_dogpile_cache.includeme(config)
47 |
48 | from gengine.metadata import (
49 | init_session,
50 | init_declarative_base,
51 | init_db
52 | )
53 | init_session()
54 | init_declarative_base()
55 | init_db(engine)
56 |
57 | from gengine.app.cache import init_caches
58 | init_caches()
59 |
60 | from gengine.metadata import (
61 | Base,
62 | )
63 |
64 | if options.get("reset_db", False):
65 | Base.metadata.drop_all(engine)
66 | engine.execute("DROP SCHEMA IF EXISTS public CASCADE")
67 |
68 | engine.execute("CREATE SCHEMA IF NOT EXISTS public")
69 |
70 | from alembic.config import Config
71 | from alembic import command
72 |
73 | alembic_cfg = Config(attributes={
74 | 'engine': engine,
75 | 'schema': 'public'
76 | })
77 | script_location = os.path.join(
78 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
79 | 'app/alembic'
80 | )
81 | alembic_cfg.set_main_option("script_location", script_location)
82 |
83 | command.revision(alembic_cfg,message,True)
84 |
85 | engine.dispose()
86 |
87 | if __name__ == '__main__':
88 | main()
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/logo.svg:
--------------------------------------------------------------------------------
1 |
8 |
--------------------------------------------------------------------------------
/docs/installing/index.rst:
--------------------------------------------------------------------------------
1 | :title: gamification-engine installation
2 | :description: installing the gamification-engine
3 |
4 | Installation
5 | ------------
6 |
7 | Requirements
8 | ============
9 |
10 | The gamification-engine requires Python 3.x; it uses several language features that are not available in Python 2.x. Furthermore, the only currently supported persistence layer is PostgreSQL, and the corresponding development packages are required (for Ubuntu/Debian: libpq-dev and python3-dev).
11 |
12 |
13 | Installation from PyPI
14 | ======================
15 |
16 | The gamification-engine is available as a Python package.
17 | You can install it by invoking
18 |
19 | .. highlight:: bash
20 |
21 | ::
22 |
23 | $ pip install gamification-engine
24 | $ gengine_quickstart mygengine
25 | $ cd mygengine
26 |
27 | In the latest version, there are optional dependencies for auth, pushes, and testing. To use these features, install the package as follows:
28 |
29 | .. highlight:: bash
30 |
31 | ::
32 |
33 | $ pip install gamification-engine[auth,pushes,testing]
34 |
35 | Afterwards edit production.ini according to your needs.
36 |
37 | Database
38 | ========
39 |
40 | The only currently supported persistence layer is PostgreSQL, as we make use of its timezone-related features.
41 |
42 | To create the tables run:
43 |
44 | .. highlight:: bash
45 |
46 | ::
47 |
48 | $ initialize_gengine_db production.ini
49 |
50 | Caching
51 | =======
52 |
53 | For caching we make use of two different approaches:
54 | - using dogpile.cache_ for caching database lookups and computations
55 | - using memcached_ as a URL-based cache that can be served directly by nginx_
56 |
57 | The second approach is optional but highly recommended; it can be deactivated by setting ``urlcache_active = false`` in your ini file.
58 |
59 | .. _dogpile.cache: https://dogpilecache.readthedocs.org/
60 | .. _memcached: http://memcached.org/
61 | .. _nginx: http://nginx.org/
62 |
63 | Serving
64 | =======
65 |
66 | You can use any WSGI-capable webserver
67 | (e.g. nginx as a reverse proxy in front of uWSGI).
68 |
69 | To quickly get started, you can run:
70 |
71 | .. highlight:: bash
72 |
73 | ::
74 |
75 | $ pserve production.ini
76 |
77 | Heroku-style
78 | ============
79 |
80 | There is also a Heroku-style project (we use dokku) at gamification-engine-dokku_.
81 |
82 | .. _gamification-engine-dokku: https://github.com/ActiDoo/gamification-engine-dokku
83 |
84 |
85 | Commercial Support
86 | ==================
87 |
88 | Commercial support is available at https://www.gamification-software.com or together with app development at https://www.appnadoo.de
89 |
--------------------------------------------------------------------------------
/gengine/app/static/admin.js:
--------------------------------------------------------------------------------
1 | jQuery().ready(function($) {
2 | var defaultcall = "progress";
3 |
4 | var fields=["subjectid","variable","value","key","achievementid","level",
5 | "lat","lon","friends","groups","timezone","country","region","city",
6 | "email","password","device_id","push_id","device_os","app_version",
7 | "offset","message_id","additional_public_data","language"];
8 |
9 | var api_funcs = {
10 | "progress" : {
11 | "fields":["subjectid"],
12 | "url":"/progress/{subjectid}",
13 | "method":"GET"
14 | },
15 | "increase_value" : {
16 | "fields":["variable","subjectid","value","key"],
17 | "url":"/increase_value/{variable}/{subjectid}{/key}",
18 | "method":"POST",
19 | "postparams":["value"]
20 | },
21 | "add_or_update_subject" : {
22 | "fields":["subjectid","lat","lon","friends","groups","timezone","country","region","city","additional_public_data","language"],
23 | "url":"/add_or_update_subject/{subjectid}",
24 | "method":"POST",
25 | "postparams":["lat","lon","friends","groups","timezone","country","region","city","additional_public_data","language"]
26 | },
27 | "delete_subject" : {
28 | "fields":["subjectid"],
29 | "url":"/delete_subject/{subjectid}",
30 | "method":"DELETE"
31 | },
32 | "achievement_level" : {
33 | "fields":["achievementid","level"],
34 | "url":"/achievement/{achievementid}/level/{level}",
35 | "method":"GET"
36 | },
37 | "auth_login" : {
38 | "fields":["email","password"],
39 | "url":"/auth/login",
40 | "method":"POST",
41 | "jsonparams":["email","password"]
42 | },
43 | "register_device" : {
44 | "fields":["subjectid","device_id","push_id","device_os","app_version"],
45 | "url":"/register_device/{subjectid}",
46 | "method":"POST",
47 | "jsonparams":["device_id","push_id","device_os","app_version"]
48 | },
49 | "get_messages" : {
50 | "fields":["subjectid","offset"],
51 | "url":"/messages/{subjectid}",
52 | "method":"GET",
53 | "getparams":["offset"]
54 | },
55 | "set_messages_read" : {
56 | "fields":["subjectid","message_id"],
57 | "url":"/read_messages/{subjectid}",
58 | "method":"POST",
59 | "jsonparams":["message_id"]
60 | }
61 | };
62 |
63 | setupAPIForm($,defaultcall,fields,api_funcs);
64 |
65 | });
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/lib/swagger/provider.js:
--------------------------------------------------------------------------------
1 | import React, {Component, PropTypes, Children} from 'react';
2 | import invariant from 'invariant';
3 | import { connect } from 'react-redux';
4 |
5 | export class DynamicProvider extends Component {
6 |
7 | static propTypes = {
8 | dynamic: PropTypes.object,
9 | };
10 |
11 | static childContextTypes = {
12 | dynamic: PropTypes.object,
13 | };
14 |
15 | getChildContext() {
16 | return {
17 | dynamic: this.props.dynamic
18 | }
19 | }
20 |
21 | render() {
22 | return Children.only(this.props.children);
23 | }
24 | }
25 |
26 | function getDisplayName(Component) {
27 | return Component.displayName || Component.name || 'Component';
28 | }
29 |
30 | function invariantDynamicContext({dynamic} = {}) {
31 | invariant(dynamic,
32 | '[Redux Swagger] Could not find required `dynamic` object. ' +
33 | ' needs to exist in the component ancestry.'
34 | );
35 | }
36 |
37 | export function connectDynamic(mapStateToProps, mapDispatchToProps) {
38 | return function(WrappedComponent, options = {}) {
39 | const {
40 | withRef = false,
41 | } = options;
42 |
43 | class DynamicConnected extends Component {
44 | static displayName = `DynamicConnected(${getDisplayName(WrappedComponent)})`;
45 |
46 | static contextTypes = {
47 | dynamic: PropTypes.object,
48 | store: PropTypes.object
49 | };
50 |
51 | constructor(props, context) {
52 | super(props, context);
53 | invariantDynamicContext(context);
54 |
55 | let dynamic = this.context.dynamic;
56 | let state = this.context.store.getState();
57 | let dispatch = this.context.store.dispatch;
58 |
59 | this.WrappedComponent = connect(function(state, props) {
60 | return mapStateToProps(dynamic, state, props)
61 | }, function(dispatch, props) {
62 | return mapDispatchToProps(dynamic, dispatch, props)
63 | })(WrappedComponent);
64 | }
65 |
66 | getWrappedInstance() {
67 | invariant(withRef,
68 | '[Redux Swagger] To access the wrapped instance, ' +
69 | 'the `{withRef: true}` option must be set when calling: ' +
70 | '`connectDynamic()`'
71 | );
72 |
73 | return this.refs.wrappedInstance;
74 | }
75 |
76 | render() {
77 | return (
78 |           <this.WrappedComponent {...this.props} ref={withRef ? "wrappedInstance" : null} />
79 | )
80 | }
81 | }
82 |
83 | return DynamicConnected
84 | }
85 | }
86 |
87 |
88 |
--------------------------------------------------------------------------------
/docker-files/nginx-dev.conf:
--------------------------------------------------------------------------------
1 | worker_processes auto;
2 |
3 | events {
4 | use epoll;
5 | accept_mutex on;
6 | worker_connections 1024;
7 | }
8 |
9 | http {
10 | gzip on;
11 | gzip_comp_level 2;
12 | gzip_min_length 512;
13 |
14 | server_tokens off;
15 |
16 | access_log /dev/stdout;
17 | error_log /dev/stderr;
18 |
19 | include mime.types;
20 | default_type application/octet-stream;
21 | sendfile on;
22 |
23 | #Must read the body in 5 seconds.
24 | client_body_timeout 5;
25 |
26 | upstream app_server {
27 | server unix:///run/uwsgi/uwsgi.socket;
28 | }
29 |
30 | server {
31 | listen 80;
32 | server_name _;
33 | keepalive_timeout 5;
34 |
35 | location / {
36 | #expires -1;
37 | #add_header Cache-Control no-cache;
38 | #add_header Vary User-Agent;
39 |
40 | uwsgi_param QUERY_STRING $query_string;
41 | uwsgi_param REQUEST_METHOD $request_method;
42 | uwsgi_param CONTENT_TYPE $content_type;
43 | uwsgi_param CONTENT_LENGTH $content_length;
44 | uwsgi_param REQUEST_URI $request_uri;
45 | uwsgi_param PATH_INFO $document_uri;
46 | uwsgi_param DOCUMENT_ROOT $document_root;
47 | uwsgi_param SERVER_PROTOCOL $server_protocol;
48 | uwsgi_param HTTPS $https if_not_empty;
49 | uwsgi_param REMOTE_ADDR $remote_addr;
50 | uwsgi_param REMOTE_PORT $remote_port;
51 | uwsgi_param SERVER_PORT $server_port;
52 | uwsgi_param SERVER_NAME $server_name;
53 |
54 | if ($request_method = POST) {
55 | uwsgi_pass app_server;
56 | }
57 |
58 | default_type "application/json";
59 | charset utf-8;
60 |
61 | set $memcached_key "::URL_CACHE::$request_uri";
62 | memcached_pass memcached:11211;
63 | error_page 404 502 = @fallback;
64 | }
65 |
66 | location @fallback {
67 | #expires -1;
68 | #add_header Cache-Control no-cache;
69 | #add_header Vary User-Agent;
70 |
71 | uwsgi_param QUERY_STRING $query_string;
72 | uwsgi_param REQUEST_METHOD $request_method;
73 | uwsgi_param CONTENT_TYPE $content_type;
74 | uwsgi_param CONTENT_LENGTH $content_length;
75 | uwsgi_param REQUEST_URI $request_uri;
76 | uwsgi_param PATH_INFO $document_uri;
77 | uwsgi_param DOCUMENT_ROOT $document_root;
78 | uwsgi_param SERVER_PROTOCOL $server_protocol;
79 | uwsgi_param HTTPS $https if_not_empty;
80 | uwsgi_param REMOTE_ADDR $remote_addr;
81 | uwsgi_param REMOTE_PORT $remote_port;
82 | uwsgi_param SERVER_PORT $server_port;
83 | uwsgi_param SERVER_NAME $server_name;
84 |
85 |
86 | uwsgi_pass app_server;
87 | internal;
88 | }
89 | }
90 | }
--------------------------------------------------------------------------------
/docker-files/nginx-prod.conf:
--------------------------------------------------------------------------------
1 | worker_processes auto;
2 |
3 | events {
4 | use epoll;
5 | accept_mutex on;
6 | worker_connections 1024;
7 | }
8 |
9 | http {
10 | gzip on;
11 | gzip_comp_level 2;
12 | gzip_min_length 512;
13 |
14 | server_tokens off;
15 |
16 | access_log /dev/stdout;
17 | error_log /dev/stderr;
18 |
19 | include mime.types;
20 | default_type application/octet-stream;
21 | sendfile on;
22 |
23 | #Must read the body in 5 seconds.
24 | client_body_timeout 5;
25 |
26 | upstream app_server {
27 | server unix:///run/uwsgi/uwsgi.socket;
28 | }
29 |
30 | server {
31 | listen 80;
32 | server_name _;
33 | keepalive_timeout 5;
34 |
35 | location / {
36 | #expires -1;
37 | #add_header Cache-Control no-cache;
38 | #add_header Vary User-Agent;
39 |
40 | uwsgi_param QUERY_STRING $query_string;
41 | uwsgi_param REQUEST_METHOD $request_method;
42 | uwsgi_param CONTENT_TYPE $content_type;
43 | uwsgi_param CONTENT_LENGTH $content_length;
44 | uwsgi_param REQUEST_URI $request_uri;
45 | uwsgi_param PATH_INFO $document_uri;
46 | uwsgi_param DOCUMENT_ROOT $document_root;
47 | uwsgi_param SERVER_PROTOCOL $server_protocol;
48 | uwsgi_param HTTPS $https if_not_empty;
49 | uwsgi_param REMOTE_ADDR $remote_addr;
50 | uwsgi_param REMOTE_PORT $remote_port;
51 | uwsgi_param SERVER_PORT $server_port;
52 | uwsgi_param SERVER_NAME $server_name;
53 |
54 | if ($request_method = POST) {
55 | uwsgi_pass app_server;
56 | }
57 |
58 | default_type "application/json";
59 | charset utf-8;
60 |
61 | set $memcached_key "::URL_CACHE::$request_uri";
62 | memcached_pass memcached:11211;
63 | error_page 404 502 = @fallback;
64 | }
65 |
66 | location @fallback {
67 | #expires -1;
68 | #add_header Cache-Control no-cache;
69 | #add_header Vary User-Agent;
70 |
71 | uwsgi_param QUERY_STRING $query_string;
72 | uwsgi_param REQUEST_METHOD $request_method;
73 | uwsgi_param CONTENT_TYPE $content_type;
74 | uwsgi_param CONTENT_LENGTH $content_length;
75 | uwsgi_param REQUEST_URI $request_uri;
76 | uwsgi_param PATH_INFO $document_uri;
77 | uwsgi_param DOCUMENT_ROOT $document_root;
78 | uwsgi_param SERVER_PROTOCOL $server_protocol;
79 | uwsgi_param HTTPS $https if_not_empty;
80 | uwsgi_param REMOTE_ADDR $remote_addr;
81 | uwsgi_param REMOTE_PORT $remote_port;
82 | uwsgi_param SERVER_PORT $server_port;
83 | uwsgi_param SERVER_NAME $server_name;
84 |
85 |
86 | uwsgi_pass app_server;
87 | internal;
88 | }
89 | }
90 | }
--------------------------------------------------------------------------------
/gengine/app/jsscripts/src/service/URLService.js:
--------------------------------------------------------------------------------
1 | import { browserHistory } from 'react-router';
2 | import URI from 'urijs';
3 | import _ from 'lodash';
4 |
5 | export default class URLService {
6 |
7 | static getCurrentPathWithSearch() {
8 | let currentPath = window.location.pathname;
9 | let currentSearch = window.location.search;
10 | return currentPath + currentSearch;
11 | }
12 |
13 | static setQueryParameter(name, value) {
14 | browserHistory.replace(URI(this.getCurrentPathWithSearch()).setQuery(name,value).toString());
15 | }
16 |
17 | static addQueryParameter(name, value) {
18 | browserHistory.replace(URI(this.getCurrentPathWithSearch()).addQuery(name,value).toString());
19 | }
20 |
21 | static removeQueryParameterWithValue(name, value) {
22 | browserHistory.replace(URI(this.getCurrentPathWithSearch()).removeQuery(name,value).toString());
23 | }
24 |
25 | static removeQueryParameter(name) {
26 | browserHistory.replace(URI(this.getCurrentPathWithSearch()).removeQuery(name).toString());
27 | }
28 |
29 | static toggleQueryParameter(name, value) {
30 | let uri = URI(this.getCurrentPathWithSearch());
31 | if(uri.hasQuery(name, value, true)) {
32 | this.removeQueryParameterWithValue(name, value)
33 | } else {
34 | this.addQueryParameter(name, value)
35 | }
36 | }
37 |
38 | static getQueryParameterValueAsList(name) {
39 | let qry = URI(this.getCurrentPathWithSearch()).query(true);
40 |
41 | if(!qry[name]) return [];
42 | if(!_.isArray(qry[name])) {
43 | return [qry[name]]
44 | }
45 | return qry[name]
46 | }
47 |
48 | static getQueryParameterValueAsIntList(name) {
49 | let qry = URI(this.getCurrentPathWithSearch()).query(true);
50 |
51 | if(!qry[name]) return [];
52 | if(!_.isArray(qry[name])) {
53 | return [parseInt(qry[name])]
54 | }
55 | let list = qry[name];
56 | return _.map(list, (it) => {
57 | return parseInt(it);
58 | })
59 | }
60 |
61 | static getQueryParameterAsBool(name) {
62 | let qry = URI(this.getCurrentPathWithSearch()).query(true);
63 |
64 | if(!qry[name]) {
65 | return false;
66 | } else {
67 | return true;
68 | }
69 | }
70 |
71 | static getQueryParameterAsInt(name) {
72 | let qry = URI(this.getCurrentPathWithSearch()).query(true);
73 |
74 | if(typeof qry[name] == 'undefined') {
75 | return null;
76 | } else {
77 | return parseInt(qry[name]);
78 | }
79 | }
80 |
81 | static getQueryParameterAsString(name, fallback=null) {
82 | let qry = URI(this.getCurrentPathWithSearch()).query(true);
83 |
84 | if(typeof qry[name] == 'undefined') {
85 | return fallback;
86 | } else {
87 | return ""+qry[name];
88 | }
89 | }
90 | }
--------------------------------------------------------------------------------
/gengine/app/jsscripts/build/service-worker.js:
--------------------------------------------------------------------------------
1 | "use strict";function setOfCachedUrls(e){return e.keys().then(function(e){return e.map(function(e){return e.url})}).then(function(e){return new Set(e)})}var precacheConfig=[["index.html","afd0a90d1fc5a8d7a9932bed79d390d1"],["static/css/main.a5a0edab.css","1c7be73c85d45f3fe6bd36306384f80e"],["static/js/0.f7d7d8b6.chunk.js","705902973dc0044f5d11411b08e0c261"],["static/js/main.4589375e.js","b5967910d8ea7d0393d99cb19b35f0e3"]],cacheName="sw-precache-v3-sw-precache-webpack-plugin-"+(self.registration?self.registration.scope:""),ignoreUrlParametersMatching=[/^utm_/],addDirectoryIndex=function(e,t){var n=new URL(e);return"/"===n.pathname.slice(-1)&&(n.pathname+=t),n.toString()},cleanResponse=function(e){return e.redirected?("body"in e?Promise.resolve(e.body):e.blob()).then(function(t){return new Response(t,{headers:e.headers,status:e.status,statusText:e.statusText})}):Promise.resolve(e)},createCacheKey=function(e,t,n,r){var a=new URL(e);return r&&a.pathname.match(r)||(a.search+=(a.search?"&":"")+encodeURIComponent(t)+"="+encodeURIComponent(n)),a.toString()},isPathWhitelisted=function(e,t){if(0===e.length)return!0;var n=new URL(t).pathname;return e.some(function(e){return n.match(e)})},stripIgnoredUrlParameters=function(e,t){var n=new URL(e);return n.hash="",n.search=n.search.slice(1).split("&").map(function(e){return e.split("=")}).filter(function(e){return t.every(function(t){return!t.test(e[0])})}).map(function(e){return e.join("=")}).join("&"),n.toString()},hashParamName="_sw-precache",urlsToCacheKeys=new Map(precacheConfig.map(function(e){var t=e[0],n=e[1],r=new URL(t,self.location),a=createCacheKey(r,hashParamName,n,/\.\w{8}\./);return[r.toString(),a]}));self.addEventListener("install",function(e){e.waitUntil(caches.open(cacheName).then(function(e){return setOfCachedUrls(e).then(function(t){return Promise.all(Array.from(urlsToCacheKeys.values()).map(function(n){if(!t.has(n)){var r=new Request(n,{credentials:"same-origin"});return fetch(r).then(function(t){if(!t.ok)throw new Error("Request for "+n+" returned a response with status "+t.status);return cleanResponse(t).then(function(t){return e.put(n,t)})})}}))})}).then(function(){return self.skipWaiting()}))}),self.addEventListener("activate",function(e){var t=new Set(urlsToCacheKeys.values());e.waitUntil(caches.open(cacheName).then(function(e){return e.keys().then(function(n){return Promise.all(n.map(function(n){if(!t.has(n.url))return e.delete(n)}))})}).then(function(){return self.clients.claim()}))}),self.addEventListener("fetch",function(e){if("GET"===e.request.method){var t,n=stripIgnoredUrlParameters(e.request.url,ignoreUrlParametersMatching);t=urlsToCacheKeys.has(n);t||(n=addDirectoryIndex(n,"index.html"),t=urlsToCacheKeys.has(n));!t&&"navigate"===e.request.mode&&isPathWhitelisted([],e.request.url)&&(n=new URL("/index.html",self.location).toString(),t=urlsToCacheKeys.has(n)),t&&e.respondWith(caches.open(cacheName).then(function(e){return e.match(urlsToCacheKeys.get(n)).then(function(e){if(e)return e;throw Error("The cached response that was expected is missing.")})}).catch(function(t){return console.warn('Couldn\'t serve response for "%s" from cache: %O',e.request.url,t),fetch(e.request)}))}});
--------------------------------------------------------------------------------
/gengine/app/tasksystem.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | from sqlalchemy.sql.expression import and_
3 | from zope.sqlalchemy.datamanager import mark_changed
4 |
5 |
6 | class EngineTask(object):
7 | def __init__(self, name, description, config_scheme, default_config, default_cron, default_activated, *args, **kwargs):
8 | """Constructor just here to accept parameters for decorator"""
9 | self.name = name
10 | self.description = description
11 | self.config_scheme = config_scheme
12 | self.default_config = default_config
13 | self.default_cron = default_cron
14 | self.default_activated = default_activated
15 | self.args = args
16 | self.kwargs = kwargs
17 |
18 | def __call__(self, wrapped):
19 |         """Register the task in the task registry when the decorator is applied."""
20 |
21 | from gengine.app.registries import get_task_registry
22 | get_task_registry().register(self.name, wrapped, self.description, self.config_scheme, self.default_config, self.default_cron)
23 |
24 | if self.default_activated:
25 | import transaction
26 | from .model import t_tasks
27 | from ..metadata import DBSession
28 |
29 | if hasattr(DBSession, "target"):
30 | sess = DBSession()
31 | else:
32 | sess = DBSession
33 |
34 | with transaction.manager:
35 |
36 | sess.execute("LOCK TABLE tasks IN ACCESS EXCLUSIVE MODE")
37 | db_task = sess.execute(t_tasks.select().where(and_(
38 | t_tasks.c.task_name.like(self.name),
39 | t_tasks.c.is_auto_created == True,
40 | ))).fetchone()
41 |
42 | if not db_task:
43 | # We are not setting config and cron, as we can get the defaults when executing
44 |
45 | mark_changed(sess, transaction.manager, True)
46 |
47 | sess.execute(t_tasks.insert().values({
48 | 'entry_name': self.name,
49 | 'task_name': self.name,
50 | 'config': None,
51 | 'cron': None,
52 | 'is_removed': False,
53 | 'is_manually_modified': False,
54 | 'is_auto_created': True,
55 | }))
56 |
57 | sess.flush()
58 | sess.commit()
59 | return wrapped
60 |
61 |
62 | class TaskRegistry:
63 | def __init__(self):
64 | self.registrations = defaultdict(lambda: defaultdict(dict))
65 |
66 | def register(self, name, fun, description, config_scheme, default_config, default_cron):
67 | self.registrations[name] = {
68 | "fun": fun,
69 | "description": description,
70 | "config_scheme": config_scheme,
71 | "default_config": default_config,
72 | "default_cron": default_cron
73 | }
74 |
75 | def execute(self, name, config):
76 | if not config:
77 | config = self.registrations.get(name).get("default_config", None)
78 | return self.registrations[name]["fun"](config=config)
79 |
80 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 |
4 | from setuptools import setup, find_packages
5 |
6 | here = os.path.abspath(os.path.dirname(__file__))
7 | with open(os.path.join(here, 'README.txt')) as f:
8 | README = f.read()
9 | with open(os.path.join(here, 'CHANGES.txt')) as f:
10 | CHANGES = f.read()
11 |
12 | requires = [
13 | 'pyramid',
14 | 'pyramid_chameleon',
15 | 'pyramid_debugtoolbar',
16 | 'pyramid_tm',
17 | 'SQLAlchemy',
18 | 'transaction',
19 | 'zope.sqlalchemy',
20 | 'waitress',
21 | 'pytz',
22 | 'dogpile.cache',
23 | 'pyramid_dogpile_cache',
24 | 'Flask>=0.10.1',
25 | 'flask-admin',
26 | 'psycopg2',
27 | 'pymemcache',
28 | 'mock',
29 | 'alembic',
30 | 'raven',
31 | 'jsl',
32 | 'jsonschema',
33 | 'pyparsing',
34 | 'python-crontab',
35 | 'croniter',
36 | 'zope.interface',
37 | 'zope.sqlalchemy',
38 | 'argon2'
39 | ]
40 |
41 | version = ''
42 | with open('gengine/__init__.py', 'r') as fd:
43 | version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
44 | fd.read(), re.MULTILINE).group(1)
45 |
46 | if not version:
47 | raise RuntimeError('Cannot find version information')
48 |
49 | setup(name='gamification-engine',
50 | version=version,
51 | description='The Gamification-Engine (gengine) provides an API for integrating any kinds of gamification features.',
52 | long_description=README + '\n\n' + CHANGES,
53 | classifiers=[
54 | "Programming Language :: Python",
55 | "Framework :: Pyramid",
56 | "Topic :: Internet :: WWW/HTTP",
57 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
58 | "Topic :: Software Development :: Libraries",
59 | "Programming Language :: Python :: 3.6",
60 | "License :: OSI Approved :: MIT License"
61 | ],
62 | author='Marcel Sander, Jens Janiuk, Matthias Feldotto',
63 | author_email='marcel@gamification-software.com',
64 | license='MIT',
65 | url='https://www.gamification-software.com',
66 | keywords='web wsgi bfg pylons pyramid gamification',
67 | packages=find_packages(),
68 | include_package_data=True,
69 | zip_safe=False,
70 | test_suite='gengine',
71 | install_requires=requires,
72 | extras_require={
73 | "auth": [
74 | 'argon2'
75 | ],
76 | "pushes": [
77 | 'tapns3',
78 | 'python-gcm',
79 | ],
80 | "testing": [
81 | 'testing.postgresql',
82 | 'testing.redis',
83 | 'names'
84 | ]
85 | },
86 | entry_points="""\
87 | [paste.app_factory]
88 | main = gengine:main
89 | [console_scripts]
90 | initialize_gengine_db = gengine.maintenance.scripts.initializedb:main
91 | generate_gengine_erd = gengine.maintenance.scripts.generate_erd:main
92 | generate_gengine_revision = gengine.maintenance.scripts.generate_revision:main
93 | gengine_push_messages = gengine.maintenance.scripts.push_messages:main
94 | gengine_scheduler_beat = gengine.maintenance.scripts.scheduler_beat:main
95 | gengine_scheduler_worker = gengine.maintenance.scripts.scheduler_worker:main
96 | [redgalaxy.plugins]
97 | gengine = gengine:redgalaxy
98 | """,
99 | )
100 |
--------------------------------------------------------------------------------
/gengine/app/templates/admin/layout.html:
--------------------------------------------------------------------------------
1 | {% macro menu_icon(item) -%}
2 | {% set icon_type = item.get_icon_type() %}
3 | {%- if icon_type %}
4 | {% set icon_value = item.get_icon_value() %}
5 | {% if icon_type == 'glyph' %}
6 |
7 | {% elif icon_type == 'image' %}
8 |
9 | {% elif icon_type == 'image-url' %}
10 |
11 | {% endif %}
12 | {% endif %}
13 | {%- endmacro %}
14 |
15 | {% macro menu() %}
16 | {%- for item in admin_view.admin.menu() %}
17 | {%- if item.is_category() -%}
18 | {% set children = item.get_children() %}
19 | {%- if children %}
20 | {% set class_name = item.get_class_name() %}
21 | {%- if item.is_active(admin_view) %}
22 |
23 | {% else -%}
24 |
25 | {%- endif %}
26 | {{ menu_icon(item) }}{{ item.name }}
27 |
39 |
40 | {% endif %}
41 | {%- else %}
42 | {%- if item.is_accessible() and item.is_visible() -%}
43 | {% set class_name = item.get_class_name() %}
44 | {%- if item.is_active(admin_view) %}
45 |
46 | {%- else %}
47 |
48 | {%- endif %}
49 | {{ menu_icon(item) }}{{ item.name }}
50 |
51 | {%- endif -%}
52 | {% endif -%}
53 | {% endfor %}
54 | {% endmacro %}
55 |
56 | {% macro menu_links() %}
57 | {% for item in admin_view.admin.menu_links() %}
58 | {% if item.is_accessible() and item.is_visible() %}
59 |
60 | {{ menu_icon(item) }}{{ item.name }}
61 |
62 | {% endif %}
63 | {% endfor %}
64 |
65 |
66 | Authentication: {{ ('active' if settings_enable_authentication else 'inactive') | safe }}
67 |
68 | {% endmacro %}
69 |
70 | {% macro messages() %}
71 | {% with messages = get_flashed_messages(with_categories=True) %}
72 | {% if messages %}
73 | {% for category, m in messages %}
74 | {% if category %}
75 |
76 | {% else %}
77 |
78 | {% endif %}
79 | x
80 | {{ m }}
81 |
82 | {% endfor %}
83 | {% endif %}
84 | {% endwith %}
85 | {% endmacro %}
86 |
--------------------------------------------------------------------------------
/docker-files/production.ini:
--------------------------------------------------------------------------------
1 | ###
2 | # app configuration
3 | # http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/environment.html
4 | ###
5 |
6 | [app:main]
7 | use = egg:gamification-engine
8 |
9 | pyramid.reload_templates = false
10 | pyramid.debug_authorization = false
11 | pyramid.debug_notfound = false
12 | pyramid.debug_routematch = false
13 | pyramid.default_locale_name = en
14 | pyramid.includes =
15 | pyramid_tm
16 |
17 | sqlalchemy.url = postgres://postgres:password@postgres/gengine
18 |
19 | #reverse proxy settings
20 | force_https = false
21 | urlprefix =
22 | urlcacheid = gengine
23 |
24 | #swagger
25 | swagger.use_http_scheme = true
26 | # leave empty so that the React frontend uses a relative path for the swagger URL
27 | swagger.host =
28 |
29 | #flaskadmin settings
30 | flaskadmin_secret = 87ghsjkdjfhg85grsfgsdfghwez89hsuif # replace with random string!
31 |
32 | # dogpile cache
33 | dogpile_cache.backend = dogpile.cache.null
34 |
35 | dogpile_cache.general.backend = dogpile.cache.dbm
36 | dogpile_cache.general.arguments.filename = general.dbm
37 |
38 | dogpile_cache.achievement_eval.backend = dogpile.cache.dbm
39 | dogpile_cache.achievement_eval.arguments.filename = achievement_eval.dbm
40 |
41 | dogpile_cache.achievements_by_user_for_today.backend = dogpile.cache.dbm
42 | dogpile_cache.achievements_by_user_for_today.arguments.filename = achievements_by_user_for_today.dbm
43 |
44 | dogpile_cache.translations.backend = dogpile.cache.dbm
45 | dogpile_cache.translations.arguments.filename = translations.dbm
46 |
47 | dogpile_cache.achievements_users_levels.backend = dogpile.cache.dbm
48 | dogpile_cache.achievements_users_levels.arguments.filename = achievements_users_levels.dbm
49 |
50 | dogpile_cache.goal_evaluation.backend = dogpile.cache.dbm
51 | dogpile_cache.goal_evaluation.arguments.filename = goal_evaluation.dbm
52 |
53 | dogpile_cache.goal_statements.backend = dogpile.cache.memory
54 |
55 | # memcache
56 | urlcache_url = memcached:11211
57 | urlcache_active = true
58 |
59 | # callback url, will be used for time-related leaderboard evaluations (daily,monthly,yearly) (TBD)
60 | notify_progress =
61 |
62 | enable_user_authentication = true
63 | fallback_language = en
64 | gcm.api_key=
65 | gcm.package=
66 | apns.dev.key=
67 | apns.dev.certificate=
68 | apns.prod.key=
69 | apns.prod.certificate=
70 | push_title=Gamification Engine
71 |
72 | ###
73 | # wsgi server configuration
74 | ###
75 |
76 | #[server:main]
77 | #use = egg:waitress#main
78 | #host = 0.0.0.0
79 | #port = 6543
80 |
81 |
82 | ###
83 | # logging configuration
84 | # http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/logging.html
85 | ###
86 |
87 | [loggers]
88 | keys = root, gengine, sqlalchemy
89 |
90 | [handlers]
91 | keys = console
92 |
93 | [formatters]
94 | keys = generic
95 |
96 | [logger_root]
97 | level = WARN
98 | handlers = console
99 |
100 | [logger_gengine]
101 | level = WARN
102 | handlers =
103 | qualname = gengine
104 |
105 | [logger_sqlalchemy]
106 | level = WARN
107 | handlers =
108 | qualname = sqlalchemy.engine
109 | # "level = INFO" logs SQL queries.
110 | # "level = DEBUG" logs SQL queries and results.
111 | # "level = WARN" logs neither. (Recommended for production systems.)
112 |
113 | [handler_console]
114 | class = StreamHandler
115 | args = (sys.stderr,)
116 | level = NOTSET
117 | formatter = generic
118 |
119 | [formatter_generic]
120 | format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s
121 |
--------------------------------------------------------------------------------
/gengine/maintenance/scripts/scheduler_worker.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import sys
3 | import logging
4 |
5 | from gengine.base.util import dt_now
6 | from sqlalchemy.sql.expression import and_, select
7 | from zope.sqlalchemy.datamanager import mark_changed
8 |
9 | import os
10 | import transaction
11 | from pyramid.paster import (
12 | get_appsettings,
13 | setup_logging,
14 | )
15 | from pyramid.scripts.common import parse_vars
16 |
17 | log = logging.getLogger(__name__)
18 | log.addHandler(logging.StreamHandler())
19 |
20 | def usage(argv):
21 | cmd = os.path.basename(argv[0])
22 |     print('usage: %s <config_uri> [var=value]\n'
23 |           '(example: "%s production.ini")' % (cmd, cmd))
24 | sys.exit(1)
25 |
26 |
27 | def main(argv=sys.argv):
28 | if len(argv) < 2:
29 | usage(argv)
30 | config_uri = argv[1]
31 | options = parse_vars(argv[2:])
32 | setup_logging(config_uri)
33 | settings = get_appsettings(config_uri, options=options)
34 |
35 | import gengine
36 | gengine.main({}, **settings)
37 |
38 | from gengine.metadata import (
39 | DBSession
40 | )
41 | sess = DBSession()
42 |
43 | import gengine.app.model as m
44 |
45 | from gengine.app.registries import get_task_registry
46 | enginetasks = get_task_registry().registrations
47 |
48 | to_run = []
49 |
50 | with transaction.manager:
51 | mark_changed(sess, transaction.manager, True)
52 |
53 | tasks = sess.execute(
54 | select([
55 | m.t_tasks.c.entry_name,
56 | m.t_tasks.c.task_name,
57 | m.t_tasks.c.config,
58 | m.t_taskexecutions.c.id.label("execution_id"),
59 | ], from_obj=m.t_taskexecutions.join(m.t_tasks)).\
60 | where(and_(
61 | m.t_taskexecutions.c.planned_at < dt_now(),
62 | m.t_taskexecutions.c.canceled_at==None,
63 | m.t_taskexecutions.c.locked_at==None,
64 | m.t_taskexecutions.c.finished_at==None,
65 | ))
66 | ).fetchall()
67 |
68 | for task in tasks:
69 | if task["task_name"] in enginetasks.keys():
70 | to_run.append(task)
71 |
72 | sess.execute(
73 | m.t_taskexecutions.update().values({
74 | 'locked_at' : dt_now()
75 | }).where(
76 | m.t_taskexecutions.c.id == task["execution_id"]
77 | )
78 | )
79 |
80 | sess.flush()
81 | sess.commit()
82 |
83 | for task in to_run:
84 | with transaction.manager:
85 | mark_changed(sess, transaction.manager, True)
86 |
87 | log.info("Running task %(task_name)s (ExecutionID: %(execution_id)s)" % {
88 | 'task_name': task["task_name"],
89 | 'execution_id': task["execution_id"]
90 | })
91 |
92 | result = get_task_registry().execute(
93 | name=task["task_name"],
94 | config=task["config"] # execute contains fallback to default_config
95 | )
96 | logged = result.get("log", None)
97 | success = result.get("success", True)
98 |
99 | sess.execute(
100 | m.t_taskexecutions.update().values({
101 | 'finished_at': dt_now(),
102 | 'log': logged,
103 | 'success': success
104 | }).where(
105 | m.t_taskexecutions.c.id == task["execution_id"]
106 | )
107 | )
108 |
109 | sess.flush()
110 | sess.commit()
111 |
--------------------------------------------------------------------------------
/development.ini:
--------------------------------------------------------------------------------
1 | ###
2 | # app configuration
3 | # http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/environment.html
4 | ###
5 |
6 | [app:main]
7 | use = egg:gamification-engine
8 |
9 | pyramid.reload_templates = true
10 | pyramid.debug_authorization = false
11 | pyramid.debug_notfound = false
12 | pyramid.debug_routematch = true
13 | pyramid.default_locale_name = en
14 | pyramid.includes =
15 | pyramid_debugtoolbar
16 | pyramid_tm
17 |
18 | sqlalchemy.url = postgres://postgres:password@postgres/gengine
19 |
20 | #reverse proxy settings
21 | force_https = false
22 | urlprefix =
23 | urlcacheid = gengine
24 | load_from_webpack_dev_server = false
25 |
26 | swagger.use_http_scheme = true
27 | swagger.host = localhost:6543
28 |
29 | #flaskadmin settings
30 | flaskadmin_secret = 87ghsjkdjfhg85grsfgsdfghwez89hsuif # replace with random string!
31 |
32 | # By default, the toolbar only appears for clients from IP addresses
33 | # '127.0.0.1' and '::1'.
34 | # debugtoolbar.hosts = 127.0.0.1 ::1
35 |
36 | # dogpile cache
37 | dogpile_cache.backend = dogpile.cache.null
38 |
39 | dogpile_cache.general.backend = dogpile.cache.dbm
40 | dogpile_cache.general.arguments.filename = general.dbm
41 |
42 | dogpile_cache.achievement_eval.backend = dogpile.cache.dbm
43 | dogpile_cache.achievement_eval.arguments.filename = achievement_eval.dbm
44 |
45 | dogpile_cache.achievements_by_user_for_today.backend = dogpile.cache.dbm
46 | dogpile_cache.achievements_by_user_for_today.arguments.filename = achievements_by_user_for_today.dbm
47 |
48 | dogpile_cache.translations.backend = dogpile.cache.dbm
49 | dogpile_cache.translations.arguments.filename = translations.dbm
50 |
51 | dogpile_cache.achievements_users_levels.backend = dogpile.cache.dbm
52 | dogpile_cache.achievements_users_levels.arguments.filename = achievements_users_levels.dbm
53 |
54 | dogpile_cache.goal_evaluation.backend = dogpile.cache.dbm
55 | dogpile_cache.goal_evaluation.arguments.filename = goal_evaluation.dbm
56 |
57 | dogpile_cache.goal_statements.backend = dogpile.cache.memory
58 |
59 | # memcache
60 | urlcache_url = memcached:11211
61 | urlcache_active = true
62 |
63 | # callback url, will be used for time-related leaderboard evaluations (daily,monthly,yearly) (TBD)
64 | notify_progress =
65 |
66 | enable_user_authentication = true
67 | fallback_language = en
68 | gcm.api_key=
69 | gcm.package=
70 | apns.dev.key=
71 | apns.dev.certificate=
72 | apns.prod.key=
73 | apns.prod.certificate=
74 | push_title=Gamification Engine
75 |
76 |
77 | ###
78 | # wsgi server configuration
79 | ###
80 |
81 | [server:main]
82 | use = egg:waitress#main
83 | host = 0.0.0.0
84 | port = 6543
85 |
86 | ###
87 | # logging configuration
88 | # http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/logging.html
89 | ###
90 |
91 | [loggers]
92 | keys = root, gengine, sqlalchemy, sentry
93 |
94 | [handlers]
95 | keys = console, sentry
96 |
97 | [formatters]
98 | keys = generic
99 |
100 | [logger_root]
101 | level = DEBUG
102 | handlers = console,sentry
103 |
104 | [logger_gengine]
105 | level = DEBUG
106 | handlers =
107 | qualname = gengine
108 |
109 | [logger_sqlalchemy]
110 | level = DEBUG
111 | handlers =
112 | qualname = sqlalchemy.engine
113 | # "level = INFO" logs SQL queries.
114 | # "level = DEBUG" logs SQL queries and results.
115 | # "level = WARN" logs neither. (Recommended for production systems.)
116 |
117 | [logger_sentry]
118 | level = WARN
119 | handlers = console
120 | qualname = sentry.errors
121 | propagate = 0
122 |
123 | [handler_console]
124 | class = StreamHandler
125 | args = (sys.stderr,)
126 | level = NOTSET
127 | formatter = generic
128 |
129 | [handler_sentry]
130 | class = raven.handlers.logging.SentryHandler
131 | args = ()
132 | level = WARNING
133 | formatter = generic
134 |
135 | [formatter_generic]
136 | format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s
137 |
--------------------------------------------------------------------------------
/gengine/base/model.py:
--------------------------------------------------------------------------------
1 | import pytz
2 | from pytz.exceptions import UnknownTimeZoneError
3 | from sqlalchemy.inspection import inspect
4 | from sqlalchemy.orm.exc import DetachedInstanceError
5 | from sqlalchemy.orm.query import Query
6 | from sqlalchemy.sql.expression import select
7 | from sqlalchemy.sql.functions import func
8 | from sqlalchemy.util.compat import with_metaclass
9 | from zope.sqlalchemy.datamanager import mark_changed
10 |
11 | import gengine.metadata as meta
12 |
13 | class ABaseMeta(type):
14 | def __init__(cls, name, bases, nmspc):
15 | super(ABaseMeta, cls).__init__(name, bases, nmspc)
16 |
17 | # monkey patch __unicode__
18 |         # this is required to show the SQL error to the user in flask admin if constraints are violated
19 | if hasattr(cls,"__unicode__"):
20 | old_unicode = cls.__unicode__
21 | def patched(self):
22 | try:
23 | return old_unicode(self)
24 | except DetachedInstanceError:
25 | return "(DetachedInstance)"
26 | cls.__unicode__ = patched
27 |
28 | def __getattr__(cls, item):
29 | if item == "__table__":
30 | return inspect(cls).local_table
31 | raise AttributeError(item)
32 |
33 |
34 | class ABase(with_metaclass(ABaseMeta, object)):
35 | """abstract base class which introduces a nice constructor for the model classes."""
36 |
37 | def __init__(self, *args, **kw):
38 | """ create a model object.
39 |
40 | pass attributes by using named parameters, e.g. name="foo", value=123
41 | """
42 |
43 | for k, v in kw.items():
44 | setattr(self, k, v)
45 |
46 | def __str__(self):
47 | if hasattr(self, "__unicode__"):
48 | return self.__unicode__()
49 |
50 | def __getitem__(self, key):
51 | return getattr(self,key)
52 |
53 | def __setitem__(self, key, item):
54 | return setattr(self,key,item)
55 |
56 |
57 | def calc_distance(latlong1, latlong2):
58 | """generates a sqlalchemy expression for distance query in km
59 |
60 | :param latlong1: the location from which we look for rows, as tuple (lat,lng)
61 |
62 | :param latlong2: the columns containing the latitude and longitude, as tuple (lat,lng)
63 | """
64 |
65 | # explain: http://geokoder.com/distances
66 |
67 | # return func.sqrt(func.pow(69.1 * (latlong1[0] - latlong2[0]),2)
68 | # + func.pow(53.0 * (latlong1[1] - latlong2[1]),2))
69 |
70 | return func.sqrt(func.pow(111.2 * (latlong1[0] - latlong2[0]), 2)
71 | + func.pow(111.2 * (latlong1[1] - latlong2[1]) * func.cos(latlong2[0]), 2))
72 |
73 |
74 | def coords(row):
75 | return (row["lat"], row["lng"])
76 |
77 |
78 | def combine_updated_at(list_of_dates):
79 | return max(list_of_dates)
80 |
81 |
82 | def get_insert_id_by_result(r):
83 | return r.last_inserted_ids()[0]
84 |
85 |
86 | def get_insert_ids_by_result(r):
87 | return r.last_inserted_ids()
88 |
89 |
90 | def exists_by_expr(t, expr):
91 | # TODO: use exists instead of count
92 | q = select([func.count("*").label("c")], from_obj=t).where(expr)
93 | r = meta.DBSession.execute(q).fetchone()
94 | if r.c > 0:
95 | return True
96 | else:
97 | return False
98 |
99 |
100 | def datetime_trunc(field, timezone):
101 | return "date_trunc('%(field)s', CAST(to_char(NOW() AT TIME ZONE %(timezone)s, 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP)) AT TIME ZONE %(timezone)s" % {
102 | "field": field,
103 | "timezone": timezone
104 | }
105 |
106 |
107 | def valid_timezone(timezone):
108 | try:
109 | pytz.timezone(timezone)
110 | except UnknownTimeZoneError:
111 | return False
112 | return True
113 |
114 |
115 | def update_connection():
116 | session = meta.DBSession() if callable(meta.DBSession) else meta.DBSession
117 | mark_changed(session)
118 | return session
119 |
120 |
--------------------------------------------------------------------------------
/gengine/app/api/schemas.py:
--------------------------------------------------------------------------------
1 | import pyramid_swagger_spec.swagger as sw
2 | from pyramid_swagger_spec.schema import JSchema
3 |
4 | r_status = JSchema(schema={
5 | "status": sw.property(sw.Types.string, nullable=False)
6 | })
7 |
8 | r_subject = JSchema(schema={
9 | 'id': sw.property(sw.Types.string, nullable=False),
10 | 'name': sw.property(sw.Types.string, nullable=True),
11 | 'created_at': sw.property(sw.Types.string, nullable=False),
12 | 'subjecttype_id': sw.property(sw.Types.string, nullable=False),
13 | 'path': sw.property(sw.Types.string, nullable=True),
14 | 'inherited_by_subjecttype_id': sw.property(sw.Types.number, nullable=True),
15 | 'inherited_by_name': sw.property(sw.Types.number, nullable=False),
16 | 'in_parent': sw.property(sw.Types.boolean, nullable=False),
17 | 'directly_in_parent': sw.property(sw.Types.boolean, nullable=False),
18 | 'inherited_by': sw.property(sw.Types.number, nullable=True),
19 | })
20 |
21 | r_subject_short = JSchema(schema={
22 | 'id': sw.property(sw.Types.string, nullable=False),
23 | 'name': sw.property(sw.Types.string, nullable=True),
24 | })
25 |
26 | r_subjectlist = JSchema(schema={
27 | 'subjects': sw.array_property(
28 | items=sw.object_property(
29 | properties=r_subject.get_json_schema()
30 | )
31 | )
32 | })
33 |
34 | b_subjectlist = JSchema(schema={
35 | "limit": sw.property(sw.Types.number, nullable=True),
36 | "offset": sw.property(sw.Types.number, nullable=True),
37 | "include_search": sw.property(sw.Types.string, nullable=True),
38 | "exclude_leaves": sw.property(sw.Types.boolean, nullable=True),
39 | "parent_subjecttype_id": sw.property(sw.Types.number, nullable=True),
40 | "parent_subject_id": sw.property(sw.Types.number, nullable=True),
41 | })
42 |
43 | b_subject_id = JSchema(schema={
44 | "subject_id": sw.property(sw.Types.number, nullable=True)
45 | })
46 |
47 |
48 | r_subjecttype = JSchema(schema={
49 | 'id': sw.property(sw.Types.string, nullable=False),
50 | 'name': sw.property(sw.Types.string, nullable=False),
51 | })
52 |
53 | r_subjecttypelist = JSchema(schema={
54 | 'subjecttypes': sw.array_property(
55 | items=sw.object_property(
56 | properties=r_subjecttype.get_json_schema()
57 | )
58 | )
59 | })
60 |
61 | r_variable = JSchema(schema={
62 | 'id': sw.property(sw.Types.string, nullable=False),
63 | 'name': sw.property(sw.Types.string, nullable=False),
64 | 'increase_permission': sw.property(sw.Types.string, nullable=False),
65 | })
66 |
67 | r_variablelist = JSchema(schema={
68 | 'variables': sw.array_property(
69 | items=sw.object_property(
70 | properties=r_variable.get_json_schema()
71 | )
72 | )
73 | })
74 |
75 |
76 | r_timezone = JSchema(schema={
77 | 'name': sw.property(sw.Types.string, nullable=False),
78 | })
79 |
80 | r_timezonelist = JSchema(schema={
81 | 'timezones': sw.array_property(
82 | items=sw.object_property(
83 | properties=r_timezone.get_json_schema()
84 | )
85 | )
86 | })
87 |
88 | # This currently only accepts the parameters which are needed for the leaderboard_creation form
89 | # To be extended if needed
90 | b_createachievement = JSchema(schema={
91 | 'name': sw.property(sw.Types.string, nullable=False),
92 | 'player_subjecttype_id': sw.property(sw.Types.number, nullable=False),
93 | 'context_subjecttype_id': sw.property(sw.Types.number, nullable=True),
94 | 'domain_subject_ids': sw.array_property(
95 | items=sw.property(sw.Types.number, nullable=False),
96 | nullable=True
97 | ),
98 | 'condition': sw.property(sw.Types.object, nullable=False),
99 | 'evaluation': sw.property(sw.Types.string, nullable=False),
100 | 'comparison_type': sw.property(sw.Types.string, nullable=False),
101 | 'evaluation_timezone': sw.property(sw.Types.string, nullable=False),
102 | 'evaluation_shift': sw.property(sw.Types.number, nullable=False),
103 | 'valid_start': sw.property(sw.Types.string, nullable=True),
104 | 'valid_end': sw.property(sw.Types.string, nullable=True),
105 | })
106 |
107 |
--------------------------------------------------------------------------------
/gengine/app/tests_old/test_auth.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from gengine.app.tests.base import BaseDBTest
3 | from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language
4 | from gengine.metadata import DBSession
5 | from gengine.app.model import AuthUser
6 |
7 |
8 | class TestUserCreation(BaseDBTest):
9 |
10 | def test_user_creation(self):
11 |
12 | lang = get_or_create_language("en")
13 | user = create_user(
14 | lat = 12.1,
15 | lng = 12.2,
16 | #country = "RO",
17 | #region = "Transylvania",
18 | #city = "Cluj-Napoca",
19 | timezone = "Europe/Bukarest",
20 | language = "en",
21 | additional_public_data = {
22 | "first_name" : "Rudolf",
23 | "last_name" : "Red Nose"
24 | }
25 | )
26 |
27 | self.assertTrue(user.lat == 12.1)
28 | self.assertTrue(user.lng == 12.2)
29 | #self.assertTrue(user.country == "RO")
30 | #self.assertTrue(user.region == "Transylvania")
31 | #self.assertTrue(user.city == "Cluj-Napoca")
32 | self.assertTrue(user.timezone == "Europe/Bukarest")
33 | self.assertTrue(user.language_id == lang.id)
34 | self.assertTrue(user.additional_public_data["first_name"] == "Rudolf")
35 | self.assertTrue(user.additional_public_data["last_name"] == "Red Nose")
36 |
37 | def test_user_updation(self):
38 |
39 | lang = get_or_create_language("en")
40 | user = create_user()
41 | user = update_user(
42 | user_id = user.id,
43 | lat = 14.2,
44 | lng = 16.3,
45 | #country = "EN",
46 | #region = "Transylvania",
47 | #city = "Cluj-Napoca",
48 | timezone = "Europe/Bukarest",
49 | language = "en",
50 | additional_public_data = {
51 | "first_name" : "Rudolf",
52 | "last_name" : "Red Nose"
53 | }
54 | )
55 |
56 | # Correct cases
57 | self.assertTrue(user.lat == 14.2)
58 | self.assertTrue(user.lng == 16.3)
59 | #self.assertTrue(user.country == "EN")
60 | #self.assertTrue(user.region == "Transylvania")
61 | #self.assertTrue(user.city == "Cluj-Napoca")
62 | self.assertTrue(user.timezone == "Europe/Bukarest")
63 | self.assertTrue(user.language_id == lang.id)
64 |
65 | def test_user_deletion(self):
66 |
67 | user1 = create_user()
68 |
69 | # Create Second user
70 | user2 = create_user(
71 | lat=85.59,
72 | lng=65.75,
73 | #country="DE",
74 | #region="Niedersachsen",
75 | #city="Osnabrück",
76 | timezone="Europe/Berlin",
77 | language="de",
78 | additional_public_data={
79 | "first_name": "Michael",
80 | "last_name": "Clarke"
81 | },
82 | friends=[1]
83 | )
84 |
85 | remaining_users = delete_user(
86 | user_id = user1.id
87 | )
88 |
89 | # Correct cases
90 | self.assertNotIn(user1.id, remaining_users)
91 | self.assertEqual(user2.id, remaining_users[0].id)
92 |
93 | def test_verify_password(self):
94 | auth_user = AuthUser()
95 | auth_user.password = "test12345"
96 | auth_user.active = True
97 | auth_user.email = "test@actidoo.com"
98 | DBSession.add(auth_user)
99 |
100 | iscorrect = auth_user.verify_password("test12345")
101 |
102 | self.assertEqual(iscorrect, True)
103 |
104 | def test_create_token(self):
105 | user = create_user()
106 | auth_user = AuthUser()
107 | auth_user.user_id = user.id
108 | auth_user.password = "test12345"
109 | auth_user.active = True
110 | auth_user.email = "test@actidoo.com"
111 | DBSession.add(auth_user)
112 |
113 | if auth_user.verify_password("test12345"):
114 | token = auth_user.get_or_create_token()
115 |
116 | self.assertNotEqual(token, None)
117 |
118 |
119 |
120 |
121 |
122 |
--------------------------------------------------------------------------------
/gengine/app/tests_old/test_groups.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from gengine.app.tests.base import BaseDBTest
3 | from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language
4 | from gengine.metadata import DBSession
5 | from gengine.app.model import AuthUser
6 |
7 |
8 | class TestUserCreation(BaseDBTest):
9 |
10 | def test_user_creation(self):
11 |
12 | lang = get_or_create_language("en")
13 | user = create_user(
14 | lat = 12.1,
15 | lng = 12.2,
16 | #country = "RO",
17 | #region = "Transylvania",
18 | #city = "Cluj-Napoca",
19 | timezone = "Europe/Bukarest",
20 | language = "en",
21 | additional_public_data = {
22 | "first_name" : "Rudolf",
23 | "last_name" : "Red Nose"
24 | }
25 | )
26 |
27 | self.assertTrue(user.lat == 12.1)
28 | self.assertTrue(user.lng == 12.2)
29 | #self.assertTrue(user.country == "RO")
30 | #self.assertTrue(user.region == "Transylvania")
31 | #self.assertTrue(user.city == "Cluj-Napoca")
32 | self.assertTrue(user.timezone == "Europe/Bukarest")
33 | self.assertTrue(user.language_id == lang.id)
34 | self.assertTrue(user.additional_public_data["first_name"] == "Rudolf")
35 | self.assertTrue(user.additional_public_data["last_name"] == "Red Nose")
36 |
37 | def test_user_updation(self):
38 |
39 | lang = get_or_create_language("en")
40 | user = create_user()
41 | user = update_user(
42 | user_id = user.id,
43 | lat = 14.2,
44 | lng = 16.3,
45 | #country = "EN",
46 | #region = "Transylvania",
47 | #city = "Cluj-Napoca",
48 | timezone = "Europe/Bukarest",
49 | language = "en",
50 | additional_public_data = {
51 | "first_name" : "Rudolf",
52 | "last_name" : "Red Nose"
53 | }
54 | )
55 |
56 | # Correct cases
57 | self.assertTrue(user.lat == 14.2)
58 | self.assertTrue(user.lng == 16.3)
59 | #self.assertTrue(user.country == "EN")
60 | #self.assertTrue(user.region == "Transylvania")
61 | #self.assertTrue(user.city == "Cluj-Napoca")
62 | self.assertTrue(user.timezone == "Europe/Bukarest")
63 | self.assertTrue(user.language_id == lang.id)
64 |
65 | def test_user_deletion(self):
66 |
67 | user1 = create_user()
68 |
69 | # Create Second user
70 | user2 = create_user(
71 | lat=85.59,
72 | lng=65.75,
73 | #country="DE",
74 | #region="Niedersachsen",
75 | #city="Osnabrück",
76 | timezone="Europe/Berlin",
77 | language="de",
78 | additional_public_data={
79 | "first_name": "Michael",
80 | "last_name": "Clarke"
81 | },
82 | friends=[1]
83 | )
84 |
85 | remaining_users = delete_user(
86 | user_id = user1.id
87 | )
88 |
89 | # Correct cases
90 | self.assertNotIn(user1.id, remaining_users)
91 | self.assertEqual(user2.id, remaining_users[0].id)
92 |
93 | def test_verify_password(self):
94 | auth_user = AuthUser()
95 | auth_user.password = "test12345"
96 | auth_user.active = True
97 | auth_user.email = "test@actidoo.com"
98 | DBSession.add(auth_user)
99 |
100 | iscorrect = auth_user.verify_password("test12345")
101 |
102 | self.assertEqual(iscorrect, True)
103 |
104 | def test_create_token(self):
105 | user = create_user()
106 | auth_user = AuthUser()
107 | auth_user.user_id = user.id
108 | auth_user.password = "test12345"
109 | auth_user.active = True
110 | auth_user.email = "test@actidoo.com"
111 | DBSession.add(auth_user)
112 |
113 | if auth_user.verify_password("test12345"):
114 | token = auth_user.get_or_create_token()
115 |
116 | self.assertNotEqual(token, None)
117 |
118 |
119 |
120 |
121 |
122 |
--------------------------------------------------------------------------------
/gengine/app/tests_old/test_eval_types_and_rewards.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | from gengine.app.cache import clear_all_caches
4 | from gengine.app.tests.base import BaseDBTest
5 | from gengine.app.tests.helpers import create_user, create_achievement, create_variable, create_goals, create_achievement_user
6 | from gengine.app.model import Achievement, Value
7 |
8 |
9 | class TestEvaluationForMultipleUsersAndTimezone(BaseDBTest):
10 |
11 | def test_friends_leaderboard(self):
12 |
13 | user1 = create_user()
14 |
15 | # Create Second user
16 | user2 = create_user(
17 | lat=85.59,
18 | lng=65.75,
19 | #country="DE",
20 | #region="Roland",
21 | #city="New York",
22 | timezone="US/Eastern",
23 | language="en",
24 | additional_public_data={
25 | "first_name": "Michael",
26 | "last_name": "Clarke"
27 | }
28 | )
29 |
30 | # Create Third user
31 | user3 = create_user(
32 | lat=12.1,
33 | lng=12.2,
34 | #country="RO",
35 | #region="Transylvania",
36 | #city="Cluj-Napoca",
37 | timezone="Europe/Bucharest",
38 | language="en",
39 | additional_public_data={
40 | "first_name": "Rudolf",
41 | "last_name": "Red Nose"
42 | },
43 | friends=[1, 2]
44 | )
45 |
46 | # Create Fourth user
47 | user4 = create_user(
48 | lat=25.56,
49 | lng=15.89,
50 | #country="AU",
51 | #region="Sydney",
52 | #city="New South Wales",
53 | timezone="Australia/Sydney",
54 | language="en",
55 | additional_public_data={
56 | "first_name": "Steve",
57 | "last_name": "Waugh"
58 | },
59 | friends=[3]
60 | )
61 |
62 | achievement = create_achievement(achievement_name="invite_users_achievement",
63 | achievement_relevance="friends",
64 | achievement_maxlevel=3,
65 | achievement_evaluation="weekly")
66 |
67 | print(achievement.evaluation_timezone)
68 | achievement_date1 = Achievement.get_datetime_for_evaluation_type(achievement.evaluation_timezone, achievement.evaluation)
69 | print("Achievement date for first user:")
70 | print(achievement_date1)
71 |
72 | create_variable("invite_users", variable_group="day")
73 |
74 | create_goals(achievement,
75 | goal_goal=None,
76 | goal_operator="geq",
77 | goal_group_by_key=False
78 | )
79 |
80 | Value.increase_value(variable_name="invite_users", user=user1, value=12, key=None, at_datetime=achievement_date1)
81 | Value.increase_value(variable_name="invite_users", user=user2, value=2, key=None, at_datetime=achievement_date1)
82 | Value.increase_value(variable_name="invite_users", user=user3, value=11, key=None, at_datetime=achievement_date1)
83 | Value.increase_value(variable_name="invite_users", user=user4, value=6, key=None, at_datetime=achievement_date1)
84 |
85 | clear_all_caches()
86 |
87 | print("test for multiple users")
88 |
89 | # Evaluate achievement for friends of user 3
90 | achievement1 = Achievement.evaluate(user3, achievement.id, achievement_date1)
91 | print(achievement1["goals"][1]["leaderboard"])
92 |
93 |         # user 3 has two friends: user 1 and user 2
94 | self.assertEqual(user1["id"], achievement1["goals"][1]["leaderboard"][0]["user"]["id"])
95 | self.assertEqual(user3["id"], achievement1["goals"][1]["leaderboard"][1]["user"]["id"])
96 | self.assertEqual(user2["id"], achievement1["goals"][1]["leaderboard"][2]["user"]["id"])
97 |
98 | self.assertEqual(12.0, achievement1["goals"][1]["leaderboard"][0]["value"])
99 | self.assertEqual(11.0, achievement1["goals"][1]["leaderboard"][1]["value"])
100 | self.assertEqual(2.0, achievement1["goals"][1]["leaderboard"][2]["value"])
101 |
102 |
--------------------------------------------------------------------------------
/docs/rest/index.rst:
--------------------------------------------------------------------------------
1 | :title: rest
2 | :description: introduction to the REST api
3 |
4 | REST API
5 | --------
6 |
7 | Add or update user data
8 | =======================
9 |
10 | - POST to "/add_or_update_subject/{subjectId}"
11 | - URL parameters:
12 | - subjectId (the Id of a subject in your system)
13 | - POST parameters:
14 | - lat (float latitude)
15 | - lon (float longitude)
16 | - country (String country)
17 | - city (String city)
18 | - region (String region)
19 | - friends (comma separated list of user Ids)
20 | - groups (comma separated list of group Ids)
21 | - language (String language name, e.g. "en")
22 | - additional_public_data (JSON)
23 |
24 | - adds or updates the subject with Id {subjectId} using the POST parameters in the engine's database; see the example below
25 | - if friend Ids are not yet registered, an empty record containing only that Id will be created
26 |
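A minimal example call, assuming a local development server at ``http://localhost:6543`` and the Python ``requests`` library (the subject Id and field values below are made up):

.. code:: python

    import requests

    BASE = "http://localhost:6543"  # assumed local dev server, adjust to your deployment

    resp = requests.post(
        BASE + "/add_or_update_subject/1234",
        data={
            "lat": 52.52,
            "lon": 13.40,
            "country": "DE",
            "region": "Berlin",
            "city": "Berlin",
            "language": "en",
            "friends": "1,2,3",
            "additional_public_data": '{"first_name": "Jane"}',
        },
    )
    print(resp.status_code)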
27 |
28 | Delete a subject
29 | ================
30 |
31 | - DELETE to "/delete_user/{userId}"
32 |
33 |
34 | Increase Value
35 | ==============
36 |
37 | - POST to "/increase_value/{variable_name}/{userId}/{key}"
38 | - URL parameters:
39 | - variable_name (the name of the variable to increase or decrease)
40 | - userId (the Id of the user)
41 | - key (an optional key, describing the context of the event, can be used in rules)
42 | - POST parameters:
43 | - value (the increase/decrease value in Double)
44 |
45 | - if the userId is not registered, an error will be thrown
46 | - directly evaluates all goals associated with this variable_name
47 | - directly returns new reached achievements
48 |
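A minimal example, assuming the same local server and the ``requests`` library (variable name, user Id and key are made up):

.. code:: python

    import requests

    BASE = "http://localhost:6543"  # assumed local dev server

    # increase the "invite_users" variable for user 42 by 1.0; "signup" is an optional context key
    resp = requests.post(BASE + "/increase_value/invite_users/42/signup", data={"value": 1.0})
    print(resp.json())  # includes any newly reached achievements
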
49 | Increase multiple Values at once
50 | ================================
51 |
52 | - POST to "/increase_multi_values"
53 | - JSON request body:
54 | .. code:: json
55 | {
56 | "{userId}" : {
57 | "{variable}" : [
58 | {
59 | "key" : "{key}",
60 | "value" : "{value}"
61 | }
62 | ]
63 | }
64 | }
65 |
66 | - directly evaluates all goals associated with the given variables
67 | - directly returns new reached achievements
68 |
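A sketch of the same call from Python, assuming the ``requests`` library (Ids, variable name and key are made up):

.. code:: python

    import requests

    BASE = "http://localhost:6543"  # assumed local dev server

    payload = {
        "42": {                              # userId
            "invite_users": [                # variable name
                {"key": "signup", "value": 1.0}
            ]
        }
    }
    resp = requests.post(BASE + "/increase_multi_values", json=payload)
    print(resp.json())  # includes any newly reached achievements
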
69 | Get Progress
70 | ============
71 |
72 | - get complete achievement progress for a single user
73 |
74 | - GET to "/progress/{userId}"
75 |
76 | - returns the complete achievement progress of a single user
77 |
78 | Get a single achievement Level
79 | ==============================
80 |
81 | - GET to "/achievement/{achievement_id}/level/{level}"
82 |
83 | - retrieves information about the rewards/properties of an achievement level
84 |
85 | Authentication
86 | ==============================
87 | - POST to "/auth/login"
88 | - Parameters in JSON-Body: email, password
89 | - Returns a json body with a token:
90 | .. code:: json
91 | {
92 | "token" : "foobar...."
93 | }
94 |
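A minimal login example, assuming the ``requests`` library (credentials are made up); the returned token can then be passed in the ``X-Auth-Token`` header of later requests:

.. code:: python

    import requests

    BASE = "http://localhost:6543"  # assumed local dev server

    resp = requests.post(BASE + "/auth/login", json={"email": "admin@example.com", "password": "secret"})
    token = resp.json()["token"]

    # use the token for authenticated calls, e.g. fetching a user's progress
    progress = requests.get(BASE + "/progress/42", headers={"X-Auth-Token": token})
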
95 | Register Device (for Push-Messages)
96 | ===================================
97 | - POST to "/register_device/{user_id}"
98 | - Parameters in JSON-Body: device_id, push_id, device_os, app_version
99 | - Returns a json body with an ok status, or an error:
100 | .. code:: json
101 | {
102 | "status" : "ok"
103 | }
104 |
105 | Get Messages
106 | ==============================
107 | - GET to "/messages/{user_id}"
108 | - Possible GET Parameters: offset
109 | - Limit is always 100
110 | - Returns a json body with the messages:
111 | .. code:: json
112 | {
113 | "messages" : [{
114 | "id" : "....",
115 | "text" : "....",
116 | "is_read" : false,
117 | "created_at" : "...."
118 | }]
119 | }
120 |
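A minimal example of fetching messages, assuming the ``requests`` library (the user Id is made up):

.. code:: python

    import requests

    BASE = "http://localhost:6543"  # assumed local dev server

    resp = requests.get(BASE + "/messages/42", params={"offset": 0})
    for message in resp.json()["messages"]:
        print(message["id"], message["is_read"], message["text"])
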
121 | Set Messages Read
122 | ==============================
123 | - POST to "/read_messages/{user_id}"
124 | - Parameters in JSON-Body: message_id
125 | - Sets all messages as read which are at least as old as the given message
126 | - Returns a json body with an ok status, or an error:
127 | .. code:: json
128 | {
129 | "status" : "ok"
130 | }
131 |
132 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/a90076b18837_tasks_and_shift.py:
--------------------------------------------------------------------------------
1 | """tasks and shift
2 |
3 | Revision ID: a90076b18837
4 | Revises: 8e65de1ed535
5 | Create Date: 2017-05-19 11:18:09.260416
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = 'a90076b18837'
11 | down_revision = '8e65de1ed535'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 | from sqlalchemy.dialects import postgresql
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('tasks',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('entry_name', sa.String(length=100), nullable=True),
24 | sa.Column('task_name', sa.String(length=100), nullable=False),
25 | sa.Column('config', sa.JSON(), nullable=True),
26 | sa.Column('cron', sa.String(length=100), nullable=True),
27 | sa.Column('is_removed', sa.Boolean(), nullable=False),
28 | sa.Column('is_auto_created', sa.Boolean(), nullable=False),
29 | sa.Column('is_manually_modified', sa.Boolean(), nullable=False),
30 | sa.PrimaryKeyConstraint('id', name=op.f('pk_tasks'))
31 | )
32 | op.create_index(op.f('ix_tasks_entry_name'), 'tasks', ['entry_name'], unique=False)
33 | op.create_index(op.f('ix_tasks_is_auto_created'), 'tasks', ['is_auto_created'], unique=False)
34 | op.create_index(op.f('ix_tasks_is_removed'), 'tasks', ['is_removed'], unique=False)
35 | op.create_index(op.f('ix_tasks_is_manually_modified'), 'tasks', ['is_manually_modified'], unique=False)
36 | op.create_index(op.f('ix_tasks_task_name'), 'tasks', ['task_name'], unique=False)
37 | op.create_table('taskexecutions',
38 | sa.Column('id', sa.Integer(), nullable=False),
39 | sa.Column('task_id', sa.Integer(), nullable=False),
40 | sa.Column('planned_at', postgresql.TIMESTAMP(timezone=True), nullable=False),
41 | sa.Column('locked_at', postgresql.TIMESTAMP(timezone=True), nullable=True),
42 | sa.Column('finished_at', postgresql.TIMESTAMP(timezone=True), nullable=True),
43 | sa.Column('canceled_at', postgresql.TIMESTAMP(timezone=True), nullable=True),
44 | sa.Column('log', sa.String(), nullable=True),
45 | sa.Column('success', sa.Boolean(), nullable=True),
46 | sa.ForeignKeyConstraint(['task_id'], ['tasks.id'], name=op.f('fk_taskexecutions_task_id_tasks'), ondelete='CASCADE'),
47 | sa.PrimaryKeyConstraint('id', name=op.f('pk_taskexecutions'))
48 | )
49 | op.create_index(op.f('ix_taskexecutions_canceled_at'), 'taskexecutions', ['canceled_at'], unique=False)
50 | op.create_index(op.f('ix_taskexecutions_finished_at'), 'taskexecutions', ['finished_at'], unique=False)
51 | op.create_index(op.f('ix_taskexecutions_locked_at'), 'taskexecutions', ['locked_at'], unique=False)
52 | op.create_index(op.f('ix_taskexecutions_planned_at'), 'taskexecutions', ['planned_at'], unique=False)
53 | op.create_index(op.f('ix_taskexecutions_success'), 'taskexecutions', ['success'], unique=False)
54 | op.create_index(op.f('ix_taskexecutions_task_id'), 'taskexecutions', ['task_id'], unique=False)
55 | op.add_column('achievements', sa.Column('evaluation_shift', sa.Integer(), nullable=True))
56 | ### end Alembic commands ###
57 |
58 |
59 | def downgrade():
60 | ### commands auto generated by Alembic - please adjust! ###
61 | op.drop_column('achievements', 'evaluation_shift')
62 | op.drop_index(op.f('ix_taskexecutions_task_id'), table_name='taskexecutions')
63 | op.drop_index(op.f('ix_taskexecutions_success'), table_name='taskexecutions')
64 | op.drop_index(op.f('ix_taskexecutions_planned_at'), table_name='taskexecutions')
65 | op.drop_index(op.f('ix_taskexecutions_locked_at'), table_name='taskexecutions')
66 | op.drop_index(op.f('ix_taskexecutions_finished_at'), table_name='taskexecutions')
67 | op.drop_index(op.f('ix_taskexecutions_canceled_at'), table_name='taskexecutions')
68 | op.drop_table('taskexecutions')
69 | op.drop_index(op.f('ix_tasks_task_name'), table_name='tasks')
70 | op.drop_index(op.f('ix_tasks_is_manually_modified'), table_name='tasks')
71 | op.drop_index(op.f('ix_tasks_is_removed'), table_name='tasks')
72 | op.drop_index(op.f('ix_tasks_is_auto_created'), table_name='tasks')
73 | op.drop_index(op.f('ix_tasks_entry_name'), table_name='tasks')
74 | op.drop_table('tasks')
75 | ### end Alembic commands ###
76 |
--------------------------------------------------------------------------------
/gengine/maintenance/scripts/scheduler_beat.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import sys
3 | import logging
4 | import datetime
5 | import os
6 | import pyramid_dogpile_cache
7 | import transaction
8 |
9 | from gengine.base.util import dt_ago, dt_now
10 | from sqlalchemy.sql.expression import and_
11 | from zope.sqlalchemy.datamanager import mark_changed
12 |
13 | from gengine.app.cache import init_caches
14 | from pyramid.config import Configurator
15 | from pyramid.paster import (
16 | get_appsettings,
17 | setup_logging,
18 | )
19 | from pyramid.scripts.common import parse_vars
20 | from sqlalchemy import engine_from_config
21 |
22 | log = logging.getLogger(__name__)
23 | log.addHandler(logging.StreamHandler())
24 |
25 |
26 | def usage(argv):
27 | cmd = os.path.basename(argv[0])
28 | print('usage: %s [var=value]\n'
29 | '(example: "%s production.ini")' % (cmd, cmd))
30 | sys.exit(1)
31 |
32 |
33 | def main(argv=sys.argv):
34 | if len(argv) < 2:
35 | usage(argv)
36 | config_uri = argv[1]
37 | options = parse_vars(argv[2:])
38 | setup_logging(config_uri)
39 | settings = get_appsettings(config_uri, options=options)
40 |
41 | import gengine
42 | gengine.main({}, **settings)
43 |
44 | from gengine.metadata import (
45 | DBSession
46 | )
47 | sess = DBSession()
48 |
49 | import gengine.app.model as m
50 | import crontab
51 |
52 | from gengine.app.registries import get_task_registry
53 | enginetasks = get_task_registry().registrations
54 |
55 | with transaction.manager:
56 | mark_changed(sess, transaction.manager, True)
57 |
58 | tasks = sess.execute(m.t_tasks.select()).fetchall()
59 |
60 | for task in tasks:
61 | cron = task["cron"]
62 | if not cron:
63 | cron = enginetasks.get(task["task_name"]).get("default_cron", None)
64 |
65 | if cron:
66 |
67 | now = dt_now().replace(second=0)
68 |
69 | item = crontab.CronItem(line=cron)
70 | s = item.schedule(date_from=now)
71 | prev = s.get_prev().replace(second=0)
72 | next = s.get_next().replace(second=0)
73 |
74 | execs = sess.execute(m.t_taskexecutions.select().where(and_(
75 | m.t_taskexecutions.c.task_id == task["id"],
76 | m.t_taskexecutions.c.canceled_at == None,
77 | m.t_taskexecutions.c.finished_at == None,
78 | )).order_by(m.t_taskexecutions.c.planned_at.desc())).fetchall()
79 |
80 | found = False
81 |
82 | for exec in execs:
83 |
84 | if exec["planned_at"] >= next:
85 | # The next execution is already planned
86 | found = True
87 |
88 | if exec["planned_at"] <= prev and prev < dt_ago(minutes=10) and not exec["locked_at"]:
89 | # The execution is more than 10 minutes in the past and not yet locked (worker not running / overloaded)
90 | if next - datetime.timedelta(minutes=10) < dt_now():
91 | # The next execution is planned in less than 10 minutes, cancel the other one
92 | sess.execute(
93 | m.t_taskexecutions.update().values({
94 | 'canceled_at': dt_now()
95 | }).where(
96 | m.t_taskexecutions.c.id == exec["id"]
97 | )
98 | )
99 |
100 | if exec["locked_at"] and exec["locked_at"] < dt_ago(hours=24):
101 | # this task is running for more than 24 hours. probably crashed.... set it to canceled
102 | sess.execute(
103 | m.t_taskexecutions.update().values({
104 | 'canceled_at': dt_now()
105 | }).where(
106 | m.t_taskexecutions.c.id == exec["id"]
107 | )
108 | )
109 |
110 | if not found:
111 | # Plan next execution
112 | sess.execute(
113 | m.t_taskexecutions.insert().values({
114 | 'task_id': task["id"],
115 | 'planned_at': next
116 | })
117 | )
118 |
119 | sess.flush()
120 | sess.commit()
121 |
--------------------------------------------------------------------------------
/gengine/app/static/api.js:
--------------------------------------------------------------------------------
1 | var setupAPIForm = function($, defaultcall, fields, api_funcs) {
2 |
3 | var container_fields = {};
4 |
5 | for(var i=0; i/g, '>');
68 | return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, function (match) {
69 | var cls = 'number';
70 | if (/^"/.test(match)) {
71 | if (/:$/.test(match)) {
72 | cls = 'key';
73 | } else {
74 | cls = 'string';
75 | }
76 | } else if (/true|false/.test(match)) {
77 | cls = 'boolean';
78 | } else if (/null/.test(match)) {
79 | cls = 'null';
80 | }
81 | return '<span class="' + cls + '">' + match + '</span>';
82 | });
83 | };
84 |
85 | //register form submit
86 | api_form.submit(function() {
87 | var url = api_settings_url;
88 | var method = api_settings_method;
89 | var postparams = api_settings_postparams;
90 | var jsonparams = api_settings_jsonparams;
91 | var getparams = api_settings_getparams;
92 | var ajax_options={};
93 |
94 | ajax_options["data"] = {};
95 | jsondata = {};
96 | encoded_get_params = [];
97 |
98 | for(var i=0; i0) {
127 | ajax_options["data"] = JSON.stringify(jsondata)
128 | }
129 |
130 | var request = $.ajax(ajax_options);
131 |
132 | request.done(function( msg ) {
133 | api_result.html("<pre>"+syntaxHighlight(msg)+"</pre>");
134 | });
135 |
136 | request.fail(function( jqXHR, textStatus ) {
137 | try {
138 | var json = JSON.parse(jqXHR.responseText);
139 | api_result.html("Error "+jqXHR.status+":\n"+syntaxHighlight(json)+"
");
140 | } catch(e) {
141 | api_result.html("Error: "+jqXHR.status+"
");
142 | }
143 | });
144 |
145 | return false;
146 | });
147 |
148 | //activate default api call
149 | call_select.val(defaultcall);
150 | activationfuncs[defaultcall]();
151 | };
--------------------------------------------------------------------------------
/gengine/app/leaderboard.py:
--------------------------------------------------------------------------------
1 | from gengine.base.util import dt_now
2 | from sqlalchemy.sql.expression import select, and_, or_
3 |
4 | from gengine.app.model import t_subjects, t_subjectrelations, t_subjects_subjects, Subject
5 | from gengine.metadata import DBSession
6 |
7 | import logging
8 | # TODO: ADD date filter (compare created_at)
9 | # TODO: filter deleted
10 |
11 |
12 | class GlobalLeaderBoardSubjectSet:
13 |
14 | @classmethod
15 | def forward(cls, subjecttype_id, from_date, to_date, whole_time_required):
16 | q = select([t_subjects.c.id, ]).where(t_subjects.c.subjecttype_id == subjecttype_id)
17 | if from_date != None and to_date != None:
18 | if whole_time_required:
19 | q = q.where(and_(
20 | t_subjects.c.created_at <= from_date
21 | #or_(
22 | # t_subjects.c.deleted_at == None,
23 | # t_subjects.c.deleted_at >= to_date
24 | #)
25 | ))
26 | else:
27 | q = q.where(or_(
28 | and_(
29 | t_subjects.c.created_at <= from_date,
30 | #or_(
31 | # t_subjects.c.deleted_at >= from_date,
32 | # t_subjects.c.deleted_at == None,
33 | #)
34 | ),
35 | and_(
36 | t_subjects.c.created_at >= from_date,
37 | t_subjects.c.created_at <= to_date,
38 | )
39 | ))
40 | return [x.id for x in DBSession.execute(q).fetchall()]
41 |
42 | #@classmethod
43 | #def reverse(cls):
44 | # return cls.forward()
45 |
46 |
47 | class RelationsLeaderBoardSubjectSet:
48 |
49 | @classmethod
50 | def forward(cls, subject_id, from_date, to_date, whole_time_required):
51 | subjects = [subject_id, ]
52 |
53 | q = select([t_subjectrelations.c.to_id, ], t_subjectrelations.c.from_id == subject_id)
54 |
55 | if from_date and to_date:
56 | if whole_time_required:
57 | q = q.where(and_(
58 | t_subjectrelations.c.created_at <= from_date,
59 | or_(
60 | t_subjectrelations.c.deleted_at == None,
61 | t_subjectrelations.c.deleted_at >= to_date
62 | )
63 | ))
64 | else:
65 | q = q.where(or_(
66 | and_(
67 | t_subjectrelations.c.created_at <= from_date,
68 | or_(
69 | t_subjectrelations.c.deleted_at >= from_date,
70 | t_subjectrelations.c.deleted_at == None,
71 | )
72 | ),
73 | and_(
74 | t_subjectrelations.c.created_at >= from_date,
75 | t_subjectrelations.c.created_at <= to_date,
76 | )
77 | ))
78 | else:
79 | q = q.where(
80 | t_subjectrelations.c.deleted_at == None,
81 | )
82 |
83 | subjects += [x["to_id"] for x in DBSession.execute(q).fetchall()]
84 |
85 | return subjects
86 |
87 | #@classmethod
88 | #def reverse(cls, subject_id):
89 | # subjects = [subject_id, ]
90 | # subjects += [x["from_id"] for x in DBSession.execute(select([t_subjects_subjects.c.from_id, ], t_subjects_subjects.c.to_id == subject_id)).fetchall()]
91 | # return subjects
92 |
93 |
94 | class ContextSubjectLeaderBoardSubjectSet:
95 |
96 | @classmethod
97 | def forward(cls, subjecttype_id, context_subject_id, from_date, to_date, whole_time_required=False):
98 | # We are comparing all subjects of type subject_type which have been part of context_subject_id between from_date and to_date
99 | # By default, they don't have to be a member for the whole time (whole_time_required).
100 |
101 | #print("Looking for descendents of %s of type %s" % (context_subject_id, subjecttype_id))
102 | #print("From Date: %s, To Date: %s, whole_time_required: %s" % (from_date, to_date, whole_time_required))
103 |
104 | descendent_subjects = Subject.get_descendent_subjects(
105 | subject_id=context_subject_id,
106 | of_type_id=subjecttype_id,
107 | from_date=from_date if from_date else dt_now(),
108 | to_date=to_date if to_date else dt_now(),
109 | whole_time_required=whole_time_required
110 | )
111 |
112 | subjects = list(descendent_subjects.keys())
113 |
114 | return subjects
115 |
116 | #@classmethod
117 | #def reverse(cls, subject_type_id, context_subject_id, from_date, to_date):
118 | # return cls.forward(subject_type=subject_type_id, context_subject_id=context_subject_id, from_date=from_date, to_date=to_date)
119 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # gamification-engine
2 | [](https://travis-ci.org/ActiDoo/gamification-engine)
3 | [](https://coveralls.io/github/ActiDoo/gamification-engine?branch=master)
4 | [](https://requires.io/github/ActiDoo/gamification-engine/requirements/?branch=master)
5 | [](https://gamification-engine.herokuapp.com)
6 | [](https://readthedocs.org/projects/gamification-engine/?badge=master)
7 | [](https://github.com/ActiDoo/gamification-engine/releases/latest)
8 | [](https://pypi.python.org/pypi/gamification-engine/)
10 | [](https://github.com/ActiDoo/gamification-engine/blob/master/LICENSE)
11 |
12 |
13 | The Gamification-Engine (gengine) is open-source software (MIT) for integrating all kinds of gamification features into your product.
14 |
15 | The engine is **not meant to be a platform** including any kinds of predefined graphics, layouts or statistics.
16 | It is a framework for developing your own solution, implemented as a **service** to which your application server can talk over a REST API.
17 |
18 | ( https://www.actidoo.com )
19 |
20 | Latest recommended version: https://github.com/ActiDoo/gamification-engine/releases/latest
21 |
22 | ## Features
23 |
24 | - multi level achievements
25 | - multi goal achievements
26 | - progress and single goals
27 | - leaderboards / ladders
28 | - achievements can be reached directly or by evaluating leaderboards daily, weekly, monthly or yearly
29 | - time zone independent (correct daily, weekly.. evaluations among time zones)
30 | - social-awareness (e.g. reach the highest score among your friends)
31 | - geo-awareness (e.g. reach the highest score among all users in your city)
32 | - rules can be defined in python using given variables (e.g. the current level)
33 | - custom definable achievement properties and rewards
34 | - custom definable languages and translations
35 | - dependencies between achievements (prerequisites & postconditions)
36 | - goals can execute triggers (currently creation of messages and mobile pushes for iOS/Android)
37 | - high performance / scalable
38 | - administration ui
39 |
40 | ## REST Interface
41 |
42 | Simple REST-interface to communicate with arbitrary systems
43 |
44 | see http://gamification-engine.readthedocs.org/en/latest/rest/index.html
45 |
46 | ## Demo Installation
47 |
48 | A demo installation is available at https://gamification-engine.herokuapp.com/
49 |
50 | ## Usage
51 |
52 | 1. A ready-to-use buildpack for Heroku and dokku, with instructions, is available at https://github.com/ActiDoo/gamification-engine-dokku
53 | 2. The current stable version is available as python package at https://pypi.python.org/pypi/gamification-engine
54 | 3. Check out the source code and configure the PostgreSQL database in `development.ini`. Then run `python setup.py develop` to set up the application, `initialize_gengine_db development.ini reset_db=True populate_demo=True` to initialize the database, and `pserve development.ini` to start the server. The admin front end is then available at http://localhost:6543/admin/
55 |
56 | ## Docker support (2019-12)
57 |
58 | For development run:
59 | `docker-compose up`
60 | Go to `localhost:9000` to access the admin interface or `localhost:6060` to access Adminer.
61 | The code is mounted into the container and the server is started with `--reload`.
62 |
63 | For production deployment adapt passwords and settings in `production.ini` and `docker-compose.production.yml` and run:
64 | ```
65 | docker-compose -f docker-compose.production.yml up -d
66 | ```
67 | A production-ready server will run on `localhost:9000`; log in using the credentials set in `docker-compose.production.yml`.
68 |
69 | ## Roadmap
70 |
71 | Do you want to contribute? We badly need tests!
72 | A nicer admin UI would also be great, or maybe some cool new features :-)
73 |
74 | For a full list see:
75 |
76 | http://gamification-engine.readthedocs.org/en/latest/roadmap.html
77 |
78 | ## Licenses of dependencies
79 |
80 | Pyramid: BSD-derived (http://www.repoze.org/LICENSE.txt)
81 |
82 | pyramid_chameleon: BSD-derived (http://www.repoze.org/LICENSE.txt)
83 |
84 | pyramid_debugtoolbar: BSD
85 |
86 | SQLAlchemy: MIT
87 |
88 | zope.sqlalchemy: Zope Public License (ZPL) Version 2.1
89 |
90 | waitress: Zope Public License (ZPL) Version 2.1
91 |
92 | pytz: MIT
93 |
94 | dogpile.cache: BSD
95 |
96 | pyramid_dogpile_cache: MIT
97 |
98 | flask: BSD
99 |
100 | flask-admin: BSD
101 |
102 | psycopg2: LGPL / ZPL
103 |
104 | pymemcache: Apache License 2.0
105 |
106 | mock: BSD
107 |
108 | alembic: MIT
109 |
--------------------------------------------------------------------------------
/gengine/app/alembic/versions/46e4971dc46f_extended_groups.py:
--------------------------------------------------------------------------------
1 | """extended_groups
2 |
3 | Revision ID: 46e4971dc46f
4 | Revises: a90076b18837
5 | Create Date: 2017-05-19 17:08:34.780367
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '46e4971dc46f'
11 | down_revision = 'a90076b18837'
12 | branch_labels = None
13 | depends_on = None
14 |
15 | from alembic import op
16 | import sqlalchemy as sa
17 |
18 |
19 | def upgrade():
20 | ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('grouptypes',
22 | sa.Column('id', sa.BigInteger(), nullable=False),
23 | sa.Column('name', sa.String(length=100), nullable=False),
24 | sa.PrimaryKeyConstraint('id', name=op.f('pk_grouptypes')),
25 | sa.UniqueConstraint('name', name=op.f('uq_grouptypes_name'))
26 | )
27 | op.create_table('grouptypes_grouptypes',
28 | sa.Column('id', sa.BigInteger(), nullable=False),
29 | sa.Column('grouptype_id', sa.BigInteger(), nullable=False),
30 | sa.Column('part_of_id', sa.BigInteger(), nullable=False),
31 | sa.ForeignKeyConstraint(['grouptype_id'], ['grouptypes.id'], name=op.f('fk_grouptypes_grouptypes_grouptype_id_grouptypes'), ondelete='CASCADE'),
32 | sa.ForeignKeyConstraint(['part_of_id'], ['grouptypes.id'], name=op.f('fk_grouptypes_grouptypes_part_of_id_grouptypes'), ondelete='CASCADE'),
33 | sa.PrimaryKeyConstraint('id', name=op.f('pk_grouptypes_grouptypes')),
34 | sa.UniqueConstraint('grouptype_id', 'part_of_id', name=op.f('uq_grouptypes_grouptypes_grouptype_id'))
35 | )
36 | op.create_index(op.f('ix_grouptypes_grouptypes_grouptype_id'), 'grouptypes_grouptypes', ['grouptype_id'], unique=False)
37 | op.create_index(op.f('ix_grouptypes_grouptypes_part_of_id'), 'grouptypes_grouptypes', ['part_of_id'], unique=False)
38 | op.create_table('groups_groups',
39 | sa.Column('id', sa.BigInteger(), nullable=False),
40 | sa.Column('group_id', sa.BigInteger(), nullable=False),
41 | sa.Column('part_of_id', sa.BigInteger(), nullable=False),
42 | sa.ForeignKeyConstraint(['group_id'], ['groups.id'], name=op.f('fk_groups_groups_group_id_groups'), ondelete='CASCADE'),
43 | sa.ForeignKeyConstraint(['part_of_id'], ['groups.id'], name=op.f('fk_groups_groups_part_of_id_groups'), ondelete='CASCADE'),
44 | sa.PrimaryKeyConstraint('id', name=op.f('pk_groups_groups')),
45 | sa.UniqueConstraint('group_id', 'part_of_id', name=op.f('uq_groups_groups_group_id'))
46 | )
47 | op.create_index(op.f('ix_groups_groups_group_id'), 'groups_groups', ['group_id'], unique=False)
48 | op.create_index(op.f('ix_groups_groups_part_of_id'), 'groups_groups', ['part_of_id'], unique=False)
49 | op.add_column('groups', sa.Column('grouptype_id', sa.BigInteger(), nullable=False))
50 | op.create_index(op.f('ix_groups_grouptype_id'), 'groups', ['grouptype_id'], unique=False)
51 | op.create_foreign_key(op.f('fk_groups_grouptype_id_grouptypes'), 'groups', 'grouptypes', ['grouptype_id'], ['id'], ondelete='CASCADE')
52 | ### end Alembic commands ###
53 |
54 | op.execute("""
55 | CREATE OR REPLACE FUNCTION check_grouptypes_grouptypes_cycle() RETURNS trigger AS $$
56 | DECLARE
57 | cycles INTEGER;
58 | BEGIN
59 | LOCK TABLE grouptypes_grouptypes IN ACCESS EXCLUSIVE MODE;
60 | WITH RECURSIVE search_graph(part_of_id, group_id, id, depth, path, cycle) AS (
61 | SELECT NEW.part_of_id, NEW.group_id, NEW.id, 1,
62 | ARRAY[NEW.id], false
63 | UNION ALL
64 | SELECT g.part_of_id, g.group_id, g.id, sg.depth + 1,
65 | path || g.id,
66 | g.id = ANY(path)
67 | FROM grouptypes_grouptypes g, search_graph sg
68 | WHERE g.part_of_id = sg.group_id AND NOT cycle
69 | )
70 | SELECT INTO cycles COUNT(*) FROM search_graph WHERE cycle=true;
71 | RAISE NOTICE 'cycles: %', cycles;
72 | IF cycles > 0 THEN
73 | RAISE EXCEPTION 'cycle';
74 | END IF;
75 | RETURN NEW;
76 | END
77 | $$ LANGUAGE plpgsql;
78 |
79 | CREATE TRIGGER check_grouptypes_grouptypes_cycle AFTER INSERT OR UPDATE ON grouptypes_grouptypes
80 | FOR EACH ROW EXECUTE PROCEDURE check_grouptypes_grouptypes_cycle();
81 | """)
82 |
83 | op.execute("""
84 | CREATE OR REPLACE FUNCTION check_groups_groups_cycle() RETURNS trigger AS $$
85 | DECLARE
86 | cycles INTEGER;
87 | BEGIN
88 | LOCK TABLE groups_groups IN ACCESS EXCLUSIVE MODE;
89 | WITH RECURSIVE search_graph(part_of_id, group_id, id, depth, path, cycle) AS (
90 | SELECT NEW.part_of_id, NEW.group_id, NEW.id, 1,
91 | ARRAY[NEW.id], false
92 | UNION ALL
93 | SELECT g.part_of_id, g.group_id, g.id, sg.depth + 1,
94 | path || g.id,
95 | g.id = ANY(path)
96 | FROM groups_groups g, search_graph sg
97 | WHERE g.part_of_id = sg.group_id AND NOT cycle
98 | )
99 | SELECT INTO cycles COUNT(*) FROM search_graph WHERE cycle=true;
100 | RAISE NOTICE 'cycles: %', cycles;
101 | IF cycles > 0 THEN
102 | RAISE EXCEPTION 'cycle';
103 | END IF;
104 | RETURN NEW;
105 | END
106 | $$ LANGUAGE plpgsql;
107 |
108 | CREATE TRIGGER check_groups_groups_cycle AFTER INSERT OR UPDATE ON groups_groups
109 | FOR EACH ROW EXECUTE PROCEDURE check_groups_groups_cycle();
110 | """)
111 |
112 |
113 | def downgrade():
114 | ### commands auto generated by Alembic - please adjust! ###
115 | raise NotImplementedError()
116 | ### end Alembic commands ###
117 |
--------------------------------------------------------------------------------
/gengine/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from pyramid.events import NewRequest
3 |
4 | from gengine.app.permissions import yield_all_perms
5 | from gengine.base.context import reset_context
6 | from gengine.base.errors import APIError
7 | from gengine.base.settings import set_settings
8 | from gengine.base.util import dt_now
9 |
10 | __version__ = '0.4.0'
11 |
12 | import datetime
13 |
14 | import os
15 | from pyramid.config import Configurator
16 | from pyramid.renderers import JSON
17 | from pyramid.settings import asbool
18 | from sqlalchemy import engine_from_config, create_engine
19 |
20 | from gengine.wsgiutil import HTTPSProxied, init_reverse_proxy
21 |
22 | def main(global_config, **settings):
23 | """ This function returns a Pyramid WSGI application.
24 | """
25 |
26 | durl = os.environ.get("DATABASE_URL") #heroku
27 | if durl:
28 | settings['sqlalchemy.url']=durl
29 |
30 | murl = os.environ.get("MEMCACHED_URL") #heroku
31 | if murl:
32 | settings['urlcache_url']=murl
33 |
34 | set_settings(settings)
35 |
36 | if os.environ.get("DATABASE_URL",None):
37 | engine = create_engine(os.environ["DATABASE_URL"], connect_args={"options": "-c timezone=utc"})
38 | else:
39 | engine = engine_from_config(settings, 'sqlalchemy.', connect_args={"options": "-c timezone=utc"}, )
40 |
41 | from gengine.app.cache import init_caches
42 | init_caches()
43 |
44 | from gengine.metadata import init_session, init_declarative_base, init_db
45 |
46 | init_session()
47 | init_declarative_base()
48 | init_db(engine)
49 |
50 | from gengine.base.monkeypatch_flaskadmin import do_monkeypatch
51 | do_monkeypatch()
52 |
53 | def reset_context_on_new_request(event):
54 | reset_context()
55 |
56 | from gengine.app.api.resources import root_factory
57 | config = Configurator(settings=settings, root_factory=root_factory)
58 | config.add_subscriber(reset_context_on_new_request,NewRequest)
59 | config.include('pyramid_dogpile_cache')
60 | config.include('pyramid_swagger_spec')
61 |
62 | config.include("pyramid_tm")
63 | config.include('pyramid_chameleon')
64 | config.include('gengine.app.tasks')
65 | config.include('gengine.app.jsscripts')
66 |
67 | urlprefix = settings.get("urlprefix","")
68 | urlcacheid = settings.get("urlcacheid","gengine")
69 | force_https = asbool(settings.get("force_https",False))
70 | init_reverse_proxy(force_https, urlprefix)
71 |
72 | urlcache_url = settings.get("urlcache_url","127.0.0.1:11211")
73 | urlcache_active = asbool(os.environ.get("URLCACHE_ACTIVE", settings.get("urlcache_active",True)))
74 |
75 | #auth
76 | def get_user(request):
77 | if not asbool(settings.get("enable_user_authentication",False)):
78 | return None
79 | token = request.headers.get('X-Auth-Token')
80 | if (not token) and request.cookies.get("X-Auth-Token"):
81 | token = request.cookies.get("X-Auth-Token")
82 | if token is not None:
83 | from gengine.app.model import DBSession, AuthUser, AuthToken
84 | tokenObj = DBSession.query(AuthToken).filter(AuthToken.token.like(token)).first()
85 | user = None
86 | if tokenObj and tokenObj.valid_until < dt_now():
87 | tokenObj.extend()
88 | if tokenObj:
89 | user = tokenObj.user
90 | if not user:
91 | raise APIError(401, "invalid_token", "Invalid token provided.")
92 | if not user.active:
93 | raise APIError(404, "user_is_not_activated", "Your user is not activated.")
94 | return user
95 | return None
96 |
97 | def get_permissions(request):
98 | if not asbool(settings.get("enable_user_authentication", False)):
99 | return [x[0] for x in yield_all_perms()]
100 |
101 | if not request.user:
102 | return []
103 |
104 | from gengine.app.model import DBSession, t_auth_tokens, t_auth_users, t_auth_roles, t_auth_roles_permissions, t_auth_users_roles
105 | from sqlalchemy.sql import select
106 | j = t_auth_users_roles.join(t_auth_roles).join(t_auth_roles_permissions)
107 | q = select([t_auth_roles_permissions.c.name],from_obj=j).where(t_auth_users_roles.c.auth_user_id==request.user.id)
108 | rows = DBSession.execute(q).fetchall()
109 | return [r["name"] for r in rows]
110 |
111 | def get_subject(request):
112 | return request.user.subject if request.user else None
113 |
114 | def has_perm(request, name):
115 | return name in request.permissions
116 |
117 | config.add_request_method(get_user, 'user', reify=True)
118 | config.add_request_method(get_subject, 'subject', reify=True)
119 | config.add_request_method(get_permissions, 'permissions', reify=True)
120 | config.add_request_method(has_perm, 'has_perm')
121 |
122 | #routes
123 | from gengine.app.route import config_routes as config_app_routes
124 |
125 | config.include(config_app_routes, route_prefix=urlprefix)
126 |
127 | #date serialization
128 | json_renderer = JSON()
129 | def datetime_adapter(obj, request):
130 | return obj.isoformat()
131 | json_renderer.add_adapter(datetime.datetime, datetime_adapter)
132 | config.add_renderer('json', json_renderer)
133 |
134 | config.scan(ignore=["gengine.app.tests"])
135 |
136 | config.add_route('admin_app', '/admin/*subpath')
137 | from gengine.app.admin import init_admin as init_tenantadmin
138 | init_tenantadmin(urlprefix=urlprefix,
139 | secret=settings.get("flaskadmin_secret", "fKY7kJ2xSrbPC5yieEjV"))
140 |
141 | return HTTPSProxied(config.make_wsgi_app())
142 |
--------------------------------------------------------------------------------
/wait-for-it.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Use this script to test if a given TCP host/port are available
3 |
4 | WAITFORIT_cmdname=${0##*/}
5 |
6 | echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
7 |
8 | usage()
9 | {
10 | cat << USAGE >&2
11 | Usage:
12 | $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
13 | -h HOST | --host=HOST Host or IP under test
14 | -p PORT | --port=PORT TCP port under test
15 | Alternatively, you specify the host and port as host:port
16 | -s | --strict Only execute subcommand if the test succeeds
17 | -q | --quiet Don't output any status messages
18 | -t TIMEOUT | --timeout=TIMEOUT
19 | Timeout in seconds, zero for no timeout
20 | -- COMMAND ARGS Execute command with args after the test finishes
21 | USAGE
22 | exit 1
23 | }
24 |
25 | wait_for()
26 | {
27 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
28 | echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
29 | else
30 | echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
31 | fi
32 | WAITFORIT_start_ts=$(date +%s)
33 | while :
34 | do
35 | if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
36 | nc -z $WAITFORIT_HOST $WAITFORIT_PORT
37 | WAITFORIT_result=$?
38 | else
39 | (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
40 | WAITFORIT_result=$?
41 | fi
42 | if [[ $WAITFORIT_result -eq 0 ]]; then
43 | WAITFORIT_end_ts=$(date +%s)
44 | echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
45 | break
46 | fi
47 | sleep 1
48 | done
49 | return $WAITFORIT_result
50 | }
51 |
52 | wait_for_wrapper()
53 | {
54 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
55 | if [[ $WAITFORIT_QUIET -eq 1 ]]; then
56 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
57 | else
58 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
59 | fi
60 | WAITFORIT_PID=$!
61 | trap "kill -INT -$WAITFORIT_PID" INT
62 | wait $WAITFORIT_PID
63 | WAITFORIT_RESULT=$?
64 | if [[ $WAITFORIT_RESULT -ne 0 ]]; then
65 | echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
66 | fi
67 | return $WAITFORIT_RESULT
68 | }
69 |
70 | # process arguments
71 | while [[ $# -gt 0 ]]
72 | do
73 | case "$1" in
74 | *:* )
75 | WAITFORIT_hostport=(${1//:/ })
76 | WAITFORIT_HOST=${WAITFORIT_hostport[0]}
77 | WAITFORIT_PORT=${WAITFORIT_hostport[1]}
78 | shift 1
79 | ;;
80 | --child)
81 | WAITFORIT_CHILD=1
82 | shift 1
83 | ;;
84 | -q | --quiet)
85 | WAITFORIT_QUIET=1
86 | shift 1
87 | ;;
88 | -s | --strict)
89 | WAITFORIT_STRICT=1
90 | shift 1
91 | ;;
92 | -h)
93 | WAITFORIT_HOST="$2"
94 | if [[ $WAITFORIT_HOST == "" ]]; then break; fi
95 | shift 2
96 | ;;
97 | --host=*)
98 | WAITFORIT_HOST="${1#*=}"
99 | shift 1
100 | ;;
101 | -p)
102 | WAITFORIT_PORT="$2"
103 | if [[ $WAITFORIT_PORT == "" ]]; then break; fi
104 | shift 2
105 | ;;
106 | --port=*)
107 | WAITFORIT_PORT="${1#*=}"
108 | shift 1
109 | ;;
110 | -t)
111 | WAITFORIT_TIMEOUT="$2"
112 | if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
113 | shift 2
114 | ;;
115 | --timeout=*)
116 | WAITFORIT_TIMEOUT="${1#*=}"
117 | shift 1
118 | ;;
119 | --)
120 | shift
121 | WAITFORIT_CLI=("$@")
122 | break
123 | ;;
124 | --help)
125 | usage
126 | ;;
127 | *)
128 | echoerr "Unknown argument: $1"
129 | usage
130 | ;;
131 | esac
132 | done
133 |
134 | if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
135 | echoerr "Error: you need to provide a host and port to test."
136 | usage
137 | fi
138 |
139 | WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
140 | WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
141 | WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
142 | WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
143 |
144 | # check to see if timeout is from busybox?
145 | WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
146 | WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
147 | if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
148 | WAITFORIT_ISBUSY=1
149 | WAITFORIT_BUSYTIMEFLAG=""
150 |
151 | else
152 | WAITFORIT_ISBUSY=0
153 | WAITFORIT_BUSYTIMEFLAG="-t"
154 | fi
155 |
156 | if [[ $WAITFORIT_CHILD -gt 0 ]]; then
157 | wait_for
158 | WAITFORIT_RESULT=$?
159 | exit $WAITFORIT_RESULT
160 | else
161 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
162 | wait_for_wrapper
163 | WAITFORIT_RESULT=$?
164 | else
165 | wait_for
166 | WAITFORIT_RESULT=$?
167 | fi
168 | fi
169 |
170 | if [[ $WAITFORIT_CLI != "" ]]; then
171 | if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
172 | echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
173 | exit $WAITFORIT_RESULT
174 | fi
175 | exec "${WAITFORIT_CLI[@]}"
176 | else
177 | exit $WAITFORIT_RESULT
178 | fi
179 |
--------------------------------------------------------------------------------