├── .meta ├── logs │ └── log.out ├── modified ├── user_log ├── vGraph_json │ └── .gitkeep ├── config ├── tracker └── users │ └── tester │ └── config ├── orpheus ├── __init__.py ├── clt │ ├── __init__.py │ ├── db.py │ └── click_entry.py ├── core │ ├── __init__.py │ ├── orpheus_const.py │ ├── access.py │ ├── encryption.py │ ├── helper.py │ ├── orpheus_exceptions.py │ ├── db.py │ ├── orpheus_schema_parser.py │ ├── user_control.py │ ├── vgraph.py │ ├── version.py │ ├── metadata.py │ ├── orpheus_sqlparse.py │ ├── relation.py │ └── executor.py └── interface │ ├── __init__.py │ ├── src │ ├── __init__.py │ ├── db.py │ └── cmd_parser.py │ ├── main │ ├── __init__.py │ ├── templates │ │ ├── vGraph.html │ │ └── main │ │ │ ├── index_all.html │ │ │ └── index.html │ ├── admin.py │ ├── tests.py │ ├── static │ │ ├── dist │ │ │ ├── fonts │ │ │ │ ├── glyphicons-halflings-regular.eot │ │ │ │ ├── glyphicons-halflings-regular.ttf │ │ │ │ ├── glyphicons-halflings-regular.woff │ │ │ │ └── glyphicons-halflings-regular.woff2 │ │ │ └── js │ │ │ │ └── npm.js │ │ ├── assets │ │ │ ├── css │ │ │ │ ├── ie10-viewport-bug-workaround.css │ │ │ │ ├── src │ │ │ │ │ └── pygments-manni.css │ │ │ │ └── docs.min.css │ │ │ └── js │ │ │ │ ├── ie10-viewport-bug-workaround.js │ │ │ │ ├── ie-emulation-modes-warning.js │ │ │ │ └── vendor │ │ │ │ └── holder.min.js │ │ ├── dashboard.css │ │ └── tree.js │ ├── apps.py │ ├── urls.py │ ├── models.py │ └── views.py │ ├── orpheus_ui │ ├── __init__.py │ ├── wsgi.py │ ├── urls.py │ └── settings.py │ └── manage.py ├── test ├── sample_schema.csv ├── data.csv └── small_table_test.sql ├── config.yaml ├── future └── TODOs ├── .gitignore ├── setup.py └── README.md /.meta/logs/log.out: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.meta/modified: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /.meta/user_log: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /orpheus/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /orpheus/clt/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /orpheus/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.meta/vGraph_json/.gitkeep: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /orpheus/interface/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /orpheus/interface/src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /orpheus/interface/main/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /orpheus/interface/orpheus_ui/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.meta/config: -------------------------------------------------------------------------------- 1 | {"user": "", "passphrase": "", "database": ""} -------------------------------------------------------------------------------- /test/sample_schema.csv: -------------------------------------------------------------------------------- 1 | employee_id,int 2 | age,int 3 | salary,int 4 | -------------------------------------------------------------------------------- /.meta/tracker: -------------------------------------------------------------------------------- 1 | {"file_map": {}, "table_map": {}, "table_created_time": {}, "merged_tables": [], "delta": {}} 2 | -------------------------------------------------------------------------------- /orpheus/interface/main/templates/vGraph.html: -------------------------------------------------------------------------------- 1 | {% load static %} 2 | -------------------------------------------------------------------------------- /.meta/users/tester/config: -------------------------------------------------------------------------------- 1 | {"user": "tester", "passphrase": "517dff11e4d92b1e1184f29e2ee7392e3f654ef4ad4b66acbc20e9ad63494e92"} -------------------------------------------------------------------------------- /orpheus/interface/main/admin.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.contrib import admin 5 | 6 | # Register your models here. 7 | -------------------------------------------------------------------------------- /orpheus/interface/main/tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.test import TestCase 5 | 6 | # Create your tests here. 7 | -------------------------------------------------------------------------------- /test/data.csv: -------------------------------------------------------------------------------- 1 | 66000001,30,10340 2 | 66000002,18,4000 3 | 66000003,40,20500 4 | 66000004,23,7000 5 | 66000005,21,7400 6 | 66000006,32,10320 7 | 66000007,41,54020 8 | 66000008,22,8000 -------------------------------------------------------------------------------- /orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orpheus-db/implementation/HEAD/orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.eot -------------------------------------------------------------------------------- /orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orpheus-db/implementation/HEAD/orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.ttf -------------------------------------------------------------------------------- /orpheus/interface/main/apps.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.apps import AppConfig 5 | 6 | 7 | class MainConfig(AppConfig): 8 | name = 'main' 9 | -------------------------------------------------------------------------------- /orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orpheus-db/implementation/HEAD/orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.woff -------------------------------------------------------------------------------- /orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orpheus-db/implementation/HEAD/orpheus/interface/main/static/dist/fonts/glyphicons-halflings-regular.woff2 -------------------------------------------------------------------------------- /orpheus/core/orpheus_const.py: -------------------------------------------------------------------------------- 1 | # PostgreSQL schema prefix 2 | PUBLIC_SCHEMA = 'public.' 3 | 4 | # CVD suffix 5 | DATATABLE_SUFFIX = '_datatable' 6 | INDEXTABLE_SUFFIX = '_indextable' 7 | VERSIONTABLE_SUFFIX = '_versiontable' 8 | -------------------------------------------------------------------------------- /orpheus/core/access.py: -------------------------------------------------------------------------------- 1 | class AccessManager(): 2 | 3 | def check_access(self): 4 | print "Check access: Under construction." 5 | 6 | @staticmethod 7 | def grant_access(table, user_name): 8 | print "Grant access: Under construction." 9 | 10 | -------------------------------------------------------------------------------- /orpheus/core/encryption.py: -------------------------------------------------------------------------------- 1 | import hashlib, binascii 2 | 3 | class EncryptionTool(): 4 | 5 | # salt should have a random state? 6 | @staticmethod 7 | def passphrase_hash(raw, salt=b'datahub', method='sha256', iteration=100000): 8 | return binascii.hexlify(hashlib.pbkdf2_hmac(method, raw, salt, iteration)) -------------------------------------------------------------------------------- /orpheus/interface/main/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls import url 2 | from django.conf import settings 3 | from django.conf.urls.static import static 4 | 5 | from . import views 6 | 7 | app_name = 'main' 8 | 9 | urlpatterns = [ 10 | url(r'^$', views.index, name='index'), 11 | ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) -------------------------------------------------------------------------------- /config.yaml: -------------------------------------------------------------------------------- 1 | # Server information 2 | host: localhost 3 | port: 5432 4 | 5 | # Orpheus Home dir 6 | orpheus_home: #Please specify the Orpheus home directory 7 | 8 | # Meta dir 9 | # TODO: hide them under orpheus_home 10 | log_path: .meta/logs/log.out 11 | user_log: .meta/user_log 12 | commit_path: .meta/commit_tables 13 | meta_info: .meta/tracker 14 | meta_modifiedIds: .meta/modified 15 | vGraph_json: .meta/vGraph_json 16 | 17 | -------------------------------------------------------------------------------- /orpheus/interface/main/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models 5 | 6 | # Create your models here. 7 | 8 | class CVDs(models.Model): 9 | name = models.CharField(max_length=200) 10 | 11 | class PrivateFiles(models.Model): 12 | name = models.CharField(max_length=200) 13 | 14 | class PrivateTables(models.Model): 15 | name = models.CharField(max_length=200) -------------------------------------------------------------------------------- /orpheus/interface/orpheus_ui/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for orpheus_ui project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "orpheus_ui.settings") 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /orpheus/core/helper.py: -------------------------------------------------------------------------------- 1 | 2 | class Print(object): 3 | def __init__ (self, request = None): 4 | self.request = request 5 | 6 | def pmessage(self, msg): 7 | if self.request: 8 | from django.contrib import messages 9 | messages.info(self.request, msg) 10 | else: 11 | print "%s" % msg 12 | 13 | def perror(self, err): 14 | if self.request: 15 | from django.contrib import messages 16 | messages.error(self.request, err) 17 | else: 18 | import click 19 | click.secho(str(err), fg='red') -------------------------------------------------------------------------------- /orpheus/interface/main/static/assets/css/ie10-viewport-bug-workaround.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * IE10 viewport hack for Surface/desktop Windows 8 bug 3 | * Copyright 2014-2015 Twitter, Inc. 4 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 5 | */ 6 | 7 | /* 8 | * See the Getting Started docs for more information: 9 | * http://getbootstrap.com/getting-started/#support-ie10-width 10 | */ 11 | @-ms-viewport { width: device-width; } 12 | @-o-viewport { width: device-width; } 13 | @viewport { width: device-width; } 14 | -------------------------------------------------------------------------------- /future/TODOs: -------------------------------------------------------------------------------- 1 | ### Todos 2 | - tracker overwrite, get rid of the old mapping 3 | - ~~change cvd to public schema (not work for PostgreSQL due to INFORMATION_SCHEMA.COLUMNS)~~ 4 | - Update meta after dropping dataset 5 | - Update load current state path from .meta/config 6 | - $ORPHEUS_HOME$ in bashrc 7 | - Verbose mode 8 | - Mock testing (mock SQL commands is enough) 9 | - Security of user information 10 | - Compact index table 11 | - Grant access 12 | - Eliminate rid in the SQL output 13 | - Test more cases in SQL parse 14 | 15 | ### BUGs 16 | - 17 | -------------------------------------------------------------------------------- /orpheus/interface/main/static/dist/js/npm.js: -------------------------------------------------------------------------------- 1 | // This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment. 2 | require('../../js/transition.js') 3 | require('../../js/alert.js') 4 | require('../../js/button.js') 5 | require('../../js/carousel.js') 6 | require('../../js/collapse.js') 7 | require('../../js/dropdown.js') 8 | require('../../js/modal.js') 9 | require('../../js/tooltip.js') 10 | require('../../js/popover.js') 11 | require('../../js/scrollspy.js') 12 | require('../../js/tab.js') 13 | require('../../js/affix.js') -------------------------------------------------------------------------------- /orpheus/core/orpheus_exceptions.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Any system level general exceptions should be included here 3 | Module related exception should go into each module 4 | ''' 5 | 6 | class BadStateError(Exception): 7 | def __init__(self, value): 8 | self.value = value 9 | def __str__(self): 10 | return self.value 11 | 12 | class NotImplementedError(Exception): 13 | def __init__(self, value): 14 | self.value = value 15 | def __str__(self): 16 | return self.value 17 | 18 | class BadParametersError(Exception): 19 | def __init__(self, value): 20 | self.value = value 21 | def __str__(self): 22 | return self.value -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled source # 2 | ################### 3 | *.com 4 | *.class 5 | *.dll 6 | *.exe 7 | *.o 8 | *.so 9 | *.pyc 10 | 11 | # Packages # 12 | ############ 13 | # it's better to unpack these files and commit the raw source 14 | # git has its own built in compression methods 15 | *.7z 16 | *.dmg 17 | *.gz 18 | *.iso 19 | *.jar 20 | *.rar 21 | *.tar 22 | *.zip 23 | 24 | # Logs and databases # 25 | ###################### 26 | *.log 27 | *.sql 28 | *.sqlite 29 | 30 | # OS generated files # 31 | ###################### 32 | .DS_Store 33 | .DS_Store? 34 | ._* 35 | .Spotlight-V100 36 | .Trashes 37 | ehthumbs.db 38 | Thumbs.db 39 | 40 | # Orpheus related files # 41 | .meta/users 42 | .meta/logs/log.out 43 | -------------------------------------------------------------------------------- /orpheus/interface/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "orpheus_ui.settings") 7 | try: 8 | from django.core.management import execute_from_command_line 9 | except ImportError: 10 | # The above import may fail for some other reason. Ensure that the 11 | # issue is really that Django is missing to avoid masking other 12 | # exceptions on Python 2. 13 | try: 14 | import django 15 | except ImportError: 16 | raise ImportError( 17 | "Couldn't import Django. Are you sure it's installed and " 18 | "available on your PYTHONPATH environment variable? Did you " 19 | "forget to activate a virtual environment?" 20 | ) 21 | raise 22 | execute_from_command_line(sys.argv) -------------------------------------------------------------------------------- /orpheus/interface/main/static/assets/js/ie10-viewport-bug-workaround.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * IE10 viewport hack for Surface/desktop Windows 8 bug 3 | * Copyright 2014-2015 Twitter, Inc. 4 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 5 | */ 6 | 7 | // See the Getting Started docs for more information: 8 | // http://getbootstrap.com/getting-started/#support-ie10-width 9 | 10 | (function () { 11 | 'use strict'; 12 | 13 | if (navigator.userAgent.match(/IEMobile\/10\.0/)) { 14 | var msViewportStyle = document.createElement('style') 15 | msViewportStyle.appendChild( 16 | document.createTextNode( 17 | '@-ms-viewport{width:auto!important}' 18 | ) 19 | ) 20 | document.querySelector('head').appendChild(msViewportStyle) 21 | } 22 | 23 | })(); 24 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | name='orpheus', 5 | version='1.0.2', 6 | description='OrpheusDB command line tool', 7 | packages=['orpheus', 'orpheus.clt', 'orpheus.core'], 8 | url='http://orpheus-db.github.io/', 9 | # py_modules=['db', 10 | # 'encryption', 11 | # 'metadata', 12 | # 'orpheus_const', 13 | # 'orpheus_exceptions', 14 | # 'orpheus_sqlparse', 15 | # 'relation', 16 | # 'orpheus_schema_parser', 17 | # 'user_control', 18 | # 'version', 19 | # 'access', 20 | # 'click_entry'], 21 | #py_modules=['click_entry'], 22 | install_requires=[ 23 | 'Click', 'psycopg2-binary', 'PyYAML', 'pandas', 'pyparsing', 'sqlparse', 'django' 24 | #'Click' 25 | ], 26 | license='MIT', 27 | entry_points=''' 28 | [console_scripts] 29 | orpheus=orpheus.clt.click_entry:cli 30 | ''' 31 | ) 32 | -------------------------------------------------------------------------------- /orpheus/core/db.py: -------------------------------------------------------------------------------- 1 | # Database Manager exceptions 2 | class UserNotSetError(Exception): 3 | def __init__(self, value): 4 | self.value = value 5 | def __str__(self): 6 | return self.value 7 | 8 | class ConnectionError(Exception): 9 | def __init__(self, value): 10 | self.value = value 11 | def __str__(self): 12 | return self.value 13 | 14 | class OperationError(Exception): 15 | def __str__(self): 16 | return 'Operation failure, check system parameters' 17 | 18 | class DatasetExistsError(Exception): 19 | def __init__(self, value, user): 20 | self.value = value 21 | self.user = user 22 | def __str__(self): 23 | return 'Dataset [%s] exists under user [%s]' % (self.value, self.user) 24 | 25 | class SQLSyntaxError(Exception): 26 | def __str__(self): 27 | return 'Error during executing sql, please revise!' 28 | -------------------------------------------------------------------------------- /orpheus/interface/orpheus_ui/urls.py: -------------------------------------------------------------------------------- 1 | """orpheus_ui URL Configuration 2 | 3 | The `urlpatterns` list routes URLs to views. For more information please see: 4 | https://docs.djangoproject.com/en/1.11/topics/http/urls/ 5 | Examples: 6 | Function views 7 | 1. Add an import: from my_app import views 8 | 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') 9 | Class-based views 10 | 1. Add an import: from other_app.views import Home 11 | 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') 12 | Including another URLconf 13 | 1. Import the include() function: from django.conf.urls import url, include 14 | 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) 15 | """ 16 | from django.conf.urls import include, url 17 | from django.contrib import admin 18 | 19 | urlpatterns = [ 20 | url(r'^main/', include('main.urls')), 21 | url(r'^admin/', admin.site.urls), 22 | ] 23 | -------------------------------------------------------------------------------- /test/small_table_test.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE index (vid int, rlist int[]); 2 | INSERT INTO index VALUES (1, ARRAY[1,2]), (2, ARRAY[1,2,3]), (3, ARRAY[1,4,5]), (4, ARRAY[1,4,5,6]); 3 | 4 | create table datatable(rid serial primary key, employee_id int, age int, salary float); 5 | insert into datatable(employee_id, age, salary) values (66000001,25,6500), (66000002,30,7500), (66000003,25,7000),(66000004,30,8000), (66000002,30,10000), (66000005, 35,10000 ); 6 | 7 | create table version(vid int primary key, num_records int, parent integer[], children integer[], create_time timestamp, commit_time timestamp, commit_msg text); 8 | insert into version values (1, 3, '{-1}', '{2,3}', '2016-03-15 00:00:49.387281-05', '2016-03-15 00:00:49.387281-05', 'init version'), 9 | (2, 4, '{1}', '{4}', '2016-03-15 00:00:49.387281-05', '2016-03-15 00:00:49.387281-05', 'checkout from v1'), (3, 4, '{1}', '{4}', '2016-03-15 00:00:49.387281-05', '2016-03-15 00:00:49.387281-05', 'checkout from v1'), (4, 4, '{2,3}', '{}', '2016-03-15 00:00:49.387281-05', '2016-03-15 00:00:49.387281-05', 'cehckout from v2 and v3'); 10 | -------------------------------------------------------------------------------- /orpheus/core/orpheus_schema_parser.py: -------------------------------------------------------------------------------- 1 | from orpheus_exceptions import NotImplementedError 2 | 3 | # This file is static file for parsing user given schema information 4 | # Expected comma seperated file as default 5 | 6 | class FormatError(Exception): 7 | def __init__(self, filename): 8 | self.filename = filename 9 | def __str__(self): 10 | return "Error parsing %s, please check format" % self.filename 11 | 12 | class ReservedFieldError(Exception): 13 | def __init__(self, field): 14 | self.field = field 15 | def __str__(self): 16 | return "Error parsing field %s, reserved field" % self.field 17 | 18 | class Parser(object): 19 | 20 | @staticmethod 21 | def get_attribute_from_file(abs_path, delimiter=','): 22 | # Postgresql supportted type 23 | PREDEFINED_TYPE = set(['int', 'float', 'text']) # and many more to be added 24 | 25 | # Reserved attribute names 26 | RESERVED_ATTRIBUTES = set(['rid', 'vid']) 27 | 28 | 29 | attribute_name, attribute_type = [],[] 30 | with open(abs_path, 'r') as f: 31 | for line in f: 32 | try: 33 | [cur_attribute, cur_attribute_type] = line.rstrip().split(delimiter) 34 | if cur_attribute_type not in PREDEFINED_TYPE: 35 | raise NotImplementedError("Type %s not supported" % cur_attribute_type) 36 | return 37 | if cur_attribute in RESERVED_ATTRIBUTES: 38 | raise ReservedFieldError(cur_attribute) 39 | return 40 | 41 | # use generator if file is really large 42 | attribute_name.append(cur_attribute) 43 | attribute_type.append(cur_attribute_type) 44 | except ValueError: 45 | raise FormatError(abs_path) 46 | return 47 | return attribute_name, attribute_type 48 | 49 | 50 | -------------------------------------------------------------------------------- /orpheus/interface/main/static/assets/js/ie-emulation-modes-warning.js: -------------------------------------------------------------------------------- 1 | // NOTICE!! DO NOT USE ANY OF THIS JAVASCRIPT 2 | // IT'S JUST JUNK FOR OUR DOCS! 3 | // ++++++++++++++++++++++++++++++++++++++++++ 4 | /*! 5 | * Copyright 2014-2015 Twitter, Inc. 6 | * 7 | * Licensed under the Creative Commons Attribution 3.0 Unported License. For 8 | * details, see https://creativecommons.org/licenses/by/3.0/. 9 | */ 10 | // Intended to prevent false-positive bug reports about Bootstrap not working properly in old versions of IE due to folks testing using IE's unreliable emulation modes. 11 | (function () { 12 | 'use strict'; 13 | 14 | function emulatedIEMajorVersion() { 15 | var groups = /MSIE ([0-9.]+)/.exec(window.navigator.userAgent) 16 | if (groups === null) { 17 | return null 18 | } 19 | var ieVersionNum = parseInt(groups[1], 10) 20 | var ieMajorVersion = Math.floor(ieVersionNum) 21 | return ieMajorVersion 22 | } 23 | 24 | function actualNonEmulatedIEMajorVersion() { 25 | // Detects the actual version of IE in use, even if it's in an older-IE emulation mode. 26 | // IE JavaScript conditional compilation docs: https://msdn.microsoft.com/library/121hztk3%28v=vs.94%29.aspx 27 | // @cc_on docs: https://msdn.microsoft.com/library/8ka90k2e%28v=vs.94%29.aspx 28 | var jscriptVersion = new Function('/*@cc_on return @_jscript_version; @*/')() // jshint ignore:line 29 | if (jscriptVersion === undefined) { 30 | return 11 // IE11+ not in emulation mode 31 | } 32 | if (jscriptVersion < 9) { 33 | return 8 // IE8 (or lower; haven't tested on IE<8) 34 | } 35 | return jscriptVersion // IE9 or IE10 in any mode, or IE11 in non-IE11 mode 36 | } 37 | 38 | var ua = window.navigator.userAgent 39 | if (ua.indexOf('Opera') > -1 || ua.indexOf('Presto') > -1) { 40 | return // Opera, which might pretend to be IE 41 | } 42 | var emulated = emulatedIEMajorVersion() 43 | if (emulated === null) { 44 | return // Not IE 45 | } 46 | var nonEmulated = actualNonEmulatedIEMajorVersion() 47 | 48 | if (emulated !== nonEmulated) { 49 | window.alert('WARNING: You appear to be using IE' + nonEmulated + ' in IE' + emulated + ' emulation mode.\nIE emulation modes can behave significantly differently from ACTUAL older versions of IE.\nPLEASE DON\'T FILE BOOTSTRAP BUGS based on testing in IE emulation modes!') 50 | } 51 | })(); 52 | -------------------------------------------------------------------------------- /orpheus/core/user_control.py: -------------------------------------------------------------------------------- 1 | from encryption import EncryptionTool 2 | 3 | import orpheus_exceptions as sys_exception 4 | import json 5 | 6 | class LocalUserExistError(Exception): 7 | def __init__(self, value): 8 | self.value = value 9 | def __str__(self): 10 | return self.value 11 | 12 | class InvalidCredentialError(Exception): 13 | def __str__(self): 14 | return "credentials does not match records" 15 | 16 | class UserManager(object): 17 | @classmethod 18 | def config_path(self): 19 | return ".meta/config" 20 | 21 | @classmethod 22 | def user_path(self): 23 | return ".meta/users" 24 | 25 | @classmethod 26 | def check_user_exist(cls, user): 27 | from os import listdir 28 | from os.path import isfile 29 | return user in [usr for usr in listdir(cls.user_path())] and isfile("/".join([cls.user_path(), usr, 'config'])) 30 | 31 | @classmethod 32 | def create_user(cls, user, password=None): 33 | from os import makedirs 34 | if cls.check_user_exist(user): 35 | return None 36 | user_obj = { 37 | 'user' : user 38 | } 39 | #if password: 40 | passphrase = EncryptionTool.passphrase_hash(password) 41 | user_obj['passphrase'] = passphrase 42 | 43 | user_directory = '/'.join([cls.user_path(),user]) 44 | makedirs(user_directory) # make the directory, need to check if have permission 45 | with open('/'.join([user_directory, 'config']), 'w+') as f: 46 | f.write(json.dumps(user_obj)) 47 | return 1 48 | 49 | 50 | # this method is very dangrous! use caution 51 | @classmethod 52 | def delete_user(cls, user, password): 53 | pass 54 | 55 | 56 | @classmethod 57 | def get_current_state(cls): 58 | try: 59 | with open(cls.config_path(), 'r') as f: 60 | config_info = json.loads(f.readline()) 61 | except Exception as inst: 62 | return None 63 | return config_info 64 | 65 | @classmethod 66 | def write_current_state(cls, obj): 67 | user_obj = {"database": "", "user": "", "passphrase": ""} 68 | for key in user_obj: 69 | user_obj[key] = obj[key] 70 | with open(cls.config_path(), 'w') as f: 71 | f.write(json.dumps(user_obj)) 72 | 73 | 74 | @classmethod 75 | def verify_credential(cls, user, raw): 76 | if cls.check_user_exist(user): 77 | user_obj = cls.__get_user_config(user) 78 | if user_obj['passphrase'] == EncryptionTool.passphrase_hash(raw): 79 | return True 80 | raise InvalidCredentialError() 81 | return False 82 | 83 | @classmethod 84 | def __get_user_config(cls, user): 85 | with open('/'.join([cls.user_path(), user, 'config']), 'r') as f: 86 | user_obj = json.loads(f.readline()) 87 | return user_obj 88 | 89 | # for debug purpose 90 | @classmethod 91 | def __list_user(cls): 92 | from os import listdir 93 | return [usr for usr in listdir(cls.user_path())] 94 | -------------------------------------------------------------------------------- /orpheus/core/vgraph.py: -------------------------------------------------------------------------------- 1 | import orpheus_exceptions as sys_exception 2 | import os 3 | import json 4 | 5 | #the version graph at the frontend 6 | class VersionGraph(object): 7 | def __init__(self, config, request): 8 | try: 9 | self.request = request 10 | self.vGraph_json = config['vGraph_json'] 11 | if not self.vGraph_json.endswith("/"): 12 | self.vGraph_json += "/" 13 | except KeyError as e: 14 | raise sys_exception.BadStateError("Context missing field %s, abort" % e.args[0]) 15 | 16 | def __gen_json_object(self, vid, hasChildren): 17 | data = {} 18 | data['name'] = int(vid) 19 | if hasChildren: 20 | data['children'] = [] 21 | return data 22 | 23 | def init_vGraph_json(self, dataset, vid): 24 | fpath = self.vGraph_json + dataset 25 | data = self.__gen_json_object(vid,True) 26 | f = open(fpath, 'w') 27 | f.write(json.dumps(data)) 28 | f.close() 29 | 30 | def delete_vGraph_json(self, dataset): 31 | fpath = self.vGraph_json + dataset 32 | try: 33 | os.remove(fpath) 34 | except OSError: 35 | pass 36 | 37 | # Find the JSON object in data format whose vid = pvid 38 | def __insert_into_parent_node(self, data, pvid, new_node): 39 | visited, stack = set(), [(data['name'], data['children'])] 40 | while stack: 41 | node = stack.pop() 42 | vid = int(node[0]) 43 | 44 | if vid not in visited: 45 | visited.add(vid) 46 | if int(vid) == int(pvid): 47 | node[1].append(new_node) 48 | return True 49 | for child in node[1]: 50 | if child['name'] not in visited: 51 | stack.append((child['name'], child['children'])) 52 | return False 53 | 54 | def update_vGraph_json(self, dataset, vid, parents): 55 | # Load corresponding JSON file 56 | try: 57 | fpath = self.vGraph_json + dataset 58 | data = json.loads(open(fpath).read()) 59 | 60 | new_node = self.__gen_json_object(vid, True) 61 | 62 | if len(parents) > 1: 63 | new_node['parent_'] = [] 64 | for vid in parents[1:]: 65 | new_node['parent_'].append(self.__gen_json_object(vid, False)) 66 | primary_parent_vid = int(parents[0]) 67 | 68 | success = self.__insert_into_parent_node(data, primary_parent_vid, new_node) 69 | if not success: 70 | raise KeyError 71 | f = open(fpath, 'w') 72 | f.write(json.dumps(data)) 73 | f.close() 74 | except KeyError: 75 | from django.contrib import messages 76 | messages.error(self.request,"Could not find its parent JSON object") 77 | raise KeyError 78 | return -------------------------------------------------------------------------------- /orpheus/interface/main/static/dashboard.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Base structure 3 | */ 4 | 5 | /* Move down content because we have a fixed navbar that is 50px tall */ 6 | body { 7 | padding-top: 50px; 8 | font-size: 1.8em; 9 | } 10 | 11 | 12 | /* 13 | * Global add-ons 14 | */ 15 | 16 | .sub-header { 17 | padding-bottom: 10px; 18 | border-bottom: 1px solid #eee; 19 | 20 | } 21 | 22 | /* 23 | * Top navigation 24 | * Hide default border to remove 1px line. 25 | */ 26 | .navbar-fixed-top { 27 | border: 0; 28 | } 29 | 30 | /* 31 | * Sidebar 32 | */ 33 | 34 | /* Hide for mobile, show later */ 35 | .sidebar { 36 | display: none; 37 | } 38 | @media (min-width: 768px) { 39 | .sidebar { 40 | position: fixed; 41 | top: 51px; 42 | bottom: 0; 43 | left: 0; 44 | z-index: 1000; 45 | display: block; 46 | padding: 20px; 47 | overflow-x: hidden; 48 | overflow-y: auto; /* Scrollable contents if viewport is shorter than content. */ 49 | background-color: #f5f5f5; 50 | border-right: 1px solid #eee; 51 | } 52 | } 53 | 54 | /* Sidebar navigation */ 55 | .nav-sidebar { 56 | margin-right: -21px; /* 20px padding + 1px border */ 57 | margin-bottom: 20px; 58 | margin-left: -20px; 59 | 60 | } 61 | .nav-sidebar > li > a { 62 | padding-right: 20px; 63 | padding-left: 20px; 64 | } 65 | .nav-sidebar > .active > a, 66 | .nav-sidebar > .active > a:hover, 67 | .nav-sidebar > .active > a:focus { 68 | color: #fff; 69 | background-color: #428bca; 70 | } 71 | 72 | /* 73 | * Main content 74 | */ 75 | 76 | .main { 77 | padding: 20px; 78 | } 79 | @media (min-width: 768px) { 80 | .main { 81 | padding-right: 40px; 82 | padding-left: 40px; 83 | } 84 | } 85 | .main .page-header { 86 | margin-top: 0; 87 | } 88 | 89 | 90 | /* 91 | * Placeholder dashboard ideas 92 | */ 93 | 94 | .placeholders { 95 | margin-bottom: 30px; 96 | text-align: center; 97 | } 98 | .placeholders h4 { 99 | margin-bottom: 0; 100 | 101 | } 102 | .placeholder { 103 | margin-bottom: 20px; 104 | } 105 | .placeholder img { 106 | display: inline-block; 107 | border-radius: 50%; 108 | } 109 | 110 | 111 | 112 | 113 | /* 114 | * Tree Representation 115 | */ 116 | .node circle { 117 | fill: #fff; 118 | stroke: steelblue; 119 | stroke-width: 4px; 120 | } 121 | 122 | .node text { font: 16px sans-serif; } 123 | 124 | .link { 125 | fill: none; 126 | stroke: #ccc; 127 | stroke-width: 4px; 128 | } 129 | 130 | 131 | /* Link like button */ 132 | .btn-link{ 133 | border:none; 134 | outline:none; 135 | background:none; 136 | cursor:pointer; 137 | color:steelblue; 138 | padding:20px; 139 | font-family:inherit; 140 | font-size:inherit; 141 | } 142 | 143 | pre { 144 | display: block; 145 | margin: 1em 0; 146 | padding: 0 px; 147 | width: auto; 148 | overflow: auto; 149 | margin-bottom: 0px; 150 | background-color: #EEEEEE; 151 | 152 | font-family: inherit; 153 | font-size: inherit; 154 | } 155 | -------------------------------------------------------------------------------- /orpheus/interface/main/static/assets/css/src/pygments-manni.css: -------------------------------------------------------------------------------- 1 | .hll { background-color: #ffffcc } 2 | /*{ background: #f0f3f3; }*/ 3 | .c { color: #999; } /* Comment */ 4 | .err { color: #AA0000; background-color: #FFAAAA } /* Error */ 5 | .k { color: #006699; } /* Keyword */ 6 | .o { color: #555555 } /* Operator */ 7 | .cm { color: #999; } /* Comment.Multiline */ /* Edited to remove italics and make into comment */ 8 | .cp { color: #009999 } /* Comment.Preproc */ 9 | .c1 { color: #999; } /* Comment.Single */ 10 | .cs { color: #999; } /* Comment.Special */ 11 | .gd { background-color: #FFCCCC; border: 1px solid #CC0000 } /* Generic.Deleted */ 12 | .ge { font-style: italic } /* Generic.Emph */ 13 | .gr { color: #FF0000 } /* Generic.Error */ 14 | .gh { color: #003300; } /* Generic.Heading */ 15 | .gi { background-color: #CCFFCC; border: 1px solid #00CC00 } /* Generic.Inserted */ 16 | .go { color: #AAAAAA } /* Generic.Output */ 17 | .gp { color: #000099; } /* Generic.Prompt */ 18 | .gs { } /* Generic.Strong */ 19 | .gu { color: #003300; } /* Generic.Subheading */ 20 | .gt { color: #99CC66 } /* Generic.Traceback */ 21 | .kc { color: #006699; } /* Keyword.Constant */ 22 | .kd { color: #006699; } /* Keyword.Declaration */ 23 | .kn { color: #006699; } /* Keyword.Namespace */ 24 | .kp { color: #006699 } /* Keyword.Pseudo */ 25 | .kr { color: #006699; } /* Keyword.Reserved */ 26 | .kt { color: #007788; } /* Keyword.Type */ 27 | .m { color: #FF6600 } /* Literal.Number */ 28 | .s { color: #d44950 } /* Literal.String */ 29 | .na { color: #4f9fcf } /* Name.Attribute */ 30 | .nb { color: #336666 } /* Name.Builtin */ 31 | .nc { color: #00AA88; } /* Name.Class */ 32 | .no { color: #336600 } /* Name.Constant */ 33 | .nd { color: #9999FF } /* Name.Decorator */ 34 | .ni { color: #999999; } /* Name.Entity */ 35 | .ne { color: #CC0000; } /* Name.Exception */ 36 | .nf { color: #CC00FF } /* Name.Function */ 37 | .nl { color: #9999FF } /* Name.Label */ 38 | .nn { color: #00CCFF; } /* Name.Namespace */ 39 | .nt { color: #2f6f9f; } /* Name.Tag */ 40 | .nv { color: #003333 } /* Name.Variable */ 41 | .ow { color: #000000; } /* Operator.Word */ 42 | .w { color: #bbbbbb } /* Text.Whitespace */ 43 | .mf { color: #FF6600 } /* Literal.Number.Float */ 44 | .mh { color: #FF6600 } /* Literal.Number.Hex */ 45 | .mi { color: #FF6600 } /* Literal.Number.Integer */ 46 | .mo { color: #FF6600 } /* Literal.Number.Oct */ 47 | .sb { color: #CC3300 } /* Literal.String.Backtick */ 48 | .sc { color: #CC3300 } /* Literal.String.Char */ 49 | .sd { color: #CC3300; font-style: italic } /* Literal.String.Doc */ 50 | .s2 { color: #CC3300 } /* Literal.String.Double */ 51 | .se { color: #CC3300; } /* Literal.String.Escape */ 52 | .sh { color: #CC3300 } /* Literal.String.Heredoc */ 53 | .si { color: #AA0000 } /* Literal.String.Interpol */ 54 | .sx { color: #CC3300 } /* Literal.String.Other */ 55 | .sr { color: #33AAAA } /* Literal.String.Regex */ 56 | .s1 { color: #CC3300 } /* Literal.String.Single */ 57 | .ss { color: #FFCC33 } /* Literal.String.Symbol */ 58 | .bp { color: #336666 } /* Name.Builtin.Pseudo */ 59 | .vc { color: #003333 } /* Name.Variable.Class */ 60 | .vg { color: #003333 } /* Name.Variable.Global */ 61 | .vi { color: #003333 } /* Name.Variable.Instance */ 62 | .il { color: #FF6600 } /* Literal.Number.Integer.Long */ 63 | 64 | .css .o, 65 | .css .o + .nt, 66 | .css .nt + .nt { color: #999; } 67 | -------------------------------------------------------------------------------- /orpheus/interface/orpheus_ui/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for orpheus_ui project. 3 | 4 | Generated by 'django-admin startproject' using Django 1.11. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/1.11/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/1.11/ref/settings/ 11 | """ 12 | 13 | import os 14 | 15 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 16 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 17 | 18 | 19 | # Quick-start development settings - unsuitable for production 20 | # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ 21 | 22 | # SECURITY WARNING: keep the secret key used in production secret! 23 | SECRET_KEY = '@l+9s7+*(yo%01)l^cxw%r91pd7r)296b4lm@kb$ww!%_8xm6$' 24 | 25 | # SECURITY WARNING: don't run with debug turned on in production! 26 | DEBUG = True 27 | 28 | ALLOWED_HOSTS = [] 29 | 30 | 31 | # Application definition 32 | 33 | INSTALLED_APPS = [ 34 | 'main.apps.MainConfig', 35 | 'django.contrib.admin', 36 | 'django.contrib.auth', 37 | 'django.contrib.contenttypes', 38 | 'django.contrib.sessions', 39 | 'django.contrib.messages', 40 | 'django.contrib.staticfiles', 41 | ] 42 | 43 | MIDDLEWARE = [ 44 | 'django.middleware.security.SecurityMiddleware', 45 | 'django.contrib.sessions.middleware.SessionMiddleware', 46 | 'django.middleware.common.CommonMiddleware', 47 | 'django.middleware.csrf.CsrfViewMiddleware', 48 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 49 | 'django.contrib.messages.middleware.MessageMiddleware', 50 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 51 | ] 52 | 53 | ROOT_URLCONF = 'orpheus_ui.urls' 54 | 55 | TEMPLATES = [ 56 | { 57 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 58 | 'DIRS': [], 59 | 'APP_DIRS': True, 60 | 'OPTIONS': { 61 | 'context_processors': [ 62 | 'django.template.context_processors.debug', 63 | 'django.template.context_processors.request', 64 | 'django.contrib.auth.context_processors.auth', 65 | 'django.contrib.messages.context_processors.messages', 66 | ], 67 | }, 68 | }, 69 | ] 70 | 71 | WSGI_APPLICATION = 'orpheus_ui.wsgi.application' 72 | 73 | 74 | # Database 75 | # https://docs.djangoproject.com/en/1.11/ref/settings/#databases 76 | 77 | DATABASES = { 78 | 'default': { 79 | 'ENGINE': 'django.db.backends.postgresql', 80 | 'NAME': '', #Please specify 81 | 'USER': '', #Please specify 82 | 'PASSWORD': '', 83 | 'HOST': 'localhost', 84 | 'PORT': '5432', 85 | } 86 | } 87 | 88 | # Password validation 89 | # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators 90 | 91 | AUTH_PASSWORD_VALIDATORS = [ 92 | { 93 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 94 | }, 95 | { 96 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 97 | }, 98 | { 99 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 100 | }, 101 | { 102 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 103 | }, 104 | ] 105 | 106 | 107 | # Internationalization 108 | # https://docs.djangoproject.com/en/1.11/topics/i18n/ 109 | 110 | LANGUAGE_CODE = 'en-us' 111 | 112 | TIME_ZONE = 'UTC' 113 | 114 | USE_I18N = True 115 | 116 | USE_L10N = True 117 | 118 | USE_TZ = True 119 | 120 | 121 | # Static files (CSS, JavaScript, Images) 122 | # https://docs.djangoproject.com/en/1.11/howto/static-files/ 123 | 124 | STATIC_URL = '/static/' 125 | 126 | -------------------------------------------------------------------------------- /orpheus/core/version.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import orpheus_const as const 3 | from helper import Print 4 | 5 | class VersionManager(object): 6 | def __init__(self, conn, request = None): 7 | self.conn = conn 8 | self.p = Print(request) 9 | 10 | def init_version_graph_dataset(self, dataset, list_of_rid, user): 11 | # using CREATE SQL command 12 | self.p.pmessage("Initializing the version table ...") 13 | self.conn.refresh_cursor() 14 | init_version_sql = "INSERT INTO %s VALUES \ 15 | (1, '%s', %s, '{-1}', '{}', '%s', '%s', 'init commit');" % \ 16 | (const.PUBLIC_SCHEMA + dataset + const.VERSIONTABLE_SUFFIX, user, str(len(list_of_rid)), str(datetime.datetime.now()), str(datetime.datetime.now())) 17 | self.conn.cursor.execute(init_version_sql) 18 | self.conn.connect.commit() 19 | 20 | def init_index_table_dataset(self, dataset, list_of_rid): 21 | self.p.pmessage("Initializing the index table ...") 22 | 23 | self.conn.refresh_cursor() 24 | 25 | init_indextbl_sql = "INSERT INTO %s \ 26 | VALUES \ 27 | (1, '{%s}');" % (const.PUBLIC_SCHEMA + dataset + const.INDEXTABLE_SUFFIX, str(",".join(map(str, list_of_rid)))) 28 | self.conn.cursor.execute(init_indextbl_sql) 29 | self.conn.connect.commit() 30 | 31 | def update_version_graph(self, version_graph_name, user, num_of_records, parent_list, table_create_time, msg): 32 | # create new version 33 | parent_list_string='\'{' + ', '.join(parent_list) + '}\'' 34 | commit_time = str(datetime.datetime.now()) 35 | table_create_time = table_create_time or commit_time 36 | max_vid = self.get_curt_max_vid(version_graph_name) 37 | curt_vid = max_vid + 1 38 | values = "(%s, '%s', %s, %s, %s, %s, %s, %s)" % (curt_vid, user, num_of_records, parent_list_string, "'{}'", "'%s'" % table_create_time, "'%s'" % commit_time, "'%s'" % msg) 39 | sql = "INSERT INTO %s VALUES %s;"% (version_graph_name, values) 40 | # print sql 41 | self.conn.cursor.execute(sql) 42 | 43 | # update child column in the parent tuple 44 | target_parent_vid='{' + ','.join(parent_list) + '}' 45 | sql = "UPDATE %s SET children = ARRAY_APPEND(children, %s) WHERE vid = ANY('%s' :: int[]);" %(version_graph_name, curt_vid, target_parent_vid) 46 | self.conn.cursor.execute(sql) 47 | self.conn.connect.commit() 48 | return curt_vid 49 | 50 | def update_index_table(self, index_table_name, new_vid, new_rids): 51 | sql = 'INSERT INTO %s VALUES (%s, ARRAY%s);' % (index_table_name, new_vid, new_rids) 52 | self.conn.cursor.execute(sql) 53 | self.conn.connect.commit() 54 | 55 | 56 | def clean(self): 57 | self.p.pmessage("Version clean: Under Construction.") 58 | #messages.info(self.request, "Version clean: Under Construction.") 59 | 60 | def get_curt_max_vid(self,version_graph_name): 61 | sql = "SELECT MAX(vid) FROM %s;" % version_graph_name 62 | # print sql 63 | self.conn.cursor.execute(sql) 64 | result = self.conn.cursor.fetchall() 65 | # print result 66 | return int(result[0][0]) 67 | 68 | def select_records_of_version_list(self, vlist): 69 | targetv= ','.join(vlist) 70 | sql = "SELECT distinct rlist FROM indexTbl WHERE vlist && (ARRAY[%s]);"%targetv 71 | #print sql 72 | self.conn.cursor.execute(sql) 73 | data = self.conn.cursor.fetchall() 74 | data_string='' 75 | for item in data: 76 | for num in item[0]: 77 | # print num 78 | data_string+=str(num)+(',') 79 | data_string=data_string[0:len(data_string) - 1] 80 | data_string='{' + data_string+'}' 81 | # print data_string 82 | return data_string -------------------------------------------------------------------------------- /orpheus/interface/main/views.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.http import HttpResponse 5 | from django.template import loader, RequestContext 6 | from django.shortcuts import render 7 | from django.contrib import messages 8 | # Create your views here. 9 | from django.shortcuts import render 10 | from django.conf import settings 11 | 12 | from src.db import DatabaseManager 13 | from src.cmd_parser import Parser 14 | 15 | import json 16 | import os.path, os 17 | 18 | class PrivateFile: 19 | def __init__(self, x, y): 20 | self.name = x 21 | self.link = y 22 | 23 | def index(request): 24 | # TODO: add try-catch to catch exceptions 25 | context = {} 26 | table_list, cmd_string = None, "" 27 | 28 | # If command button is clicked 29 | cmd_btn = request.POST.get('command') 30 | 31 | # If command button is clicked 32 | explain_btn = request.POST.get('explain') 33 | 34 | # if view button is clicked 35 | view_btn = request.POST.get('view_btn') 36 | # if info button is clicked 37 | info = request.POST.get('info_btn') 38 | # if diff button is clicked 39 | diff = request.POST.get('diff_btn') 40 | 41 | # if diff button is clicked 42 | show_btn = request.POST.get('show_btn') 43 | 44 | # if private_file button 45 | priv_file_btn = request.POST.get('private_file') 46 | 47 | #if vGraph submit button is clicked 48 | vGraph_btn = request.POST.get('vGraph') 49 | if vGraph_btn: 50 | selected_cvd = request.POST.get('cvd_selection') 51 | request.session['prev_selection'] = selected_cvd 52 | else: 53 | if 'prev_selection' in request.session: 54 | prev_selection = request.session['prev_selection'] 55 | else: 56 | prev_selection = "" 57 | selected_cvd = prev_selection 58 | context['selected_cvd'] = selected_cvd 59 | 60 | if view_btn or diff or info or show_btn: 61 | cmd_string = request.POST.get('cmdExec') 62 | request.session['cmd_string'] = cmd_string 63 | else: # cmd_btn: 64 | cmd_string = request.POST.get('cmdText') 65 | request.session['cmd_string'] = cmd_string 66 | context['cmd_string'] = request.session['cmd_string'] 67 | if view_btn or diff or info or cmd_btn or show_btn or explain_btn: 68 | # Parse and execute the command 69 | try: 70 | p = Parser(request) 71 | if view_btn or diff: 72 | attributes = p.get_attributes(selected_cvd) 73 | cmd_string = cmd_string.replace('*', attributes) 74 | context['cmd_string'] = cmd_string 75 | request.session['cmd_string'] = cmd_string 76 | 77 | if cmd_string != "": 78 | table_list = p.parse(cmd_string, explain_btn) 79 | except Exception as e: 80 | messages.error(request, str(e)) 81 | 82 | fpath = '.meta/vGraph_json/%s' % selected_cvd 83 | if os.path.isfile(fpath): 84 | data = json.loads(open(fpath).read()) 85 | data = json.dumps(data) 86 | context['vGraph_json'] = data 87 | if selected_cvd: 88 | request.session['prev_selection'] = selected_cvd 89 | 90 | if table_list: 91 | context['table_list'] = table_list 92 | if priv_file_btn: 93 | # open the file with sublime 94 | try: 95 | p = Parser(request) 96 | fpath = p.config['orpheus_home'] + priv_file_btn 97 | if os.path.exists(fpath): 98 | os.system('open %s' % fpath) #TODO: Change to open with default editor after demo 99 | else: 100 | messages.error(request, "Unable to open file %s with the path %s" % (priv_file_btn, fpath)) 101 | except Exception as e: 102 | messages.error(request, str(e)) 103 | 104 | # Refresh table and file list 105 | config = settings.DATABASES['default'] 106 | config['database'] = config['NAME'] 107 | config['user'] = config['USER'] 108 | conn = DatabaseManager(config, request) 109 | 110 | cvd_sql = "SELECT * FROM %s.datasets" % (config["user"]) 111 | 112 | track_str = '' 113 | with open(".meta/tracker", "r") as fp: 114 | track_str = fp.readline().strip("\n") 115 | metadata = json.loads(track_str) 116 | 117 | context['cvds'] = [r[0] for r in conn.sql_records(cvd_sql)] 118 | context['files'] = [PrivateFile(k[(k.rfind("/")+1):], k) for k in metadata['file_map']] 119 | context['tables'] = [k for k in metadata['table_map']] 120 | 121 | return render(request, 'main/index.html', context) -------------------------------------------------------------------------------- /orpheus/core/metadata.py: -------------------------------------------------------------------------------- 1 | import json 2 | import datetime 3 | import orpheus_exceptions as sys_exception 4 | 5 | from helper import Print 6 | 7 | class MetadataManager(object): 8 | #TODO: refactor this class to static class for performance issue 9 | def __init__(self, config,request = None): 10 | # def __init__(self,user): 11 | # file path is in some format of 'user'. 12 | # The simpliest is "~/user/" 13 | try: 14 | self.file_path = ".." 15 | self.meta_info = config['meta_info'] 16 | self.meta_modifiedIds = config['meta_modifiedIds'] 17 | self.p = Print(request) 18 | except KeyError as e: 19 | raise sys_exception.BadStateError("Context missing field %s, abort" % e.args[0]) 20 | 21 | # Read metadata 22 | def load_meta(self): 23 | # print "load meta" 24 | with open(self.meta_info, 'r') as f: 25 | meta_info = f.readline() 26 | return json.loads(meta_info) 27 | 28 | # Commit metadata 29 | def commit_meta(self, new_meta): 30 | open(self.meta_info, 'w').close() 31 | f = open(self.meta_info, 'w') 32 | f.write(json.dumps(new_meta)) 33 | f.close() 34 | self.p.pmessage("Metadata committed") 35 | 36 | # can change to static method 37 | def update(self, to_table, to_file, dataset, vlist, old_meta): 38 | if to_table: 39 | self.update_tablemap(to_table, dataset, vlist, old_meta) 40 | if to_file: 41 | self.update_filemap(to_file, dataset, vlist, old_meta) 42 | # return old_meta 43 | 44 | def update_tablemap(self, to_table, dataset, vlist, old_meta): 45 | self.p.pmessage("Update metadata.") 46 | 47 | old_meta['table_map'][to_table] = dataset, vlist 48 | old_meta['table_created_time'][to_table] = str(datetime.datetime.now()) 49 | # self.commit_meta(_meta) 50 | return old_meta 51 | 52 | def update_filemap(self, to_file, dataset, vlist, old_meta): 53 | old_meta['file_map'][to_file] = dataset, vlist 54 | # keep track of time? 55 | return old_meta 56 | 57 | def load_modified(self): 58 | with open(self.meta_modifiedIds, 'r') as f: 59 | meta_modifiedIds = f.readline() 60 | return json.loads(meta_modifiedIds) 61 | 62 | def load_modified_id(self,table_name): 63 | _meta = self.load_meta() 64 | _modified=self.load_modified() 65 | modified_id = [] 66 | if table_name not in _meta['merged_tables']: 67 | try: 68 | modified_id = _modified[table_name] 69 | except KeyError: 70 | error_msg = "Table" + table_name + "does not have changes, nothing to commit " 71 | raise ValueError(error_msg) 72 | return 73 | return modified_id 74 | 75 | 76 | def load_parent_id(self,table_name, mapping='table_map'): 77 | parent_vid_lis = None 78 | try: 79 | _meta = self.load_meta() 80 | parent_vid_lis = _meta[mapping][table_name] 81 | # print type(parent_vid_lis) 82 | # print type(parent_vid_lis[1]) 83 | # parent_vid = "\'{%s}\'" % ",".join(str(x) for x in parent_vid_lis) 84 | return parent_vid_lis 85 | except KeyError as e: 86 | raise sys_exception.BadStateError("Metadata information missing field %s, abort" % e.args[0]) 87 | return None 88 | 89 | 90 | def update_parent_id(self,table_name, dataset, pvid, mapping='table_map'): 91 | plist = [str(pvid)] 92 | try: 93 | _meta = self.load_meta() 94 | #print "mapping %s " % mapping 95 | _meta[mapping][table_name] = dataset, plist 96 | #print _meta 97 | # print type(parent_vid_lis) 98 | # print type(parent_vid_lis[1]) 99 | # parent_vid = "\'{%s}\'" % ",".join(str(x) for x in parent_vid_lis) 100 | self.commit_meta(_meta) 101 | except KeyError as e: 102 | raise sys_exception.BadStateError("Metadata information missing field %s, abort" % e.args[0]) 103 | return 104 | 105 | def load_table_create_time(self,table_name): 106 | # load the table creat time 107 | try: 108 | _meta = self.load_meta() 109 | create_time = _meta['table_created_time'][table_name] 110 | return create_time 111 | except KeyError: 112 | # print "----- ERROR ------: %s" % "created time must be completed" 113 | return None 114 | -------------------------------------------------------------------------------- /orpheus/interface/main/static/tree.js: -------------------------------------------------------------------------------- 1 | 2 | var margin = {top: 50, right: 0, bottom: 0, left: 0}, 3 | width = 550 - margin.right - margin.left, 4 | height = 500 - margin.top - margin.bottom; 5 | 6 | var i = 0; 7 | 8 | var tree = d3.layout.tree() 9 | .size([height, width]); 10 | 11 | var svg = d3.select("#viz").append("svg") 12 | .attr("width", width + margin.right + margin.left) 13 | .attr("height", height + margin.top + margin.bottom) 14 | .append("g") 15 | .attr("transform", "translate(" + margin.left + "," + margin.top + ")"); 16 | 17 | 18 | var select_node = []; 19 | function update(data){ 20 | select_node = []; 21 | data = JSON.parse(data); 22 | var treeData = []; 23 | treeData.push(data); 24 | 25 | root = treeData[0]; 26 | source = root; 27 | 28 | // Compute the new tree layout.] 29 | var nodes = tree.nodes(root).reverse(), 30 | links = tree.links(nodes); 31 | 32 | // Normalize for fixed-depth. 33 | nodes.forEach(function(d) { d.y = d.depth * 80; }); 34 | 35 | // Declare the nodes 36 | var node = svg.selectAll("g.node") 37 | .data(nodes, function(d) { return d.id || (d.id = ++i); }); 38 | 39 | console.log(node); 40 | // Enter the nodes. 41 | var nodeEnter = node.enter().append("g") 42 | .attr("class", "node") 43 | .attr("transform", function(d) { 44 | return "translate(" + d.x + "," + d.y + ")"; }); 45 | 46 | nodeEnter.append("circle") 47 | .attr("r", 10) 48 | .attr("selected", "false") 49 | .style("fill", "#fff") 50 | .on('click', function(nodeEnter) { 51 | // If the node has not been selected -> change its stoke color to firebrick 52 | if (d3.select(this).attr("selected") == "false"){ 53 | d3.select(this).style("stroke", "firebrick") 54 | d3.select(this).attr("selected", "true") 55 | select_node.push(nodeEnter.name) 56 | }else{ 57 | d3.select(this).style("stroke", "steelblue") 58 | d3.select(this).attr("selected", "false") 59 | var index = select_node.indexOf(nodeEnter.name) 60 | select_node.splice(index, 1) 61 | } 62 | }); 63 | 64 | nodeEnter.append("text") 65 | .attr("y", function(d) { 66 | return d.children || d._children ? -18 : 18; }) 67 | .attr("dy", ".35em") 68 | .attr("text-anchor", "middle") 69 | .attr("selected", "false") 70 | .text(function(d) { return "vid = " + d.name; }) 71 | .style("fill-opacity", 1); 72 | 73 | var diagonal = d3.svg.diagonal(); 74 | 75 | for (var w = 0; w < nodes.length; w++) { 76 | var cur = nodes[w]; 77 | if(cur.parent_) { 78 | for (var j=0;j 2 | 3 | {% load static %} 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | OrpheusDB Demonstration 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 33 | 34 | 35 | 36 | 37 | 58 | 59 |
60 |
61 | 80 |
81 |

Command Input

82 |
83 | 84 |

Please enter either the SQL or the version control command below:

85 | 86 |
87 | 88 | 89 | 90 |

Output Results

91 |
92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 |
protein1protein2neighbor hoodcooccu rrencecoexpre ssion
ENSP273047ENSP26189005383
ENSP273047ENSP26189005383
ENSP300413ENSP2742424260164
ENSP300413ENSP2742424260164
ENSP300413ENSP2742424260164
ENSP300413ENSP2742424260164
ENSP309334ENSP3460220227975
ENSP309334ENSP3460220227975
161 |
162 |
163 | 164 |
165 |

Version Visualization

166 | 167 | 168 | 169 | 170 | 171 | 172 |
173 | 174 |
175 | 180 |
181 |
182 |
183 | 184 | 185 | 186 | 187 | 188 | 189 |
190 |
191 | 192 | 193 | 194 |
195 | 196 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | -------------------------------------------------------------------------------- /orpheus/interface/src/cmd_parser.py: -------------------------------------------------------------------------------- 1 | import shlex 2 | import os 3 | import yaml 4 | 5 | from django.contrib import messages 6 | from main.models import CVDs, PrivateFiles, PrivateTables 7 | from django.conf import settings 8 | 9 | from orpheus.core.executor import Executor 10 | from orpheus.core.orpheus_exceptions import BadStateError, NotImplementedError, BadParametersError 11 | from orpheus.core.relation import RelationManager 12 | from db import DatabaseManager 13 | import orpheus.core.orpheus_const as const 14 | from orpheus.core.vgraph import VersionGraph 15 | 16 | 17 | class CommandNotExistError(Exception): 18 | def __init__(self, cmd): 19 | self.name = cmd 20 | def __str__(self): 21 | return "Command '%s' does not exist" % self.name 22 | 23 | class CommandInvalidError(Exception): 24 | def __init__(self, cmd): 25 | self.name = cmd 26 | def __str__(self): 27 | return "Command '%s' is not valid" % self.name 28 | 29 | class Parser(object): 30 | def __init__(self, request): 31 | self.request = request 32 | self.config_file = 'config.yaml' 33 | if 'ORPHEUS_HOME' not in os.environ: 34 | os.environ['ORPHEUS_HOME'] = os.getcwd() 35 | self.config_path = os.environ['ORPHEUS_HOME'] + '/' + self.config_file 36 | else: 37 | self.config_path = os.environ['ORPHEUS_HOME'] + self.config_file 38 | try: 39 | with open(self.config_path, 'r') as f: 40 | self.config = yaml.load(f) 41 | assert(self.config['orpheus_home'] != None) 42 | 43 | if not self.config['orpheus_home'].endswith("/"): 44 | self.config['orpheus_home'] += "/" 45 | # if user overwrite the ORPHEUS_HOME, rewrite the enviormental parameters 46 | if 'orpheus_home' in self.config: 47 | os.environ['ORPHEUS_HOME'] = self.config['orpheus_home'] 48 | except (IOError, KeyError) as e: 49 | raise BadStateError("config.yaml file not found or data not clean, abort") 50 | return 51 | except AssertionError as e: 52 | raise BadStateError("orpheus_home not specified in config.yaml") 53 | return 54 | except: # unknown error 55 | raise BadStateError("Unknown error during loading the config file, abort") 56 | return 57 | 58 | # extract database related info from Django setting 59 | self.config['user'] = settings.DATABASES['default']['USER'] 60 | self.config['database'] = settings.DATABASES['default']['NAME'] 61 | 62 | def get_attributes(self, dataset): 63 | conn = DatabaseManager(self.config, self.request) 64 | rel = RelationManager(conn) 65 | datatable_attributes, _ = rel.get_datatable_attribute(dataset + const.DATATABLE_SUFFIX) 66 | return ",".join(datatable_attributes) 67 | 68 | def parse(self, cmd_string, explain_btn): 69 | # Preserve the string in commit command 70 | cmd = shlex.split(cmd_string) 71 | executor = Executor(self.config, self.request) 72 | if cmd[0] != "orpheus" or len(cmd) < 2: 73 | raise CommandNotExistError(cmd) 74 | return 75 | action = cmd[1] 76 | if action == "run" and explain_btn: 77 | action = "explain" 78 | elif explain_btn: 79 | messages.error(self.request, "This command could not be executed by the \'Explain\' button") 80 | return None 81 | try: 82 | if action == "init": 83 | input_file, dataset, table_name, schema = self.__parse_init(cmd) 84 | 85 | conn = DatabaseManager(self.config, self.request) 86 | executor.exec_init(input_file, dataset, table_name, schema, conn) 87 | 88 | 89 | elif action == "checkout": 90 | dataset, vlist, to_table, to_file, delimiters, header, ignore = self.__parse_checkout(cmd) 91 | conn = DatabaseManager(self.config, self.request) 92 | executor.exec_checkout(dataset, vlist, to_table, to_file, delimiters, header, ignore, conn) 93 | 94 | elif action == "commit": 95 | message, table_name, file_name, delimiters, header = self.__parse_commit(cmd) 96 | conn = DatabaseManager(self.config, self.request) 97 | parent_name, curt_vid, parent_list = executor.exec_commit(message, table_name, file_name, delimiters, header, conn) 98 | 99 | 100 | elif action == "run": 101 | sql = self.__parse_run(cmd) 102 | conn = DatabaseManager(self.config, self.request) 103 | attr_names, transactions = executor.exec_run(sql, conn) 104 | table_list = [] 105 | table_list.append((attr_names, transactions)) 106 | return table_list 107 | elif action == "explain": 108 | sql = self.__parse_run(cmd) 109 | conn = DatabaseManager(self.config, self.request) 110 | return executor.exec_explain(sql, conn) 111 | elif action == "drop": 112 | dataset = self.__parse_drop(cmd) 113 | conn = DatabaseManager(self.config, self.request) 114 | executor.exec_drop(dataset, conn) 115 | 116 | elif action == "show": 117 | dataset = self.__parse_show(cmd) 118 | conn = DatabaseManager(self.config, self.request) 119 | return executor.exec_show(dataset, conn) 120 | elif action == "restore": 121 | conn = DatabaseManager(self.config, self.request) 122 | executor.exec_restore(conn) 123 | else: 124 | raise CommandNotExistError(cmd_string) 125 | return 126 | except Exception as e: 127 | messages.error(self.request, str(e)) 128 | return None 129 | # TODO: This simple parser does not detect invalid optional tags. 130 | # E.g. -z schema => no schema file detected. Rather, it should print out error message, no -z option 131 | 132 | # Init command 133 | # Required args: input, dataset, optional args: -t table, -s schema 134 | def __parse_init(self, cmd): 135 | try: 136 | input_file, dataset, table_name, schema = cmd[2], cmd[3], None, None 137 | if '-t' in cmd: 138 | table_name = cmd[cmd.index('-t') + 1] 139 | if '-s' in cmd: 140 | schema = cmd[cmd.index('-s') + 1] 141 | except Exception as e: 142 | raise CommandInvalidError(' '.join(cmd)) 143 | return 144 | return input_file, dataset, table_name, schema 145 | 146 | # checkout command 147 | # required argument: dataset, vlist, optional args: to_table, to_file, delimiters, header, ignore 148 | def __parse_checkout(self, cmd): 149 | try: 150 | dataset, vlist = cmd[2], [] 151 | to_table, to_file, delimiters, header, ignore = None, None, ',', False, False 152 | vlist_indices = [i for i, x in enumerate(cmd) if x == '-v'] 153 | for i in vlist_indices: 154 | vlist.append(str(cmd[i+1])) 155 | if '-t' in cmd: 156 | to_table = cmd[cmd.index('-t') + 1] 157 | if '-f' in cmd: 158 | to_file = cmd[cmd.index('-f') + 1] 159 | if '-d' in cmd: 160 | delimiters = cmd[cmd.index('-d') + 1] 161 | if '-h' in cmd: 162 | header = True 163 | if '--ignore' in cmd: 164 | ignore = True 165 | except Exception as e: 166 | raise CommandInvalidError(' '.join(cmd)) 167 | return 168 | return dataset, vlist, to_table, to_file, delimiters, header, ignore 169 | 170 | # Commit command 171 | # Required argument: -m message, optional argument: -t table_name, -f file_name, -d delimiters, -h header 172 | def __parse_commit(self, cmd): 173 | try: 174 | message = cmd[cmd.index('-m') + 1] 175 | table_name, file_name, delimiters, header = None, None, ',', False 176 | if '-t' in cmd: 177 | table_name = cmd[cmd.index('-t') + 1] 178 | if '-f' in cmd: 179 | file_name = cmd[cmd.index('-f') + 1] 180 | if '-d' in cmd: 181 | delimiters = cmd[cmd.index('-d') + 1] 182 | if '-h' in cmd: 183 | header = True 184 | except Exception as e: 185 | raise CommandInvalidError(' '.join(cmd)) 186 | return 187 | return message, table_name, file_name, delimiters, header 188 | 189 | def __init_vGraph(): 190 | graph = VersionGraph(self.config, self.request) 191 | try: 192 | graph.init_vGraph_json(dataset, 1) # init vid = 1 193 | except Exception as e: 194 | graph.delete_vGraph_json(dataset) 195 | raise Exception 196 | return 197 | # TODO: What about schema? Automation or specified by user? 198 | 199 | 200 | # Drop command 201 | # Required argument: dataset 202 | def __parse_drop(self, cmd): 203 | try: 204 | dataset = cmd[2] 205 | except Exception as e: 206 | raise CommandInvalidError(' '.join(cmd)) 207 | return 208 | return dataset 209 | 210 | def __parse_run(self, cmd): 211 | try: 212 | sql = cmd[2] 213 | except Exception as e: 214 | raise CommandInvalidError(' '.join(cmd)) 215 | return 216 | return sql 217 | 218 | # The func is the same as __parse_drop 219 | def __parse_show(self, cmd): 220 | try: 221 | dataset = cmd[2] 222 | except Exception as e: 223 | raise CommandInvalidError(' '.join(cmd)) 224 | return 225 | return dataset 226 | -------------------------------------------------------------------------------- /orpheus/clt/db.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import logging 3 | import click 4 | import psycopg2 5 | import sys 6 | import json 7 | 8 | from orpheus.core.orpheus_sqlparse import SQLParser 9 | from orpheus.core.orpheus_exceptions import BadStateError, NotImplementedError, BadParametersError 10 | import orpheus.core.orpheus_const as const 11 | from orpheus.core.db import UserNotSetError, ConnectionError, OperationError, DatasetExistsError, SQLSyntaxError 12 | 13 | class DatabaseManager(): 14 | def __init__(self, config): 15 | # yaml config passed from ctx 16 | try: 17 | self.verbose = False 18 | self.connect = None 19 | self.cursor = None 20 | self.config = config 21 | logging.basicConfig(filename=config['log_path'], format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S ') 22 | self.user_log = open(config['user_log'], 'a') 23 | self.home = config['orpheus_home'] 24 | self.currentDB = config['database'] 25 | self.user = config['user'] 26 | self.password = config['passphrase'] 27 | self.connect_str = "host=" + self.config['host'] + " port=" + str(self.config['port']) + " dbname=" + self.currentDB + " user=" + self.user + " password=" + self.password 28 | self.connect_db() 29 | except KeyError as e: 30 | raise BadStateError("context missing field %s, abort" % e.args[0]) 31 | 32 | 33 | def connect_db(self): 34 | print "Connecting to the database [%s] ..." % self.currentDB 35 | try: 36 | if self.verbose: 37 | click.echo('Trying to connect to %s' % (self.currentDB)) 38 | logging.info('Trying to connect to %s' % (self.currentDB)) 39 | self.connect = psycopg2.connect(self.connect_str) 40 | self.cursor = self.connect.cursor() 41 | except psycopg2.OperationalError as e: 42 | logging.error('%s is not open' % (self.currentDB)) 43 | # click.echo(e, file=sys.stderr) 44 | raise ConnectionError("Cannot connect to the database [%s] @ [%s]:[%s]. Check connection, username, password and database name." % (self.currentDB, self.config['host'], self.config['port'])) 45 | return self 46 | 47 | def execute_sql(self, sql): 48 | try: 49 | self.cursor.execute(sql) 50 | if SQLParser.is_select(sql): #return records 51 | colnames = [desc[0] for desc in self.cursor.description] 52 | print ', '.join(colnames) 53 | for row in self.cursor.fetchall(): 54 | print ', '.join(str(e) for e in row) 55 | else: 56 | print self.cursor.statusmessage 57 | self.connect.commit() # commit UPDATE/INSERT messages 58 | 59 | except psycopg2.ProgrammingError: 60 | raise SQLSyntaxError() 61 | 62 | def refresh_cursor(self): 63 | self.connect = psycopg2.connect(self.connect_str) 64 | self.cursor = self.connect.cursor() 65 | 66 | 67 | # schema is a list of tuple of (attribute_name, attribute_type) as string 68 | def create_dataset(self, inputfile, dataset, schema, header=False, attributes=None): 69 | self.refresh_cursor() 70 | print "Creating the dataset [%s] to the database [%s] ..." % (dataset, self.currentDB) 71 | # create a schema (in postgres) to store user specific information 72 | try: 73 | self.cursor.execute("CREATE SCHEMA IF NOT EXISTS %s ;" % self.user) 74 | self.cursor.execute("CREATE TABLE IF NOT EXISTS %s (dataset_name text primary key);" % (self.user + '.datasets')) 75 | except psycopg2.ProgrammingError: 76 | # this is ok since table has been created before 77 | self.refresh_cursor() 78 | 79 | 80 | try: 81 | # add current dataset name into user.datasets 82 | self.cursor.execute("INSERT INTO %s values('%s');" % (self.user + '.datasets', dataset)) 83 | except psycopg2.IntegrityError: # happens when inserting duplicate key 84 | raise DatasetExistsError(dataset, self.user) 85 | return 86 | 87 | try: 88 | # for each dataset, create 3 tables 89 | # dataset_datatable, which includes all records, rid as PK, based on schema 90 | # dataset_version, which keep track of all version information, like version 91 | # dataset_indexTbl, which includes all the vid and rid mapping, like indexTbl 92 | 93 | 94 | if '.csv' not in inputfile: 95 | # TODO: finish other input later 96 | raise NotImplementedError("Loading other than CSV file not implemented!") 97 | return 98 | 99 | if not attributes: 100 | raise NotImplementedError("Attributes inferreing not implemented!") 101 | return 102 | 103 | # create cvd into public schema 104 | #TODO: change to private schema in later version 105 | 106 | print "Creating the data table using the schema provided ..." 107 | # create datatable 108 | self.cursor.execute("CREATE TABLE %s (rid serial primary key, \ 109 | %s);" % (const.PUBLIC_SCHEMA + dataset + const.DATATABLE_SUFFIX, ",".join(map(lambda (attribute_name, attribute_type) : attribute_name + " " + attribute_type, schema)))) 110 | 111 | print "Creating the version table ..." 112 | # create version table 113 | self.cursor.execute("CREATE TABLE %s(vid int primary key, \ 114 | author text, \ 115 | num_records int, \ 116 | parent integer[], \ 117 | children integer[], \ 118 | create_time timestamp, \ 119 | commit_time timestamp, \ 120 | commit_msg text);" % (const.PUBLIC_SCHEMA + dataset + const.VERSIONTABLE_SUFFIX)) 121 | 122 | print "Creating the index table ..." 123 | # create indexTbl table 124 | self.cursor.execute("CREATE TABLE %s (vid int primary key, \ 125 | rlist integer[]);" % (const.PUBLIC_SCHEMA + dataset + const.INDEXTABLE_SUFFIX)) 126 | 127 | # dump data into this dataset 128 | file_path = self.config['orpheus_home'] + inputfile 129 | if header: 130 | self.cursor.execute("COPY %s (%s) FROM '%s' DELIMITER ',' CSV HEADER;" % (const.PUBLIC_SCHEMA + dataset + const.DATATABLE_SUFFIX, ",".join(attributes), file_path)) 131 | else: 132 | self.cursor.execute("COPY %s (%s) FROM '%s' DELIMITER ',' CSV;" % (const.PUBLIC_SCHEMA + dataset + const.DATATABLE_SUFFIX, ",".join(attributes), file_path)) 133 | 134 | 135 | self.connect.commit() 136 | except Exception as e: 137 | raise OperationError() 138 | return 139 | 140 | def drop_dataset(self, dataset): 141 | self.refresh_cursor() 142 | # TODO: refactor for better approach? 143 | try: 144 | self.cursor.execute("DROP table %s;" % (const.PUBLIC_SCHEMA + dataset + const.DATATABLE_SUFFIX)) 145 | self.connect.commit() 146 | except: 147 | self.refresh_cursor() 148 | 149 | try: 150 | self.cursor.execute("DROP table %s;" % (const.PUBLIC_SCHEMA + dataset + const.VERSIONTABLE_SUFFIX)) 151 | self.connect.commit() 152 | except: 153 | self.refresh_cursor() 154 | 155 | try: 156 | self.cursor.execute("DROP table %s;" % (const.PUBLIC_SCHEMA + dataset + const.INDEXTABLE_SUFFIX)) 157 | self.connect.commit() 158 | except: 159 | self.refresh_cursor() 160 | try: 161 | self.cursor.execute("DELETE from %s where dataset_name = '%s';" % (self.user + ".datasets", dataset)) 162 | self.connect.commit() 163 | except: 164 | self.refresh_cursor() 165 | 166 | self.connect.commit() 167 | return 168 | 169 | def list_dataset(self): 170 | self.refresh_cursor() 171 | try: 172 | self.cursor.execute("SELECT * from %s;" % (self.user + '.datasets')) 173 | return [x[0] for x in self.cursor.fetchall()] 174 | except psycopg2.ProgrammingError: 175 | raise BadStateError("No dataset has been initialized before, try init first") 176 | return 177 | 178 | def show_dataset(self, dataset): 179 | self.refresh_cursor() 180 | raise NotImplementedError("Show a specified dataset not implemented!") 181 | return 182 | 183 | 184 | @classmethod 185 | def load_config(cls): 186 | try: 187 | with open('config.yaml', 'r') as f: 188 | obj = yaml.load(f) 189 | except IOError: 190 | raise BadStateError("config.yaml file not found or data not clean, abort") 191 | return None 192 | return obj 193 | 194 | @classmethod 195 | def create_user(cls, user, password, db): 196 | # Create user in the database 197 | # Using corresponding SQL or prostegres commands 198 | # Set one-time only connection to the database to create user 199 | try: 200 | server_config = cls.load_config() 201 | conn_string = "host=" + server_config['host'] + " port=" + str(server_config['port']) + " dbname=" + db 202 | connect = psycopg2.connect(conn_string) 203 | cursor = connect.cursor() 204 | # passphrase = EncryptionTool.passphrase_hash(password) 205 | cursor.execute("CREATE USER %s SUPERUSER;" % user) # TODO: add password detection later 206 | connect.commit() 207 | except psycopg2.OperationalError: 208 | raise ConnectionError("Cannot connect to %s at %s:%s" % (db, server_config['host'], str(server_config['port']))) 209 | except Exception as e: # unknown error 210 | raise e 211 | return 212 | 213 | 214 | 215 | -------------------------------------------------------------------------------- /orpheus/clt/click_entry.py: -------------------------------------------------------------------------------- 1 | import os 2 | import yaml 3 | import click 4 | 5 | from orpheus.core.executor import Executor 6 | from orpheus.core.user_control import UserManager 7 | from orpheus.core.orpheus_exceptions import BadStateError, NotImplementedError, BadParametersError 8 | from orpheus.core.orpheus_sqlparse import SQLParser 9 | from db import DatabaseManager 10 | 11 | class Context(): 12 | def __init__(self): 13 | self.config_file = 'config.yaml' 14 | if 'ORPHEUS_HOME' not in os.environ: 15 | os.environ['ORPHEUS_HOME'] = os.getcwd() 16 | self.config_path = os.environ['ORPHEUS_HOME'] + '/' + self.config_file 17 | try: 18 | with open(self.config_path, 'r') as f: 19 | self.config = yaml.load(f) 20 | 21 | assert(self.config['orpheus_home'] != None) 22 | 23 | if not self.config['orpheus_home'].endswith("/"): 24 | self.config['orpheus_home'] += "/" 25 | # if user overwrite the ORPHEUS_HOME, rewrite the enviormental parameters 26 | if 'orpheus_home' in self.config: 27 | os.environ['ORPHEUS_HOME'] = self.config['orpheus_home'] 28 | except (IOError, KeyError) as e: 29 | raise BadStateError("config.yaml file not found or data not clean, abort") 30 | return 31 | except AssertionError as e: 32 | raise BadStateError("orpheus_home not specified in config.yaml") 33 | return 34 | except: # unknown error 35 | raise BadStateError("Unknown error during loading the config file, abort") 36 | return 37 | 38 | 39 | @click.group() 40 | @click.pass_context 41 | def cli(ctx): 42 | try: 43 | ctx.obj = Context().config #Orpheus context obj 44 | user_obj = UserManager.get_current_state() 45 | for key in user_obj: 46 | ctx.obj[key] = user_obj[key] 47 | except Exception as e: 48 | click.secho(str(e), fg='red') 49 | 50 | @cli.command() 51 | @click.option('--database', prompt='Enter database name', help='Specify the database name that you want to configure to.') 52 | @click.option('--user', prompt='Enter user name', help='Specify the user name that you want to configure to.') 53 | @click.option('--password', prompt=True, hide_input=True, help='Specify the password.', default='') 54 | @click.pass_context 55 | def config(ctx, user, password, database): 56 | newctx = ctx.obj # default 57 | 58 | try: 59 | newctx['database'] = database 60 | newctx['user'] = user 61 | newctx['passphrase'] = password 62 | conn = DatabaseManager(newctx) 63 | except Exception as e: 64 | click.secho(str(e), fg='red') 65 | return 66 | 67 | try: 68 | UserManager.create_user(user, password) 69 | if UserManager.verify_credential(user, password): 70 | UserManager.create_user(user, password) 71 | from orpheus.core.encryption import EncryptionTool 72 | newctx['passphrase'] = EncryptionTool.passphrase_hash(password) 73 | UserManager.write_current_state(newctx) # pass down to user manager 74 | click.echo('Logged to the database [%s] as [%s] ' % (ctx.obj['database'],ctx.obj['user'])) 75 | except Exception as e: 76 | click.secho(str(e), fg='red') 77 | 78 | 79 | @cli.command() 80 | @click.pass_context 81 | def create_user(ctx): 82 | # check this user has permission to create new user or not 83 | # create user in UserManager 84 | if not ctx.obj['user'] or not ctx.obj['database']: 85 | click.secho("No session in use, please call config first", fg='red') 86 | return # stop the following commands 87 | 88 | user = click.prompt('Please enter user name') 89 | password = click.prompt('Please enter password', hide_input=True, confirmation_prompt=True) 90 | 91 | click.echo("Creating user into database [%s]" % ctx.obj['database']) 92 | try: 93 | DatabaseManager.create_user(user, password, ctx.obj['database']) #TODO: need revise 94 | UserManager.create_user(user, password) 95 | click.echo('User created.') 96 | except Exception as e: 97 | click.secho(str(e), fg='red') 98 | 99 | # TODO: check permission? 100 | 101 | @cli.command() 102 | @click.pass_context 103 | def whoami(ctx): 104 | if not ctx.obj['user'] or not ctx.obj['database']: 105 | click.secho("No session in use, please call config first", fg='red') 106 | return # stop the following commands 107 | 108 | click.echo('Logged to the database [%s] as [%s] ' % (ctx.obj['database'],ctx.obj['user'])) 109 | 110 | 111 | @cli.command() 112 | @click.argument('input_file', type=click.Path(exists=True)) 113 | @click.argument('dataset') 114 | @click.option('--table_name', '-t', help='Create the dataset with existing table schema') 115 | @click.option('--schema', '-s', help='Create the dataset with schema file', type=click.Path(exists=True)) 116 | @click.pass_context 117 | def init(ctx, input_file, dataset, table_name, schema): 118 | # TODO: add header support 119 | # By default, we connect to the database specified in the -config- command earlier 120 | 121 | # Two cases need to be taken care of: 122 | # 1.add version control on an outside file 123 | # 1.1 Load a csv or other format of the file into DB 124 | # 1.2 Schema 125 | # 2.add version control on a existing table in DB 126 | executor = Executor(ctx.obj) 127 | conn = DatabaseManager(ctx.obj) 128 | executor.exec_init(input_file, dataset, table_name, schema, conn) 129 | 130 | @cli.command() 131 | @click.argument('dataset') 132 | @click.pass_context 133 | def drop(ctx, dataset): 134 | if click.confirm('Are you sure you want to drop %s?' % dataset): 135 | try: 136 | conn = DatabaseManager(ctx.obj) 137 | click.echo("Dropping dataset [%s] ..." % dataset) 138 | executor = Executor(ctx.obj) 139 | executor.exec_drop(dataset, conn) 140 | except Exception as e: 141 | click.secho(str(e), fg='red') 142 | 143 | 144 | @cli.command() 145 | @click.option('--dataset', '-d', help='Specify the dataset to show') 146 | @click.option('--table_name', '-t', help='Specify the table to show') 147 | @click.pass_context 148 | def ls(ctx, dataset, table_name): 149 | # if no dataset specified, show the list of dataset the current user owns 150 | try: 151 | conn = DatabaseManager(ctx.obj) 152 | print "The current database contains the following CVDs:" 153 | if not dataset: 154 | click.echo("\n".join(conn.list_dataset())) 155 | else: 156 | click.echo(conn.show_dataset(dataset)) 157 | 158 | # when showing dataset, chop off rid 159 | except Exception as e: 160 | click.secho(str(e), fg='red') 161 | 162 | 163 | # the call back function to execute file 164 | # execute line by line 165 | def execute_sql_file(ctx, param, value): 166 | if not value or ctx.resilient_parsing: 167 | return 168 | # value is the relative path of file 169 | conn = DatabaseManager(ctx.obj) 170 | parser = SQLParser(conn) 171 | abs_path = ctx.obj['orpheus_home'] + value 172 | click.echo("Executing SQL file at %s" % value) 173 | with open(abs_path, 'r') as f: 174 | for line in f: 175 | executable_sql = parser.parse(line) 176 | #print executable_sql 177 | ctx.exit() 178 | 179 | @cli.command() 180 | @click.option('--file', '-f', callback=execute_sql_file, expose_value=False, is_eager=True, type=click.Path(exists=True)) 181 | @click.option('--sql', prompt="Input sql statement") 182 | @click.pass_context 183 | def run(ctx, sql): 184 | # TODO: add finer grained try-catch for SQLParser 185 | try: 186 | # execute_sql_line(ctx, sql) 187 | conn = DatabaseManager(ctx.obj) 188 | parser = SQLParser(conn) 189 | executable_sql = parser.parse(sql) 190 | # print executable_sql 191 | conn.execute_sql(executable_sql) 192 | 193 | except Exception as e: 194 | import traceback 195 | traceback.print_exc() 196 | click.secho(str(e), fg='red') 197 | 198 | @cli.command() 199 | @click.argument('dataset') 200 | @click.option('--vlist', '-v', multiple=True, required=True, help='Specify version you want to checkout, use multiple -v for multiple version checkout') 201 | @click.option('--to_table', '-t', help='Specify the table name to checkout to.') 202 | @click.option('--to_file', '-f', help='Specify the location of file') 203 | @click.option('--delimiters', '-d', default=',', help='Specify the delimiter used for checkout file') 204 | @click.option('--header', '-h', is_flag=True, help="If set, the first line of checkout file will be the header") 205 | @click.option('--ignore/--no-ignore', default=False, help='If set, checkout versions into table will ignore duplicated key') 206 | @click.pass_context 207 | def checkout(ctx, dataset, vlist, to_table, to_file, delimiters, header, ignore): 208 | conn = DatabaseManager(ctx.obj) 209 | executor = Executor(ctx.obj) 210 | executor.exec_checkout(dataset, vlist, to_table, to_file, delimiters, header, ignore, conn) 211 | 212 | 213 | @cli.command() 214 | @click.option('--msg','-m', help='Commit message', required = True) 215 | @click.option('--table_name','-t', help='The table to be committed') # changed to optional later 216 | @click.option('--file_name', '-f', help='The file to be committed', type=click.Path(exists=True)) 217 | @click.option('--delimiters', '-d', default=',', help='Specify the delimiters used for checkout file') 218 | @click.option('--header', '-h', is_flag=True, help="If set, the first line of checkout file will be the header") 219 | @click.pass_context 220 | def commit(ctx, msg, table_name, file_name, delimiters, header): 221 | 222 | conn = DatabaseManager(ctx.obj) 223 | executor = Executor(ctx.obj) 224 | executor.exec_commit(msg, table_name, file_name, delimiters, header, conn) 225 | 226 | @cli.command() 227 | @click.pass_context 228 | def clean(ctx): 229 | config = ctx.obj 230 | open(config['meta_info'], 'w').close() 231 | f = open(config['meta_info'], 'w') 232 | f.write('{"file_map": {}, "table_map": {}, "table_created_time": {}, "merged_tables": []}') 233 | f.close() 234 | click.echo("meta_info cleaned") 235 | open(config['meta_modifiedIds'], 'w').close() 236 | f = open(config['meta_modifiedIds'], 'w') 237 | f.write('{}') 238 | f.close() 239 | click.echo("modifiedID cleaned") 240 | -------------------------------------------------------------------------------- /orpheus/core/orpheus_sqlparse.py: -------------------------------------------------------------------------------- 1 | # this class uses the sqlparse library to extract the semantics of OrpheusDB SQL statement 2 | 3 | import sqlparse, re 4 | from sqlparse.sql import Identifier, Token, Where 5 | from sqlparse.tokens import DML 6 | 7 | import orpheus_const as const 8 | from relation import RelationManager 9 | from collections import defaultdict 10 | 11 | class InvalidSyntaxError(Exception): 12 | def __init__(self, statement): 13 | self.statement = statement 14 | def __str__(self): 15 | return "Error parsing '%s'" % self.statement 16 | 17 | 18 | class SQLParser(object): 19 | 20 | def __init__(self, conn): 21 | self.conn = conn 22 | self.relation = RelationManager(self.conn) 23 | self.reserved_column_names = ['cvd'] 24 | 25 | def construct_identifier(self, content): 26 | return Identifier([Token('', content)]) 27 | 28 | def get_fields_mapping(self, attributes): 29 | # mapping from attribute name to corresponding table 30 | # by default, d = datatable, i = indextable, v = versiontable 31 | fields_mapping = {'vid' : 'i'} 32 | for attribute in attributes: 33 | fields_mapping[attribute] = 'd' 34 | 35 | versiontable_attributes = ["author", "num_records", "parent", "children", "create_time", "commit_time", "commit_msg"] 36 | # take in version table attributes 37 | for version_attribute in versiontable_attributes: 38 | fields_mapping[version_attribute] = 'v' 39 | 40 | return fields_mapping 41 | 42 | def get_touched_table(self, touched_columns, fields_mapping): 43 | touched_table = set() 44 | for column in touched_columns.keys(): 45 | try: 46 | touched_table.add(fields_mapping[column]) 47 | except KeyError: 48 | pass # user defined alias 49 | return touched_table 50 | 51 | # anything in this parsed statement 52 | def get_touched_column_names(self, parent, stop_words=set()): 53 | tokens = parent.flatten() 54 | column_names = defaultdict(list) 55 | for token in tokens: 56 | if token.ttype == sqlparse.tokens.Name: 57 | # this is a column 58 | column_value = token.value 59 | if column_value not in stop_words: 60 | token_parent = token.parent 61 | token_index = token_parent.token_index(token) 62 | column_names[column_value].append((token_parent, token_index)) 63 | return column_names 64 | 65 | # return replaced from clause 66 | def get_from_clause(self, dataset_name, touched_table): 67 | # rule based ! 68 | datatable = dataset_name + const.DATATABLE_SUFFIX 69 | indextable = dataset_name + const.INDEXTABLE_SUFFIX 70 | versiontable = dataset_name + const.VERSIONTABLE_SUFFIX 71 | if 'd' in touched_table and 'i' in touched_table: 72 | return "%s, %s" % (datatable + ' d', indextable + ' i') 73 | elif 'v' in touched_table and 'i' in touched_table: 74 | return "%s, %s" % (versiontable + ' v', indextable + ' i') 75 | elif 'd' in touched_table and len(touched_table) == 1: # meaning there is only datatable attributes are touched 76 | return "%s" % datatable + ' d' 77 | elif 'v' in touched_table and len(touched_table) == 1: # meaning there is only versiontable attributes are touched 78 | return "%s" % versiontable + ' v' 79 | else: 80 | return "%s, %s, %s" % (versiontable + ' v', indextable + ' i', datatable + ' d') 81 | 82 | 83 | def get_where_clause(self, touched_table): 84 | # rule based! 85 | if 'd' in touched_table and 'i' in touched_table: 86 | return "d.rid = ANY(i.rlist)" 87 | else: 88 | return None 89 | 90 | 91 | # return the first occurrence of versions (1,2), OF cvd (ds1) 92 | def get_dataset_name_and_versions(self, parent): 93 | tokens = list(parent.flatten()) 94 | parent, dataset_name, version_idx, vlist = None, None, None, None 95 | for i,token in enumerate(tokens): 96 | if token.value == 'version': 97 | parent = token.parent 98 | while type(parent) != sqlparse.sql.Parenthesis and type(parent) != sqlparse.sql.Statement: 99 | # stops when we find a handle to either () or statement 100 | token = parent 101 | parent = parent.parent # traverse up tree 102 | version_idx = parent.token_index(token) 103 | break 104 | 105 | vlist = parent.tokens[version_idx + 2].value 106 | dataset_name = parent.tokens[version_idx + 6].value.split()[-1] 107 | return vlist, dataset_name, parent, version_idx 108 | 109 | # find the first occurrence of CVD, return the name of CVD, its handle and index 110 | def find_cvd_handle(self, parent): 111 | tokens = list(parent.flatten()) 112 | parent, dataset_name, cvd_index = None, None, None 113 | for i,token in enumerate(tokens): 114 | if token.value == 'cvd': 115 | # found the clause, need to find its parent handle 116 | parent = token.parent 117 | dataset_name = tokens[i + 2].value 118 | while type(parent) != sqlparse.sql.Parenthesis and type(parent) != sqlparse.sql.Statement: 119 | # stops when we find a handle to either () or statement 120 | token = parent 121 | parent = parent.parent # traverse up tree 122 | cvd_index = parent.token_index(token) 123 | break 124 | return dataset_name, parent, cvd_index 125 | 126 | @classmethod 127 | def is_select(cls, raw_sql): 128 | parsed = sqlparse.parse(raw_sql)[0] 129 | item = parsed.tokens[0] 130 | if item.ttype is DML and item.value.upper() == 'SELECT': 131 | return True 132 | return False 133 | 134 | 135 | 136 | # find the Where clause index under parent.tokens 137 | def find_where_index(self, parent): 138 | lis = parent.tokens 139 | for i, token in enumerate(lis): 140 | if type(token) is sqlparse.sql.Where: 141 | return i 142 | return -1 143 | 144 | # find the place in parent to insert WHERE clause 145 | def find_where_insert(self, parent): 146 | lis = parent.tokens 147 | for i,token in enumerate(lis): 148 | if token.value == 'group' or token.value == 'order' or token.value == 'limit': 149 | return i - 1 # anything that before group by or order by, -1 for the space 150 | return len(lis) if lis[-1].value != ')' and lis[-1].value != ';' else len(lis) - 1 151 | 152 | 153 | # version known, replace the tokens 154 | def replace_known_version(self, dataset_name, vlist, parent, version_idx): 155 | # if parent has where, append a new where 156 | datatable = dataset_name + const.DATATABLE_SUFFIX 157 | indextable = dataset_name + const.INDEXTABLE_SUFFIX 158 | rlist = self.relation.select_records_of_version_list(vlist.split(','), indextable) 159 | constraint = "rid = ANY('%s'::int[])" % rlist 160 | 161 | # replace the FROM clause 162 | parent.tokens = parent.tokens[:version_idx] + [self.construct_identifier(datatable)] + parent.tokens[version_idx + 7:] 163 | 164 | # replace the WHERE clause 165 | where_indx = self.find_where_index(parent) 166 | if where_indx < 0: 167 | new_idex = self.find_where_insert(parent) # find the place to insert new where 168 | parent.insert_before(new_idex, self.construct_identifier(" where " + constraint)) 169 | else: 170 | where_token = parent.tokens[where_indx] 171 | where_token = parent.tokens[where_indx] 172 | new_idex = self.find_where_insert(where_token) 173 | where_token.insert_before(new_idex, self.construct_identifier(" and " + constraint)) 174 | 175 | 176 | def replace_unknown_version(self, parent, cvd_idx, dataset_name, fields_mapping, touched_column_names): 177 | # find touched tables 178 | touched_table = self.get_touched_table(touched_column_names, fields_mapping) 179 | table_constraint = self.get_from_clause(dataset_name, touched_table) 180 | 181 | # replace the from clause 182 | parent.tokens = parent.tokens[:cvd_idx] + [self.construct_identifier(table_constraint)] + parent[cvd_idx+1:] 183 | 184 | where_constraint = self.get_where_clause(touched_table) 185 | 186 | # replace the where clause if needed 187 | if where_constraint: 188 | where_indx = self.find_where_index(parent) 189 | if where_indx < 0: 190 | # no where, needs to add 191 | new_idex = self.find_where_insert(parent) # find the place to insert new where 192 | parent.insert_before(new_idex, self.construct_identifier(" where " + where_constraint)) 193 | else: 194 | where_token = parent.tokens[where_indx] 195 | where_token.tokens.extend(self.construct_identifier(" and " + where_constraint + " ")) 196 | 197 | # print touched_column_names 198 | 199 | # replace all the touched columns by prefix a alias 200 | for column in touched_column_names.keys(): 201 | for (column_parent, column_idx) in touched_column_names[column]: 202 | if column in fields_mapping: # only those we found in tables 203 | if '.' in column_parent.value: 204 | continue 205 | # replace them 206 | mapped_table_alias = fields_mapping[column] 207 | column_parent.tokens = column_parent.tokens[:column_idx] + [self.construct_identifier("%s.%s" % (mapped_table_alias, column))] + column_parent.tokens[column_idx + 1:] 208 | 209 | 210 | 211 | # main method to parse 212 | # all char casted to lower case 213 | def parse(self, raw_sql): 214 | relation = RelationManager(self.conn) 215 | line = raw_sql.lower() 216 | try: 217 | while 1: 218 | # two cases 219 | # 1. version is specified, version 1,2 from cvd ds1 220 | # 2. version is not specified, from CVD 221 | # TODO: add more cases? 222 | version_specified_re = re.compile('.*?from\sversion\s(\d+|\d+(,\d+)+)\sof\scvd\s(\w+);?') 223 | version_matched = version_specified_re.match(line) 224 | if version_matched: # found case 1 225 | # vlist = version_matched.group(1) # list of version separted by comma 226 | # dataset_name = version_matched.group(3) # whatever after keyword CVD 227 | parsed_statement = sqlparse.parse(line)[0] 228 | vlist, dataset_name, parent, version_idx = self.get_dataset_name_and_versions(parsed_statement) 229 | self.replace_known_version(dataset_name, vlist, parent, version_idx) 230 | line = str(parsed_statement) 231 | continue 232 | version_unknown_re = re.compile('.*from\scvd\s(\w+);?') 233 | version_unknown_matched = version_unknown_re.match(line) 234 | if version_unknown_matched: # found case 2 235 | parsed_statement = sqlparse.parse(line)[0] 236 | dataset_name, parent, cvd_idx = self.find_cvd_handle(parsed_statement) 237 | 238 | datatable_attributes, _ = self.relation.get_datatable_attribute(dataset_name + const.DATATABLE_SUFFIX) 239 | 240 | # get the mapping from each field to alias 241 | fields_mapping = self.get_fields_mapping(datatable_attributes) 242 | 243 | #print fields_mapping 244 | 245 | touched_column_names = self.get_touched_column_names(parent, stop_words=set(self.reserved_column_names + [dataset_name])) 246 | 247 | # print touched_column_names 248 | 249 | self.replace_unknown_version(parent, cvd_idx, dataset_name, fields_mapping, touched_column_names) 250 | 251 | line = str(parsed_statement) 252 | continue 253 | 254 | # either no keyword found or all resolved 255 | break 256 | # print parsed_statement 257 | return line 258 | 259 | except: 260 | import traceback 261 | traceback.print_exc() 262 | raise InvalidSyntaxError(raw_sql) 263 | return -------------------------------------------------------------------------------- /orpheus/interface/main/templates/main/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | {% load static %} 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | OrpheusDB Demonstration 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 34 | 35 | 36 | 37 | 38 | 59 | 60 |
61 |
62 | 98 |
99 |

Command Input

100 |
101 |
{% csrf_token %} 102 | 103 | 104 |

Please enter either the SQL or the version control command below:

105 |
106 | 107 | 108 |
109 |
110 | 111 | 112 |

Output Results

113 |
114 | 115 | {% if messages %} 116 |
    117 | {% for message in messages %} 118 | 119 | 124 | 125 | 126 | {% if message.tags == 'success' %} 127 |
     {{ message| linebreaks }} 
    128 | {% else %} 129 |
  • {{ message| linebreaks }}
  • 130 | {% endif %} 131 | 132 | {% endfor %} 133 |
134 | {% endif %} 135 | 136 | {% for attr_names, transactions in table_list %} 137 | 138 | {% if attr_names %} 139 | 140 | 141 | {% for attr in attr_names %} 142 | 143 | {% endfor %} 144 | 145 | 146 | {% endif %} 147 | 148 | {% if transactions %} 149 | 150 | {% for row in transactions %} 151 | 152 | {% for col in row %} 153 | 154 | {% endfor %} 155 | 156 | {% endfor %} 157 | 158 | {% endif %} 159 | 160 |
{{attr}}
{{col}}
161 | {% endfor %} 162 | 163 |
164 |
165 | 166 |
167 |

Version Visualization

168 | 169 | {% include "vGraph.html" %} 170 | 171 | {% if vGraph_json %} 172 | 173 | 177 | {% endif %} 178 | 179 |
180 | 181 |
182 |
{% csrf_token %} 183 | 184 | 193 | 194 |
195 |
196 |
197 |
198 | 199 | 200 | 201 | 202 | 203 |
{% csrf_token %} 204 | 205 | 206 | 207 | 208 | 210 | 211 | 212 | 213 |
214 | 215 |
216 |
217 |
218 | 219 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OrpheusDB: Bolt-On Versioning for Relational Databases 2 | [OrpheusDB][orpheus] is a hosted system that supports _relational dataset version management_. OrpheusDB is built on top of standard relational databases, thus it inherits much of the same benefits of relational databases, while also compactly storing, tracking, and recreating versions on demand, all very efficiently. 3 | 4 | OrpheusDB is built using [PostgreSQL][postgressite] and [Click][clicksite], a command line tool written in Python. Our current version supports advanced querying capabilities, using both the git-style version control commands, as well as SQL queries on one or more dataset versions. The paper describing the design, functionality, optimization, and performance evaluation can be found [at this link][papersite]. 5 | 6 | OrpheusDB is a multi-year project, supported by the National Science Foundation via award number 1513407. It shares the vision of the [DataHub][datahub] project in supporting collaborative data analytics. 7 | 8 | 9 | 10 | 11 | 12 | ### Version 13 | The current version is 1.0.0 (Released January 1, 2017). 14 | 15 | 16 | ### Key Design Innovations 17 | * OrpheusDB is built on top of a traditional relational database, thus it inherits all of the standard benefits of relational database systems "for free" 18 | * OrpheusDB supports advanced querying and versioning capabilities, via both SQL queries and git-style version control commands. 19 | * OrpheusDB uses a sophisticated data model, coupled with partition optimization algorithms1, to provide efficient version control performance over large-scale datasets. 20 | 21 | ### Dataset Version Control in OrpheusDB 22 | The fundamental unit of storage within OrpheusDB is a _collaborative versioned dataset (CVD)_ to which one or more users can contribute, 23 | representing a collection of versions of a single relational dataset, with a fixed schema. There is a many-to-many relationship between records in the relation and versions that are captured within the CVD: each record can belong to many versions, and each version can contain many records. 24 | 25 | 26 | Users can operate on CVDs much like they would with source code version control. The _checkout_ command allows users to materialize one or more specific versions of a CVD as a newly created regular table within a relational database or as a csv file; the _commit_ command allows users to add a new version to a CVD by making the local changes made by the user on their materialized table or on their exported csv file visible to others. Other git-style commands we support include _init_, _create\_user_, _config_, _whoami_, _ls_, _drop_, and _optimize_. 27 | 28 | Users can also execute SQL queries on one or more relational dataset versions within a CVD via the command line using the _run_ command, without requiring the corresponding dataset versions to be materialized. Beyond executing queries on a small number of versions, users can also apply aggregation grouped by version ids, or identify versions that satisfy some property. 29 | 30 | 31 | 32 | ### Data Model 33 | Each CVD in OrpheusDB corresponds to three underlying relational tables: the _data_ table, the _index_ table, and the _version_ table. To capture dataset versions, we represent the records of a dataset in the _data_ table and mapping between versions and records in the _index_ table. Finally, we store version-level provenance information in the _version_ table, including attributes such as `author`, `num_records`, `parent`, `children`, `create_time`, `commit_time`, and `commit_msg`. 34 | 35 | 36 | 37 | Our experimental evaluation demonstrates that, compared to other alternative data models, our data model, coupled with the partition optimizer results in **10x** less storage consumption, **1000x** less time for _commit_ and comparable query performance for the _checkout_ command. In other words, OrpheusDB acheives an efficient balance between storage consumption and query latencies. 38 | 39 | ### System Requirement 40 | OrpheusDB requires the following software to be installed successfully prior to setup: 41 | * Python 2.7.x 42 | * PostgreSQL >= 9.5 43 | 44 | ### Installation Instructions 45 | OrpheusDB comes with a standard `setup.py` script for installation. The required python dependency packages include 46 | * click >= 6.6 47 | * psycopy2 >= 2.6.2 48 | * pandas >= 0.19.0 49 | * pyyaml >= 3.12 50 | * pyparsing >=2.1.1 51 | * sqlparse >= 0.2.2 52 | 53 | Users are able to install any of missing dependencies themselves via `pip`. Alternatively, an easier way to install all the requisite dependencies is via `pip install .` (If you encounter permission errors, install via `sudo -H pip install .`) 54 | 55 | After installation, users can use `orpheus --help` to list all the available commands in OrpheusDB. By default, `orpheus` is the alias for OrpheusDB user interface. 56 | 57 | 63 | 64 | ### Configuration 65 | To start with, users need to install PostgresSQL successfully. (A tutorial of installing PostgresSQL on Mac OSX can be found [here][postgres-installation].) After installing, and then starting PostgresSQL (e.g., via `pg_ctl`), users can call `createdb` to create a new database with a new username and password, all under the current user login. Remember the username and password, the parameters of the new database, and other details of the PostgreSQL setup. Once the configuration is complete, edit the appropriate entries in the file `config.yaml`. 66 | 67 | ### User Tutorials 68 | To start working on versioned datasets, users need to run `orpheus config` to set up OrpheusDB for the given user. To start off, use ths same username that was used during the PostgreSQL configuration -- this will initialize a OrpheusDB user with the same username. Following that, users can create new OrpheusDB usernames via the `create_user` command. Upon finishing, this new username will be pushed to the underlying data storage with a SUPERUSER privilege. Command `config` can also be used to login through created username and `whoami` is used to list the current username that is currently logged in. 69 | 70 | Please note here that OrpheusDB provides the most basic implementation for user information, i.e. there is no password protection. However, this feature is subject to change in future versions. 71 | ``` 72 | orpheus config 73 | orpheus create_user 74 | orpheus whoami 75 | ``` 76 | 77 | The `init` command provides a mechanism to load a csv file into OrpheusDB as a CVD, with all the records as its first version (i.e., vid = 1). To let OrpheusDB know what is the schema for this dataset, user can provide a sample schema file through option `-s`. Each line in the schema file has the format `, `. In the following example, `data.csv` file contains 3 attributes, namely `age`, `employee_id` and `salary`. The command below loads the `data.csv` file into OrpheusDB as a CVD named `dataset1`, whose schema is indicated in the file ``sample_schema.csv`. 78 | 79 | 80 | 81 | ``` 82 | orpheus init test/data.csv dataset1 -s test/sample_schema.csv 83 | ``` 84 | 85 | User can checkout one or more desired versions through the `checkout` command, to either a csv file or a structured table in RDBMS. In the following example, version 1 of CVD `dataset1` is checked out as a csv file named `checkout.csv`. 86 | ``` 87 | orpheus checkout dataset1 -v 1 -f checkout.csv 88 | ``` 89 | 90 | After changes are made to the previous checkout versions, OrpheusDB can commit these changes to its corresponding CVD assuming that the schema is unchanged. 91 | 92 | In the following example, we commit the modified checkout.csv back to CVD `dataset1`. Note here that since OrpheusDB internally logged the CVD name that `checkout.csv` file was checked out from, there is no need to specify the CVD name in the `commit` command. 93 | 94 | Any changed or new records from commit file will be appended to the corresponding CVD, labeled with a new version id. A special case is the committing of a subset of a previously checked-out version. In such a setting, OrpheusDB will perform the commit as expected; the new version is added with the subset of the records. 95 | 96 | ``` 97 | orpheus commit -f checkout.csv -m 'first commit' 98 | ``` 99 | 100 | OrpheusDB also supports direct execution of queries on CVDs without materialization. This is done via the run command. The run command will prompt the user to provide the SQL command to be executed directly. If `-f` is specified, it will execute the SQL file specified. 101 | ``` 102 | orpheus run 103 | ``` 104 | 105 | OrpheusDB supports a rich syntax of SQL statements on versions and CVDs. During the execution of these steatements, OrpheusDB will detect keywords like `CVD` so it knows the query is against one or more CVDs. There are mainly the following two types of queries supported: 106 | 107 | 1. Query against known version(s) of a particular dataset 108 | 2. Query against unknown version(s) of a particular dataset 109 | 110 | To query against known version(s), the version number needs to be specified. In the following example, OrpheusDB will select the `employee_id` and `age` columns from CVD `dataset1` whose version id is equal to either `1` or `2`. 111 | ``` 112 | SELECT employee_id, age FROM VERSION 1,2 OF CVD dataset1; 113 | ``` 114 | 115 | If version number is unknown, OrpheusDB supports queries where the desired version numbers are also identified. In the following examples, OrpheusDB will select all the version ids that have one or more records whose age is less than 25. It is worth noting that the `GROUP BY` clause is required to aggregate on version numbers. 116 | ``` 117 | SELECT vid FROM CVD dataset1 WHERE age < 25 GROUP BY vid; 118 | ``` 119 | Here are a couple other examples of SQL on versions: 120 | 121 | (1). Find all versions in CVD `dataset1` that have more than 5 records where salary is larger than 7400. 122 | ``` 123 | SELECT vid FROM CVD dataset1 WHERE salary > 7400 GROUP BY vid HAVING COUNT(employee_id) > 5; 124 | ``` 125 | (2). Find all versions in CVD `dataset1` whose commit time is later than December 1st, 2016. 126 | ``` 127 | SELECT vid FROM CVD dataset1 WHERE commit_time > '2016-12-01' GROUP BY vid; 128 | ``` 129 | 130 | ### Development Plan 131 | We plan to release versions of OrpheusDB in a regular manner, adding on further 132 | querying, partitioning, and query optimization capabilities, as well as regular bug-fixes. 133 | 134 | License 135 | ---- 136 | 137 | MIT 138 | 139 | [//]: # (These are reference links used in the body of this note and get stripped out when the markdown processor does its job. There is no need to format nicely because it shouldn't be seen. Thanks SO - http://stackoverflow.com/questions/4823468/store-comments-in-markdown-syntax) 140 | 141 | [prof]: http://web.engr.illinois.edu/~adityagp/# 142 | [clicksite]: http://click.pocoo.org/5/ 143 | [orpheus]: http://orpheus-db.github.io/ 144 | [datahub]: https://arxiv.org/abs/1409.0798 145 | [postgressite]: https://www.postgresql.org/ 146 | [papersite]:http://data-people.cs.illinois.edu/papers/orpheus.pdf 147 | [postgres-installation]: https://chartio.com/resources/tutorials/how-to-start-postgresql-server-on-mac-os-x/ 148 | 1The partition optimization algorithms are not part of this release. 149 | -------------------------------------------------------------------------------- /orpheus/core/relation.py: -------------------------------------------------------------------------------- 1 | class RelationNotExistError(Exception): 2 | def __init__(self, tablename): 3 | self.name = tablename 4 | def __str__(self): 5 | return "Relation %s does not exist" % self.name 6 | 7 | class RelationOverwriteError(Exception): 8 | def __init__(self, tablename): 9 | self.name = tablename 10 | def __str__(self): 11 | return "Relation %s exists, add flag to allow overwrite" % self.name 12 | 13 | class ReservedRelationError(Exception): 14 | def __init__(self, tablename): 15 | self.name = tablename 16 | def __str__(self): 17 | return "Relation %s is a reserved name, please use a different one" % self.name 18 | 19 | class ColumnNotExistError(Exception): 20 | def __init__(self, column): 21 | self.name = column 22 | def __str__(self): 23 | return "Column %s does not exist" % self.name 24 | 25 | class RelationManager(object): 26 | def __init__(self, conn): 27 | self.conn = conn; 28 | 29 | def get_datatable_attribute(self, from_table): 30 | selectTemplate = "SELECT column_name, data_type from INFORMATION_SCHEMA.COLUMNS where table_name = '%s' and column_name NOT IN ('rid');" % (from_table) 31 | self.conn.cursor.execute(selectTemplate) 32 | _datatable_attribute_types = self.conn.cursor.fetchall() 33 | # column name 34 | _attributes = map(lambda x : str(x[0]), _datatable_attribute_types) 35 | # data type 36 | _attributes_type = map(lambda x: str(x[1]), _datatable_attribute_types) 37 | return _attributes, _attributes_type 38 | 39 | 40 | 41 | def checkout_data_print(self, vlist, datatable, indextable, projection='*', where=None): 42 | if not self.check_table_exists(datatable): 43 | raise RelationNotExistError(datatable) 44 | return 45 | # user can only see everything except rid 46 | _attributes,_attributes_type = self.get_datatable_attribute(datatable) 47 | recordlist = self.select_records_of_version_list(vlist, indextable) 48 | if projection != '*': 49 | _attributes = projection.split(',') 50 | if where: 51 | sql = "SELECT %s FROM %s WHERE rid = ANY('%s'::int[]) AND %s;" % (",".join(_attributes), datatable, recordlist, "".join(where)) 52 | else: 53 | sql = "SELECT %s FROM %s WHERE rid = ANY('%s'::int[]);" % (",".join(_attributes), datatable, recordlist) 54 | self.conn.cursor.execute(sql) 55 | #print sql 56 | return _attributes, self.conn.cursor.fetchall() 57 | 58 | def checkout_meta_print(self, versiontable, projection='*', where=None): 59 | if not self.check_table_exists(versiontable): 60 | raise RelationNotExistError(datatable) 61 | return 62 | _attributes,_attributes_type = self.get_datatable_attribute(versiontable) 63 | 64 | # TODO: need to change the where clause to match the corresponding type 65 | # for example, text -> 'text' 66 | version_type_map = {} 67 | for (a,b) in zip(_attributes, _attributes_type): 68 | version_type_map[a] = b 69 | 70 | if where: 71 | # where can be any type, need to interpreter 72 | try: 73 | where_type = version_type_map[where[0]] # the attribute to do select on 74 | except KeyError: 75 | raise ColumnNotExistError(where[0]) 76 | return 77 | where_clause = where[0] + where[1] + "'%s'" % where[2] if where_type=='text' else "".join(where) 78 | sql = "SELECT %s from %s WHERE %s;" % (projection, versiontable, where_clause) 79 | else: 80 | sql = "SELECT %s from %s;" % (projection, versiontable) 81 | self.conn.cursor.execute(sql) 82 | #print sql 83 | return _attributes, self.conn.cursor.fetchall() 84 | 85 | # to_file needs an absolute path 86 | def checkout(self, vlist, datatable, indextable, to_table=None, to_file=None, delimiters=',', header=False, ignore=False): 87 | # sanity check 88 | if to_table: 89 | if RelationManager.reserve_table_check(to_table): 90 | raise ReservedRelationError(to_table) 91 | return 92 | if self.check_table_exists(to_table): # ask if user want to overwrite 93 | if ignore: 94 | self.drop_table_force(to_table) 95 | else: 96 | raise RelationOverwriteError(to_table) 97 | return 98 | 99 | if not self.check_table_exists(datatable): 100 | raise RelationNotExistError(datatable) 101 | return 102 | 103 | _attributes,_attributes_type = self.get_datatable_attribute(datatable) 104 | recordlist = self.select_records_of_version_list(vlist, indextable) 105 | #print recordlist 106 | if to_table: 107 | self.checkout_table(_attributes, recordlist, datatable, to_table, ignore) 108 | if to_file: 109 | self.checkout_file(_attributes, recordlist, datatable, to_file, delimiters, header) 110 | 111 | self.conn.connect.commit() 112 | 113 | def checkout_file(self, attributes, ridlist, datatable, to_file, delimiters, header): 114 | # convert to a tmp_table first 115 | self.drop_table_force('tmp_table') 116 | self.checkout_table(attributes, ridlist, datatable, 'tmp_table', None) 117 | sql = "COPY %s (%s) TO '%s' DELIMITER '%s' CSV HEADER;" if header else "COPY %s (%s) TO '%s' DELIMITER '%s' CSV;" 118 | sql = sql % ('tmp_table', ','.join(attributes), to_file, delimiters) 119 | self.conn.cursor.execute(sql) 120 | 121 | 122 | # Select the records into a new table 123 | def checkout_table(self, attributes, ridlist, datatable, to_table, ignore): 124 | if not ignore: 125 | sql = "SELECT %s INTO %s FROM %s WHERE rid = ANY('%s'::int[]);" \ 126 | % (', '.join(attributes), to_table, datatable, ridlist) 127 | else: 128 | # TODO 129 | self.get_primary_key(datatable) 130 | sql = "SELECT %s INTO %s FROM %s WHERE rid = ANY('%s'::int[]);" \ 131 | % (', '.join(attributes), to_table, datatable, ridlist) 132 | #print sql 133 | self.conn.cursor.execute(sql) 134 | 135 | 136 | def drop_table(self, table_name): 137 | if not self.check_table_exists(table_name): 138 | raise RelationNotExistError(table_name) 139 | return 140 | drop_sql = "DROP TABLE %s" % table_name 141 | self.conn.cursor.execute(drop_sql) 142 | self.conn.connect.commit() 143 | 144 | 145 | def drop_table_force(self, table_name): 146 | if not self.check_table_exists(table_name): 147 | return 148 | drop_sql = "DROP TABLE %s" % table_name 149 | self.conn.cursor.execute(drop_sql) 150 | self.conn.connect.commit() 151 | 152 | def select_all_rid(self, table_name): 153 | select_sql = "SELECT rid from %s;" % table_name 154 | self.conn.cursor.execute(select_sql) 155 | return [x[0] for x in self.conn.cursor.fetchall()] 156 | 157 | def generate_complement_sql(self, table1, view_name, attributes=None): 158 | if not attributes: 159 | sql = "TABLE %s EXCEPT TABLE %s" % (table1, view_name) 160 | else: 161 | sql = "(SELECT %s from %s) EXCEPT (SELECT %s from %s)" % (','.join(attributes), table1, ','.join(attributes), view_name) 162 | return sql 163 | 164 | def create_parent_view(self, datatable, indextable, parent_vlist, view_name): 165 | plist = ",".join(parent_vlist) 166 | sql = "CREATE VIEW %s AS \ 167 | SELECT * FROM %s INNER JOIN %s ON rid = ANY(rlist) \ 168 | WHERE vid = ANY(ARRAY[%s]);" % (view_name, datatable, indextable, plist) 169 | self.conn.cursor.execute(sql) 170 | 171 | def drop_view(self, view_name): 172 | sql = "DROP VIEW IF EXISTS %s;" % view_name 173 | self.conn.cursor.execute(sql) 174 | 175 | def select_intersection_table(self, table1, view_name, join_attributes, projection='rid'): 176 | # SELECT rid FROM tmp_table INNER JOIN dataset1_datatable ON tmp_table.employee_id=dataset1_datatable.employee_id; 177 | join_clause = " AND ".join(["%s.%s=%s.%s" % (table1, attr, view_name, attr) for attr in join_attributes]) 178 | sql = "SELECT %s.%s FROM %s INNER JOIN %s on %s;" % (view_name, projection, table1, view_name, join_clause) 179 | self.conn.cursor.execute(sql) 180 | return self.conn.cursor.fetchall() 181 | 182 | def convert_csv_to_table(self, file_path, destination_table, attributes, delimiters=',', header=False): 183 | sql = "COPY %s (%s) FROM '%s' DELIMITER '%s' CSV HEADER;" % (destination_table, ",".join(attributes), file_path, delimiters) if header \ 184 | else "COPY %s (%s) FROM '%s' DELIMITER '%s' CSV;" % (destination_table, ",".join(attributes), file_path, delimiters) 185 | self.conn.cursor.execute(sql) 186 | self.conn.connect.commit() 187 | 188 | def create_relation(self,table_name): 189 | # Use CREATE SQL COMMAND 190 | print "create_relation: Under Construction." 191 | 192 | # will drop existing table to create the new table 193 | def create_relation_force(self, table_name, sample_table, sample_table_attributes=None): 194 | if self.check_table_exists(table_name): 195 | self.drop_table(table_name) 196 | if not sample_table_attributes: 197 | sample_table_attributes,_ = self.get_datatable_attribute(sample_table) 198 | # sql = "CREATE TABLE %s ( like %s including all);" % (table_name, sample_table) 199 | 200 | # an easier approach to create empty table 201 | sql = "CREATE TABLE %s AS SELECT %s FROM %s WHERE 1=2;" % (table_name, ",".join(sample_table_attributes), sample_table) 202 | self.conn.cursor.execute(sql) 203 | self.conn.connect.commit() 204 | 205 | 206 | def check_table_exists(self,table_name): 207 | # SQL to check the exisistence of the table 208 | # print "checking if table %s exists" %(table_name) 209 | sql= "SELECT EXISTS (" \ 210 | "SELECT 1 " \ 211 | "FROM information_schema.tables " \ 212 | "WHERE table_name = '%s');" % table_name 213 | # print sql 214 | self.conn.cursor.execute(sql) 215 | result = self.conn.cursor.fetchall() 216 | # print result[0][0] 217 | return result[0][0] 218 | 219 | def update_datatable(self, datatable_name, sql): 220 | _attributes, _attributes_type = self.get_datatable_attribute(datatable_name) 221 | sql = "INSERT INTO %s (%s) %s RETURNING rid;" % (datatable_name, ', '.join(_attributes), sql) 222 | self.conn.cursor.execute(sql) 223 | new_rids=[t[0] for t in self.conn.cursor.fetchall()] 224 | self.conn.connect.commit() 225 | # print new_rids 226 | return new_rids 227 | 228 | def clean(self): 229 | print "Clean: Under Construction."#???? 230 | 231 | @staticmethod 232 | def reserve_table_check(name): 233 | ''' 234 | @summary: check if name is reserved 235 | @param name: name to be checked 236 | @result: return True if it is reserved 237 | ''' 238 | # return name == 'datatable' or name == 'indextbl' or name == 'version' or name == 'tmp_table' 239 | return '_datatable' in name or '_indexTbl' in name or '_version' in name or 'orpheus' in name 240 | 241 | 242 | def select_records_of_version_list(self, vlist, indextable): 243 | targetv= ','.join(vlist) 244 | # sql = "SELECT distinct rlist FROM %s WHERE vlist && (ARRAY[%s]);" % (indextable, targetv) 245 | sql = "SELECT distinct rlist FROM %s WHERE vid = ANY(ARRAY[%s]);" % (indextable, targetv) 246 | self.conn.cursor.execute(sql) 247 | data = [','.join(map(str,x[0])) for x in self.conn.cursor.fetchall()] 248 | # data 249 | return '{' + ','.join(data) + '}' 250 | 251 | def get_primary_key(self,tablename): #this method return nothing, what you want? 252 | sql="SELECT a.attname, format_type(a.atttypid, a.atttypmod) AS data_type FROM pg_index i " \ 253 | "JOIN pg_attribute a ON a.attrelid = i.indrelid " \ 254 | "AND a.attnum = ANY(i.indkey)" \ 255 | "WHERE i.indrelid = '%s'::regclass " \ 256 | "AND i.indisprimary;"%tablename 257 | self.conn.cursor.execute(sql) 258 | #print tablename+'\'s primary key' 259 | #print self.conn.cursor.fetchall() 260 | 261 | def get_number_of_rows(self,tablename): 262 | sql = "SELECT COUNT (*) from %s" % tablename 263 | self.conn.cursor.execute(sql) 264 | result = self.conn.cursor.fetchall() 265 | # print result 266 | return result[0][0] -------------------------------------------------------------------------------- /orpheus/core/executor.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import user 3 | import json 4 | import pandas as pd 5 | import os 6 | import sqlparse 7 | 8 | from db import DatasetExistsError 9 | from relation import RelationManager, RelationNotExistError, RelationOverwriteError, ReservedRelationError 10 | from orpheus_exceptions import BadStateError, NotImplementedError, BadParametersError 11 | from orpheus_sqlparse import SQLParser 12 | from django.contrib import messages 13 | from orpheus.core.vgraph import VersionGraph 14 | from access import AccessManager 15 | from version import VersionManager 16 | from metadata import MetadataManager 17 | from user_control import UserManager 18 | from orpheus_schema_parser import Parser as SimpleSchemaParser 19 | from helper import Print 20 | import orpheus_const as const 21 | 22 | class Executor(object): 23 | def __init__(self, config, request = False): 24 | self.config = config 25 | self.request = request 26 | self.p = Print(request) 27 | 28 | def exec_init(self, input_file, dataset, table, schema, conn): 29 | try: 30 | rel = RelationManager(conn) 31 | 32 | if (not table and not schema) or (table and schema): 33 | raise BadParametersError("Need either (not both) a table or a schema file") 34 | return 35 | 36 | abs_path = self.config['orpheus_home'] + schema if schema and schema[0] != '/' else schema 37 | 38 | if table: 39 | attribute_name , attribute_type = rel.get_datatable_attribute(table) 40 | else: 41 | attribute_name , attribute_type = SimpleSchemaParser.get_attribute_from_file(abs_path) 42 | 43 | except Exception as e: 44 | import traceback 45 | traceback.print_exc() 46 | self.p.perror(str(e)) 47 | raise Exception 48 | return 49 | # at this point, we have a valid conn obj and rel obj 50 | try: 51 | # schema of the dataset, of the type (name, type) 52 | schema_tuple = zip(attribute_name, attribute_type) 53 | # create new dataset 54 | conn.create_dataset(input_file, dataset, schema_tuple, attributes=attribute_name) 55 | # get all rids in list 56 | lis_rid = rel.select_all_rid(const.PUBLIC_SCHEMA + dataset + const.DATATABLE_SUFFIX) 57 | # init version info 58 | version = VersionManager(conn, self.request) 59 | 60 | version.init_version_graph_dataset(dataset, lis_rid, self.config['user']) 61 | version.init_index_table_dataset(dataset, lis_rid) 62 | except DatasetExistsError as e: 63 | self.p.perror(str(e)) 64 | return 65 | except Exception as e: 66 | # revert back to the state before create 67 | conn.drop_dataset(dataset) 68 | self.p.perror(str(e)) 69 | return 70 | 71 | graph = VersionGraph(self.config, self.request) 72 | try: 73 | graph.init_vGraph_json(dataset, 1) # init vid = 1 74 | except Exception as e: 75 | graph.delete_vGraph_json(dataset) 76 | raise Exception 77 | return 78 | self.p.pmessage("Dataset [%s] has been created successful" % dataset) 79 | 80 | def exec_drop(self, dataset, conn): 81 | # TODO: add a popup window to confirm 82 | # E.g. if click.confirm('Are you sure you want to drop %s?' % dataset): 83 | try: 84 | 85 | conn.drop_dataset(dataset) 86 | self.p.pmessage("Dataset [%s] has been dropped." % dataset) 87 | except Exception as e: 88 | self.p.perror(str(e)) 89 | raise Exception 90 | return 91 | 92 | graph = VersionGraph(self.config, self.request) 93 | graph.delete_vGraph_json(dataset) 94 | 95 | def exec_checkout(self, dataset, vlist, to_table, to_file, delimiters, header, ignore, conn): 96 | if not to_table and not to_file: 97 | self.p.perror(str(BadParametersError("Need a destination, either a table (-t) or a file (-f)"))) 98 | return 99 | try: 100 | relation = RelationManager(conn) 101 | except Exception as e: 102 | self.p.perror(str(e)) 103 | raise Exception 104 | return 105 | abs_path = self.config['orpheus_home'] + to_file if to_file and to_file[0] != '/' else to_file 106 | try: 107 | metadata = MetadataManager(self.config, self.request) 108 | meta_obj = metadata.load_meta() 109 | datatable = dataset + const.DATATABLE_SUFFIX 110 | indextable = dataset + const.INDEXTABLE_SUFFIX 111 | relation.checkout(vlist, datatable, indextable, to_table=to_table, to_file=abs_path, delimiters=delimiters, header=header, ignore=ignore) 112 | # update meta info 113 | AccessManager.grant_access(to_table, conn.user) 114 | metadata.update(to_table, abs_path, dataset, vlist, meta_obj) 115 | metadata.commit_meta(meta_obj) 116 | if to_table: 117 | self.p.pmessage("Table %s has been cloned from version %s" % (to_table, ",".join(vlist))) 118 | if to_file: 119 | self.p.pmessage("File %s has been cloned from version %s" % (to_file, ",".join(vlist))) 120 | except Exception as e: 121 | if to_table and not (RelationOverwriteError or ReservedRelationError): 122 | relation.drop_table(to_table) 123 | if to_file: 124 | pass # delete the file 125 | self.p.perror(str(e)) 126 | raise Exception 127 | return 128 | 129 | def exec_commit(self, message, table_name, file_name, delimiters, header, conn): 130 | # sanity check 131 | if not table_name and not file_name: 132 | self.p.perror(str(BadParametersError("Need a source, either a table (-t) or a file (-f)"))) 133 | return 134 | 135 | if table_name and file_name: 136 | self.p.perror(str(NotImplementedError("Can either commit a file or a table at a time"))) 137 | return 138 | 139 | try: 140 | relation = RelationManager(conn) 141 | metadata = MetadataManager(self.config, self.request) 142 | version = VersionManager(conn, self.request) 143 | except Exception as e: 144 | self.p.perror(str(e)) 145 | raise Exception 146 | return 147 | if table_name and not relation.check_table_exists(table_name): 148 | self.p.perror(str(RelationNotExistError(table_name))) 149 | raise Exception 150 | return 151 | # load parent information about the table 152 | # We need to get the derivation information of the committed table; 153 | # Otherwise, in the multitable scenario, we do not know which datatable/version_graph/index_table 154 | # that we need to update information. 155 | try: 156 | abs_path = self.config['orpheus_home'] + file_name if file_name else self.config['orpheus_home'] 157 | parent_vid_list = metadata.load_parent_id(table_name) if table_name else metadata.load_parent_id(abs_path, mapping='file_map') 158 | self.p.pmessage("Parent dataset is %s " % parent_vid_list[0]) 159 | self.p.pmessage("Parent versions are %s " % ",".join(parent_vid_list[1])) 160 | except Exception as e: 161 | self.p.perror(str(e)) 162 | raise Exception 163 | return 164 | parent_name = parent_vid_list[0] 165 | parent_list = parent_vid_list[1] 166 | 167 | datatable_name = parent_name + const.DATATABLE_SUFFIX 168 | indextable_name = parent_name + const.INDEXTABLE_SUFFIX 169 | graph_name = parent_name + const.VERSIONTABLE_SUFFIX 170 | try: 171 | # convert file into tmp_table first, then set the table_name to tmp_table 172 | if file_name: 173 | # need to know the schema for this file 174 | _attributes, _attributes_type = relation.get_datatable_attribute(datatable_name) 175 | 176 | relation.create_relation_force('tmp_table', datatable_name, sample_table_attributes=_attributes) # create a tmp table 177 | relation.convert_csv_to_table(abs_path, 'tmp_table', _attributes , delimiters=delimiters, header=header) # push everything from csv to tmp_table 178 | table_name = 'tmp_table' 179 | except Exception as e: 180 | self.p.perror(str(e)) 181 | raise Exception 182 | return 183 | 184 | 185 | if table_name: 186 | try: 187 | _attributes, _attributes_type = relation.get_datatable_attribute(datatable_name) 188 | commit_attributes, commit_type = relation.get_datatable_attribute(table_name) 189 | if len(set(_attributes) - set(commit_attributes)) > 0: 190 | raise BadStateError("%s and %s have different attributes" % (table_name, parent_name)) 191 | view_name = "%s_view" % parent_name 192 | relation.create_parent_view(datatable_name, indextable_name, parent_list, view_name) 193 | existing_rids = [t[0] for t in relation.select_intersection_table(table_name, view_name, commit_attributes)] 194 | sql = relation.generate_complement_sql(table_name, view_name, attributes=_attributes) 195 | 196 | new_rids = relation.update_datatable(datatable_name, sql) 197 | relation.drop_view(view_name) 198 | 199 | self.p.pmessage("Found %s new records" % len(new_rids)) 200 | self.p.pmessage("Found %s existing records" % len(existing_rids)) 201 | 202 | current_version_rid = existing_rids + new_rids 203 | 204 | # it can happen that there are duplicate in here 205 | table_create_time = metadata.load_table_create_time(table_name) if table_name != 'tmp_table' else None 206 | 207 | # update version graph 208 | curt_vid = version.update_version_graph(graph_name, self.config['user'], len(current_version_rid), parent_list, table_create_time, message) 209 | 210 | # update index table 211 | version.update_index_table(indextable_name, curt_vid, current_version_rid) 212 | self.p.pmessage("Committing version %s with %s records" % (curt_vid, len(current_version_rid))) 213 | 214 | metadata.update_parent_id(table_name, parent_name, curt_vid) if table_name else metadata.update_parent_id(abs_path, parent_name, curt_vid, mapping='file_map') 215 | except Exception as e: 216 | view_name = "%s_view" % parent_name 217 | relation.drop_view(view_name) 218 | self.p.perror(str(e)) 219 | raise Exception 220 | return 221 | 222 | if relation.check_table_exists('tmp_table'): 223 | relation.drop_table('tmp_table') 224 | 225 | graph = VersionGraph(self.config, self.request) 226 | graph.update_vGraph_json(parent_name, curt_vid, parent_list) 227 | 228 | self.p.pmessage("Version %s has been committed!" % curt_vid) 229 | return parent_name, curt_vid, parent_list 230 | 231 | def exec_run(self, sql, conn): 232 | try: 233 | # execute_sql_line(ctx, sql) 234 | sqlparser = SQLParser(conn) 235 | executable_sql = sqlparser.parse(sql) 236 | return conn.execute_sql(executable_sql) 237 | except Exception as e: 238 | import traceback 239 | traceback.print_exc() 240 | messages.error(self.request, str(e)) 241 | raise Exception 242 | return 243 | 244 | def exec_explain(self, sql, conn): 245 | try: 246 | # execute_sql_line(ctx, sql) 247 | sqlparser = SQLParser(conn) 248 | executable_sql = sqlparser.parse(sql) 249 | ret_sql = sqlparse.format(executable_sql, reindent=True, keyword_case='upper') 250 | # TODO: less hacky -- success = SQL, others = other message 251 | messages.success(self.request, ret_sql) 252 | # messages.info(self.request, ret_sql, extra_tags='sql') 253 | return None 254 | except Exception as e: 255 | import traceback 256 | traceback.print_exc() 257 | messages.error(self.request, str(e)) 258 | raise Exception 259 | return 260 | 261 | # Show the underlying data structure 262 | def exec_show(self, dataset, conn): 263 | table_list = [] 264 | 265 | versiontable = dataset + const.VERSIONTABLE_SUFFIX 266 | self.__exec_show_helper(conn, versiontable, table_list) 267 | 268 | datatable = dataset + const.DATATABLE_SUFFIX 269 | self.__exec_show_helper(conn, datatable, table_list, "rid") 270 | 271 | indextable = dataset + const.INDEXTABLE_SUFFIX 272 | self.__exec_show_helper(conn, indextable, table_list) 273 | 274 | return table_list 275 | 276 | def __exec_show_helper(self, conn, table_name, table_list, pk = "vid"): 277 | sql = "SELECT * FROM %s ORDER BY %s LIMIT 4 ;" % (table_name, pk) 278 | attr_names, transactions = conn.execute_sql(sql) 279 | table_list.append((attr_names, transactions)) 280 | 281 | def exec_restore(self, conn): 282 | cvd_name = "protein_links" 283 | try: 284 | 285 | # Drop all CVDs 286 | cvd_list = CVDs.objects.values('name') 287 | for cvd in cvd_list: 288 | messages.info(self.request, "Dropping the CVD [%s] ..." % cvd['name']) 289 | self.exec_drop(cvd['name']) 290 | 291 | 292 | # Drop all private tables 293 | private_tables = PrivateTables.objects.values('name') 294 | for table in private_tables: 295 | messages.info(self.request, "Dropping the private table [%s] ..." % table['name']) 296 | sql = "DROP TABLE IF EXISTS \"%s\";" % table['name'] 297 | conn.refresh_cursor() 298 | conn.execute_sql(sql) 299 | 300 | # delete all local files 301 | private_files = PrivateFiles.objects.values('name') 302 | for file_name in private_files: 303 | messages.info(self.request, "Dropping the private file [%s]" % file_name['name']) 304 | fpath = self.config['orpheus_home'] + file_name['name'] 305 | try: 306 | os.remove(fpath) 307 | except OSError: 308 | pass 309 | 310 | # delete all tuple in privatetables and privatefiles, 311 | # and all but "protein_link" in CVDs 312 | PrivateTables.objects.all().delete() 313 | PrivateFiles.objects.all().delete() 314 | CVDs.objects.filter(~ Q(name = cvd_name)).delete() 315 | messages.info(self.request, "Cleared all Django models") 316 | 317 | # restore records from protein_links_backup 318 | datatable = const.PUBLIC_SCHEMA + cvd_name + const.DATATABLE_SUFFIX 319 | self.__exec_restore_helper(conn, datatable, True, "rid") 320 | messages.info(self.request, "Restored %s " % datatable) 321 | 322 | indextable = const.PUBLIC_SCHEMA + cvd_name + const.INDEXTABLE_SUFFIX 323 | self.__exec_restore_helper(conn, indextable) 324 | messages.info(self.request, "Restored %s " % indextable) 325 | 326 | versiontable = const.PUBLIC_SCHEMA + cvd_name + const.VERSIONTABLE_SUFFIX 327 | self.__exec_restore_helper(conn, versiontable) 328 | messages.info(self.request, "Restored %s " % versiontable) 329 | 330 | # Copy vGraph from backup 331 | vGraph_path = self.config['vGraph_json'] + "/" + cvd_name 332 | vGraph_path_backup = self.config['vGraph_json'] + "/" + cvd_name + "_backup" 333 | with open(vGraph_path_backup) as f: 334 | with open(vGraph_path, "w") as f1: 335 | for line in f: 336 | f1.write(line) 337 | f.close() 338 | f1.close() 339 | messages.info(self.request, "Restored Version Graph ") 340 | 341 | except Exception: 342 | conn.refresh_cursor() 343 | # TODO the exception message is vague 344 | raise Exception 345 | return 346 | 347 | def __exec_restore_helper(self, conn, table_name, isSerial = False, pk = "vid"): 348 | sql = "SELECT * INTO %s FROM %s;" % (table_name, table_name + "_backup") 349 | conn.refresh_cursor() 350 | conn.execute_sql(sql) 351 | if isSerial: 352 | sql = "ALTER TABLE %s ADD COLUMN %s SERIAL PRIMARY KEY;" % (table_name, pk) 353 | else: 354 | sql = "ALTER TABLE %s ADD COLUMN %s PRIMARY KEY" % (table_name, pk) 355 | conn.connect.commit() 356 | -------------------------------------------------------------------------------- /orpheus/interface/main/static/assets/js/vendor/holder.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | 3 | Holder - client side image placeholders 4 | Version 2.6.0+51ebp 5 | © 2015 Ivan Malopinsky - http://imsky.co 6 | 7 | Site: http://holderjs.com 8 | Issues: https://github.com/imsky/holder/issues 9 | License: http://opensource.org/licenses/MIT 10 | 11 | */ 12 | !function(a,b){"object"==typeof exports&&"object"==typeof module?module.exports=b():"function"==typeof define&&define.amd?define(b):"object"==typeof exports?exports.Holder=b():a.Holder=b()}(this,function(){return function(a){function b(d){if(c[d])return c[d].exports;var e=c[d]={exports:{},id:d,loaded:!1};return a[d].call(e.exports,e,e.exports,b),e.loaded=!0,e.exports}var c={};return b.m=a,b.c=c,b.p="",b(0)}([function(a,b,c){(function(b){function d(a,b,c,d){var g=e(c.substr(c.lastIndexOf(a.domain)),a);g&&f({mode:null,el:d,flags:g,engineSettings:b})}function e(a,b){for(var c={theme:y(K.settings.themes.gray,null),stylesheets:b.stylesheets,holderURL:[]},d=!1,e=String.fromCharCode(11),f=a.replace(/([^\\])\//g,"$1"+e).split(e),g=/%[0-9a-f]{2}/gi,h=f.length,i=0;h>i;i++){var j=f[i];if(j.match(g))try{j=decodeURIComponent(j)}catch(k){j=f[i]}var l=!1;if(K.flags.dimensions.match(j))d=!0,c.dimensions=K.flags.dimensions.output(j),l=!0;else if(K.flags.fluid.match(j))d=!0,c.dimensions=K.flags.fluid.output(j),c.fluid=!0,l=!0;else if(K.flags.textmode.match(j))c.textmode=K.flags.textmode.output(j),l=!0;else if(K.flags.colors.match(j)){var m=K.flags.colors.output(j);c.theme=y(c.theme,m),l=!0}else if(b.themes[j])b.themes.hasOwnProperty(j)&&(c.theme=y(b.themes[j],null)),l=!0;else if(K.flags.font.match(j))c.font=K.flags.font.output(j),l=!0;else if(K.flags.auto.match(j))c.auto=!0,l=!0;else if(K.flags.text.match(j))c.text=K.flags.text.output(j),l=!0;else if(K.flags.size.match(j))c.size=K.flags.size.output(j),l=!0;else if(K.flags.random.match(j)){null==K.vars.cache.themeKeys&&(K.vars.cache.themeKeys=Object.keys(b.themes));var n=K.vars.cache.themeKeys[0|Math.random()*K.vars.cache.themeKeys.length];c.theme=y(b.themes[n],null),l=!0}l&&c.holderURL.push(j)}return c.holderURL.unshift(b.domain),c.holderURL=c.holderURL.join("/"),d?c:!1}function f(a){var b=a.mode,c=a.el,d=a.flags,e=a.engineSettings,f=d.dimensions,h=d.theme,i=f.width+"x"+f.height;if(b=null==b?d.fluid?"fluid":"image":b,null!=d.text&&(h.text=d.text,"object"===c.nodeName.toLowerCase())){for(var l=h.text.split("\\n"),m=0;m1){var l=0,m=0,n=a.width*K.setup.lineWrapRatio,o=0;k=new e.Group("line"+o);for(var p=0;p=n||r===!0)&&(b(g,k,l,g.properties.leading),l=0,m+=g.properties.leading,o+=1,k=new e.Group("line"+o),k.y=m),r!==!0&&(j.moveTo(l,0),l+=h.spaceWidth+q.width,k.add(j))}b(g,k,l,g.properties.leading);for(var s in g.children)k=g.children[s],k.moveTo((g.width-k.width)/2,null,null);g.moveTo((a.width-g.width)/2,(a.height-g.height)/2,null),(a.height-g.height)/2<0&&g.moveTo(null,0,null)}else j=new e.Text(a.text),k=new e.Group("line0"),k.add(j),g.add(k),g.moveTo((a.width-h.boundingBox.width)/2,(a.height-h.boundingBox.height)/2,null);return d}function i(a,b,c){var d=parseInt(a,10),e=parseInt(b,10),f=Math.max(d,e),g=Math.min(d,e),h=.8*Math.min(g,f*K.defaults.scale);return Math.round(Math.max(c,h))}function j(a){var b;b=null==a||null==a.nodeType?K.vars.resizableImages:[a];for(var c=0,d=b.length;d>c;c++){var e=b[c];if(e.holderData){var f=e.holderData.flags,h=E(e);if(h){if(!e.holderData.resizeUpdate)continue;if(f.fluid&&f.auto){var i=e.holderData.fluidConfig;switch(i.mode){case"width":h.height=h.width/i.ratio;break;case"height":h.width=h.height*i.ratio}}var j={mode:"image",holderSettings:{dimensions:h,theme:f.theme,flags:f},el:e,engineSettings:e.holderData.engineSettings};"exact"==f.textmode&&(f.exactDimensions=h,j.holderSettings.dimensions=f.dimensions),g(j)}else n(e)}}}function k(a){if(a.holderData){var b=E(a);if(b){var c=a.holderData.flags,d={fluidHeight:"%"==c.dimensions.height.slice(-1),fluidWidth:"%"==c.dimensions.width.slice(-1),mode:null,initialDimensions:b};d.fluidWidth&&!d.fluidHeight?(d.mode="width",d.ratio=d.initialDimensions.width/parseFloat(c.dimensions.height)):!d.fluidWidth&&d.fluidHeight&&(d.mode="height",d.ratio=parseFloat(c.dimensions.width)/d.initialDimensions.height),a.holderData.fluidConfig=d}else n(a)}}function l(){for(var a,c=[],d=Object.keys(K.vars.invisibleImages),e=0,f=d.length;f>e;e++)a=K.vars.invisibleImages[d[e]],E(a)&&"img"==a.nodeName.toLowerCase()&&(c.push(a),delete K.vars.invisibleImages[d[e]]);c.length&&J.run({images:c}),b.requestAnimationFrame(l)}function m(){K.vars.visibilityCheckStarted||(b.requestAnimationFrame(l),K.vars.visibilityCheckStarted=!0)}function n(a){a.holderData.invisibleId||(K.vars.invisibleId+=1,K.vars.invisibleImages["i"+K.vars.invisibleId]=a,a.holderData.invisibleId=K.vars.invisibleId)}function o(a,b){return null==b?document.createElement(a):document.createElementNS(b,a)}function p(a,b){for(var c in b)a.setAttribute(c,b[c])}function q(a,b,c){var d,e;null==a?(a=o("svg",F),d=o("defs",F),e=o("style",F),p(e,{type:"text/css"}),d.appendChild(e),a.appendChild(d)):e=a.querySelector("style"),a.webkitMatchesSelector&&a.setAttribute("xmlns",F);for(var f=0;f=0;h--){var i=g.createProcessingInstruction("xml-stylesheet",'href="'+f[h]+'" rel="stylesheet"');g.insertBefore(i,g.firstChild)}var j=g.createProcessingInstruction("xml",'version="1.0" encoding="UTF-8" standalone="yes"');g.insertBefore(j,g.firstChild),g.removeChild(g.documentElement),e=d.serializeToString(g)}var k=d.serializeToString(a);return k=k.replace(/\&(\#[0-9]{2,}\;)/g,"&$1"),e+k}}function s(){return b.DOMParser?(new DOMParser).parseFromString("","application/xml"):void 0}function t(a){K.vars.debounceTimer||a.call(this),K.vars.debounceTimer&&b.clearTimeout(K.vars.debounceTimer),K.vars.debounceTimer=b.setTimeout(function(){K.vars.debounceTimer=null,a.call(this)},K.setup.debounce)}function u(){t(function(){j(null)})}var v=c(1),w=c(2),x=c(3),y=x.extend,z=x.cssProps,A=x.encodeHtmlEntity,B=x.decodeHtmlEntity,C=x.imageExists,D=x.getNodeArray,E=x.dimensionCheck,F="http://www.w3.org/2000/svg",G=8,H="2.6.0",I="\nCreated with Holder.js "+H+".\nLearn more at http://holderjs.com\n(c) 2012-2015 Ivan Malopinsky - http://imsky.co\n",J={version:H,addTheme:function(a,b){return null!=a&&null!=b&&(K.settings.themes[a]=b),delete K.vars.cache.themeKeys,this},addImage:function(a,b){var c=document.querySelectorAll(b);if(c.length)for(var d=0,e=c.length;e>d;d++){var f=o("img"),g={};g[K.vars.dataAttr]=a,p(f,g),c[d].appendChild(f)}return this},setResizeUpdate:function(a,b){a.holderData&&(a.holderData.resizeUpdate=!!b,a.holderData.resizeUpdate&&j(a))},run:function(a){a=a||{};var c={},g=y(K.settings,a);K.vars.preempted=!0,K.vars.dataAttr=g.dataAttr||K.vars.dataAttr,c.renderer=g.renderer?g.renderer:K.setup.renderer,-1===K.setup.renderers.join(",").indexOf(c.renderer)&&(c.renderer=K.setup.supportsSVG?"svg":K.setup.supportsCanvas?"canvas":"html");var h=D(g.images),i=D(g.bgnodes),j=D(g.stylenodes),k=D(g.objects);c.stylesheets=[],c.svgXMLStylesheet=!0,c.noFontFallback=g.noFontFallback?g.noFontFallback:!1;for(var l=0;l1){c.nodeValue="";for(var u=0;u=0?b:1)}function f(a){v?e(a):w.push(a)}null==document.readyState&&document.addEventListener&&(document.addEventListener("DOMContentLoaded",function y(){document.removeEventListener("DOMContentLoaded",y,!1),document.readyState="complete"},!1),document.readyState="loading");var g=a.document,h=g.documentElement,i="load",j=!1,k="on"+i,l="complete",m="readyState",n="attachEvent",o="detachEvent",p="addEventListener",q="DOMContentLoaded",r="onreadystatechange",s="removeEventListener",t=p in g,u=j,v=j,w=[];if(g[m]===l)e(b);else if(t)g[p](q,c,j),a[p](i,c,j);else{g[n](r,c),a[n](k,c);try{u=null==a.frameElement&&h}catch(x){}u&&u.doScroll&&!function z(){if(!v){try{u.doScroll("left")}catch(a){return e(z,50)}d(),b()}}()}return f.version="1.4.0",f.isReady=function(){return v},f}a.exports="undefined"!=typeof window&&b(window)},function(a,b,c){var d=c(4),e=function(a){function b(a,b){for(var c in b)a[c]=b[c];return a}var c=1,e=d.defclass({constructor:function(a){c++,this.parent=null,this.children={},this.id=c,this.name="n"+c,null!=a&&(this.name=a),this.x=0,this.y=0,this.z=0,this.width=0,this.height=0},resize:function(a,b){null!=a&&(this.width=a),null!=b&&(this.height=b)},moveTo:function(a,b,c){this.x=null!=a?a:this.x,this.y=null!=b?b:this.y,this.z=null!=c?c:this.z},add:function(a){var b=a.name;if(null!=this.children[b])throw"SceneGraph: child with that name already exists: "+b;this.children[b]=a,a.parent=this}}),f=d(e,function(b){this.constructor=function(){b.constructor.call(this,"root"),this.properties=a}}),g=d(e,function(a){function c(c,d){if(a.constructor.call(this,c),this.properties={fill:"#000"},null!=d)b(this.properties,d);else if(null!=c&&"string"!=typeof c)throw"SceneGraph: invalid node name"}this.Group=d.extend(this,{constructor:c,type:"group"}),this.Rect=d.extend(this,{constructor:c,type:"rect"}),this.Text=d.extend(this,{constructor:function(a){c.call(this),this.properties.text=a},type:"text"})}),h=new f;return this.Shape=g,this.root=h,this};a.exports=e},function(a,b){(function(a){b.extend=function(a,b){var c={};for(var d in a)a.hasOwnProperty(d)&&(c[d]=a[d]);if(null!=b)for(var e in b)b.hasOwnProperty(e)&&(c[e]=b[e]);return c},b.cssProps=function(a){var b=[];for(var c in a)a.hasOwnProperty(c)&&b.push(c+":"+a[c]);return b.join(";")},b.encodeHtmlEntity=function(a){for(var b=[],c=0,d=a.length-1;d>=0;d--)c=a.charCodeAt(d),b.unshift(c>128?["&#",c,";"].join(""):a[d]);return b.join("")},b.getNodeArray=function(b){var c=null;return"string"==typeof b?c=document.querySelectorAll(b):a.NodeList&&b instanceof a.NodeList?c=b:a.Node&&b instanceof a.Node?c=[b]:a.HTMLCollection&&b instanceof a.HTMLCollection?c=b:b instanceof Array?c=b:null===b&&(c=[]),c},b.imageExists=function(a,b){var c=new Image;c.onerror=function(){b.call(this,!1)},c.onload=function(){b.call(this,!0)},c.src=a},b.decodeHtmlEntity=function(a){return a.replace(/&#(\d+);/g,function(a,b){return String.fromCharCode(b)})},b.dimensionCheck=function(a){var b={height:a.clientHeight,width:a.clientWidth};return b.height&&b.width?b:!1}}).call(b,function(){return this}())},function(a){var b=function(){},c=Array.prototype.slice,d=function(a,d){var e=b.prototype="function"==typeof a?a.prototype:a,f=new b,g=d.apply(f,c.call(arguments,2).concat(e));if("object"==typeof g)for(var h in g)f[h]=g[h];if(!f.hasOwnProperty("constructor"))return f;var i=f.constructor;return i.prototype=f,i};d.defclass=function(a){var b=a.constructor;return b.prototype=a,b},d.extend=function(a,b){return d(a,function(a){return this.uber=a,b})},a.exports=d}])}); -------------------------------------------------------------------------------- /orpheus/interface/main/static/assets/css/docs.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * IE10 viewport hack for Surface/desktop Windows 8 bug 3 | * Copyright 2014-2015 Twitter, Inc. 4 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 5 | */@-ms-viewport{width:device-width}@-o-viewport{width:device-width}@viewport{width:device-width}.hll{background-color:#ffc}.c{color:#999}.err{color:#A00;background-color:#FAA}.k{color:#069}.o{color:#555}.cm{color:#999}.cp{color:#099}.c1{color:#999}.cs{color:#999}.gd{background-color:#FCC;border:1px solid #C00}.ge{font-style:italic}.gr{color:red}.gh{color:#030}.gi{background-color:#CFC;border:1px solid #0C0}.go{color:#AAA}.gp{color:#009}.gu{color:#030}.gt{color:#9C6}.kc{color:#069}.kd{color:#069}.kn{color:#069}.kp{color:#069}.kr{color:#069}.kt{color:#078}.m{color:#F60}.s{color:#d44950}.na{color:#4f9fcf}.nb{color:#366}.nc{color:#0A8}.no{color:#360}.nd{color:#99F}.ni{color:#999}.ne{color:#C00}.nf{color:#C0F}.nl{color:#99F}.nn{color:#0CF}.nt{color:#2f6f9f}.nv{color:#033}.ow{color:#000}.w{color:#bbb}.mf{color:#F60}.mh{color:#F60}.mi{color:#F60}.mo{color:#F60}.sb{color:#C30}.sc{color:#C30}.sd{color:#C30;font-style:italic}.s2{color:#C30}.se{color:#C30}.sh{color:#C30}.si{color:#A00}.sx{color:#C30}.sr{color:#3AA}.s1{color:#C30}.ss{color:#FC3}.bp{color:#366}.vc{color:#033}.vg{color:#033}.vi{color:#033}.il{color:#F60}.css .nt+.nt,.css .o,.css .o+.nt{color:#999}/*! 6 | * Bootstrap Docs (http://getbootstrap.com) 7 | * Copyright 2011-2016 Twitter, Inc. 8 | * Licensed under the Creative Commons Attribution 3.0 Unported License. For 9 | * details, see https://creativecommons.org/licenses/by/3.0/. 10 | */body{position:relative}.table code{font-size:13px;font-weight:400}h2 code,h3 code,h4 code{background-color:inherit}.btn-outline{color:#563d7c;background-color:transparent;border-color:#563d7c}.btn-outline:active,.btn-outline:focus,.btn-outline:hover{color:#fff;background-color:#563d7c;border-color:#563d7c}.btn-outline-inverse{color:#fff;background-color:transparent;border-color:#cdbfe3}.btn-outline-inverse:active,.btn-outline-inverse:focus,.btn-outline-inverse:hover{color:#563d7c;text-shadow:none;background-color:#fff;border-color:#fff}.bs-docs-booticon{display:block;font-weight:500;color:#fff;text-align:center;cursor:default;background-color:#563d7c;border-radius:15%}.bs-docs-booticon-sm{width:30px;height:30px;font-size:20px;line-height:28px}.bs-docs-booticon-lg{width:144px;height:144px;font-size:108px;line-height:140px}.bs-docs-booticon-inverse{color:#563d7c;background-color:#fff}.bs-docs-booticon-outline{background-color:transparent;border:1px solid #cdbfe3}#skippy{display:block;padding:1em;color:#fff;background-color:#6f5499;outline:0}#skippy .skiplink-text{padding:.5em;outline:1px dotted}#content:focus{outline:0}.bs-docs-nav{margin-bottom:0;background-color:#fff;border-bottom:0}.bs-home-nav .bs-nav-b{display:none}.bs-docs-nav .navbar-brand,.bs-docs-nav .navbar-nav>li>a{font-weight:500;color:#563d7c}.bs-docs-nav .navbar-nav>.active>a,.bs-docs-nav .navbar-nav>.active>a:hover,.bs-docs-nav .navbar-nav>li>a:hover{color:#463265;background-color:#f9f9f9}.bs-docs-nav .navbar-toggle .icon-bar{background-color:#563d7c}.bs-docs-nav .navbar-header .navbar-toggle{border-color:#fff}.bs-docs-nav .navbar-header .navbar-toggle:focus,.bs-docs-nav .navbar-header .navbar-toggle:hover{background-color:#f9f9f9;border-color:#f9f9f9}.bs-docs-footer{padding-top:50px;padding-bottom:50px;margin-top:100px;color:#99979c;text-align:center;background-color:#2a2730}.bs-docs-footer a{color:#fff}.bs-docs-footer-links{padding-left:0;margin-bottom:20px}.bs-docs-footer-links li{display:inline-block}.bs-docs-footer-links li+li{margin-left:15px}@media (min-width:768px){.bs-docs-footer{text-align:left}.bs-docs-footer p{margin-bottom:0}}.bs-docs-header,.bs-docs-masthead{position:relative;padding:30px 0;color:#cdbfe3;text-align:center;text-shadow:0 1px 0 rgba(0,0,0,.1);background-color:#6f5499;background-image:-webkit-gradient(linear,left top,left bottom,from(#563d7c),to(#6f5499));background-image:-webkit-linear-gradient(top,#563d7c 0,#6f5499 100%);background-image:-o-linear-gradient(top,#563d7c 0,#6f5499 100%);background-image:linear-gradient(to bottom,#563d7c 0,#6f5499 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#563d7c', endColorstr='#6F5499', GradientType=0);background-repeat:repeat-x}.bs-docs-masthead .bs-docs-booticon{margin:0 auto 30px}.bs-docs-masthead h1{font-weight:300;line-height:1;color:#fff}.bs-docs-masthead .lead{margin:0 auto 30px;font-size:20px;color:#fff}.bs-docs-masthead .version{margin-top:-15px;margin-bottom:30px;color:#9783b9}.bs-docs-masthead .btn{width:100%;padding:15px 30px;font-size:20px}@media (min-width:480px){.bs-docs-masthead .btn{width:auto}}@media (min-width:768px){.bs-docs-masthead{padding:80px 0}.bs-docs-masthead h1{font-size:60px}.bs-docs-masthead .lead{font-size:24px}}@media (min-width:992px){.bs-docs-masthead .lead{width:80%;font-size:30px}}.bs-docs-header{margin-bottom:40px;font-size:20px}.bs-docs-header h1{margin-top:0;color:#fff}.bs-docs-header p{margin-bottom:0;font-weight:300;line-height:1.4}.bs-docs-header .container{position:relative}@media (min-width:768px){.bs-docs-header{padding-top:60px;padding-bottom:60px;font-size:24px;text-align:left}.bs-docs-header h1{font-size:60px;line-height:1}}@media (min-width:992px){.bs-docs-header h1,.bs-docs-header p{margin-right:380px}}.carbonad{width:auto!important;height:auto!important;padding:20px!important;margin:30px -15px -31px!important;overflow:hidden;font-size:13px!important;line-height:16px!important;text-align:left;background:0 0!important;border:solid #866ab3!important;border-width:1px 0!important}.carbonad-img{margin:0!important}.carbonad-tag,.carbonad-text{display:block!important;float:none!important;width:auto!important;height:auto!important;margin-left:145px!important;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif!important}.carbonad-text{padding-top:0!important}.carbonad-tag{color:inherit!important;text-align:left!important}.carbonad-tag a,.carbonad-text a{color:#fff!important}.carbonad #azcarbon>img{display:none}@media (min-width:480px){.carbonad{width:330px!important;margin:20px auto!important;border-width:1px!important;border-radius:4px}.bs-docs-masthead .carbonad{margin:50px auto 0!important}}@media (min-width:768px){.carbonad{margin-right:0!important;margin-left:0!important}}@media (min-width:992px){.carbonad{position:absolute;top:0;right:15px;width:330px!important;padding:15px!important;margin:0!important}.bs-docs-masthead .carbonad{position:static}}.bs-docs-featurette{padding-top:40px;padding-bottom:40px;font-size:16px;line-height:1.5;color:#555;text-align:center;background-color:#fff;border-bottom:1px solid #e5e5e5}.bs-docs-featurette+.bs-docs-footer{margin-top:0;border-top:0}.bs-docs-featurette-title{margin-bottom:5px;font-size:30px;font-weight:400;color:#333}.half-rule{width:100px;margin:40px auto}.bs-docs-featurette h3{margin-bottom:5px;font-weight:400;color:#333}.bs-docs-featurette-img{display:block;margin-bottom:20px;color:#333}.bs-docs-featurette-img:hover{color:#337ab7;text-decoration:none}.bs-docs-featurette-img img{display:block;margin-bottom:15px}@media (min-width:480px){.bs-docs-featurette .img-responsive{margin-top:30px}}@media (min-width:768px){.bs-docs-featurette{padding-top:100px;padding-bottom:100px}.bs-docs-featurette-title{font-size:40px}.bs-docs-featurette .lead{max-width:80%;margin-right:auto;margin-left:auto}.bs-docs-featurette .img-responsive{margin-top:0}}.bs-docs-featured-sites{margin-right:-1px;margin-left:-1px}.bs-docs-featured-sites .col-xs-6{padding:1px}.bs-docs-featured-sites .img-responsive{margin-top:0}@media (min-width:768px){.bs-docs-featured-sites .col-sm-3:first-child img{border-top-left-radius:4px;border-bottom-left-radius:4px}.bs-docs-featured-sites .col-sm-3:last-child img{border-top-right-radius:4px;border-bottom-right-radius:4px}}.bs-examples .thumbnail{margin-bottom:10px}.bs-examples h4{margin-bottom:5px}.bs-examples p{margin-bottom:20px}@media (max-width:480px){.bs-examples{margin-right:-10px;margin-left:-10px}.bs-examples>[class^=col-]{padding-right:10px;padding-left:10px}}.bs-docs-sidebar.affix{position:static}@media (min-width:768px){.bs-docs-sidebar{padding-left:20px}}.bs-docs-sidenav{margin-top:20px;margin-bottom:20px}.bs-docs-sidebar .nav>li>a{display:block;padding:4px 20px;font-size:13px;font-weight:500;color:#767676}.bs-docs-sidebar .nav>li>a:focus,.bs-docs-sidebar .nav>li>a:hover{padding-left:19px;color:#563d7c;text-decoration:none;background-color:transparent;border-left:1px solid #563d7c}.bs-docs-sidebar .nav>.active:focus>a,.bs-docs-sidebar .nav>.active:hover>a,.bs-docs-sidebar .nav>.active>a{padding-left:18px;font-weight:700;color:#563d7c;background-color:transparent;border-left:2px solid #563d7c}.bs-docs-sidebar .nav .nav{display:none;padding-bottom:10px}.bs-docs-sidebar .nav .nav>li>a{padding-top:1px;padding-bottom:1px;padding-left:30px;font-size:12px;font-weight:400}.bs-docs-sidebar .nav .nav>li>a:focus,.bs-docs-sidebar .nav .nav>li>a:hover{padding-left:29px}.bs-docs-sidebar .nav .nav>.active:focus>a,.bs-docs-sidebar .nav .nav>.active:hover>a,.bs-docs-sidebar .nav .nav>.active>a{padding-left:28px;font-weight:500}.back-to-top,.bs-docs-theme-toggle{display:none;padding:4px 10px;margin-top:10px;margin-left:10px;font-size:12px;font-weight:500;color:#999}.back-to-top:hover,.bs-docs-theme-toggle:hover{color:#563d7c;text-decoration:none}.bs-docs-theme-toggle{margin-top:0}@media (min-width:768px){.back-to-top,.bs-docs-theme-toggle{display:block}}@media (min-width:992px){.bs-docs-sidebar .nav>.active>ul{display:block}.bs-docs-sidebar.affix,.bs-docs-sidebar.affix-bottom{width:213px}.bs-docs-sidebar.affix{position:fixed;top:20px}.bs-docs-sidebar.affix-bottom{position:absolute}.bs-docs-sidebar.affix .bs-docs-sidenav,.bs-docs-sidebar.affix-bottom .bs-docs-sidenav{margin-top:0;margin-bottom:0}}@media (min-width:1200px){.bs-docs-sidebar.affix,.bs-docs-sidebar.affix-bottom{width:263px}}.bs-docs-section{margin-bottom:60px}.bs-docs-section:last-child{margin-bottom:0}h1[id]{padding-top:20px;margin-top:0}.bs-callout{padding:20px;margin:20px 0;border:1px solid #eee;border-left-width:5px;border-radius:3px}.bs-callout h4{margin-top:0;margin-bottom:5px}.bs-callout p:last-child{margin-bottom:0}.bs-callout code{border-radius:3px}.bs-callout+.bs-callout{margin-top:-5px}.bs-callout-danger{border-left-color:#ce4844}.bs-callout-danger h4{color:#ce4844}.bs-callout-warning{border-left-color:#aa6708}.bs-callout-warning h4{color:#aa6708}.bs-callout-info{border-left-color:#1b809e}.bs-callout-info h4{color:#1b809e}.color-swatches{margin:0 -5px;overflow:hidden}.color-swatch{float:left;width:60px;height:60px;margin:0 5px;border-radius:3px}@media (min-width:768px){.color-swatch{width:100px;height:100px}}.color-swatches .gray-darker{background-color:#222}.color-swatches .gray-dark{background-color:#333}.color-swatches .gray{background-color:#555}.color-swatches .gray-light{background-color:#999}.color-swatches .gray-lighter{background-color:#eee}.color-swatches .brand-primary{background-color:#337ab7}.color-swatches .brand-success{background-color:#5cb85c}.color-swatches .brand-warning{background-color:#f0ad4e}.color-swatches .brand-danger{background-color:#d9534f}.color-swatches .brand-info{background-color:#5bc0de}.color-swatches .bs-purple{background-color:#563d7c}.color-swatches .bs-purple-light{background-color:#c7bfd3}.color-swatches .bs-purple-lighter{background-color:#e5e1ea}.color-swatches .bs-gray{background-color:#f9f9f9}.bs-team .team-member{line-height:32px;color:#555}.bs-team .team-member:hover{color:#333;text-decoration:none}.bs-team .github-btn{float:right;width:180px;height:20px;margin-top:6px;border:none}.bs-team img{float:left;width:32px;margin-right:10px;border-radius:4px}.bs-docs-browser-bugs td p{margin-bottom:0}.bs-docs-browser-bugs th:first-child{width:18%}.show-grid{margin-bottom:15px}.show-grid [class^=col-]{padding-top:10px;padding-bottom:10px;background-color:#eee;background-color:rgba(86,61,124,.15);border:1px solid #ddd;border:1px solid rgba(86,61,124,.2)}.bs-example{position:relative;padding:45px 15px 15px;margin:0 -15px 15px;border-color:#e5e5e5 #eee #eee;border-style:solid;border-width:1px 0;-webkit-box-shadow:inset 0 3px 6px rgba(0,0,0,.05);box-shadow:inset 0 3px 6px rgba(0,0,0,.05)}.bs-example:after{position:absolute;top:15px;left:15px;font-size:12px;font-weight:700;color:#959595;text-transform:uppercase;letter-spacing:1px;content:"Example"}.bs-example-padded-bottom{padding-bottom:24px}.bs-example+.highlight,.bs-example+.zero-clipboard+.highlight{margin:-15px -15px 15px;border-width:0 0 1px;border-radius:0}@media (min-width:768px){.bs-example{margin-right:0;margin-left:0;background-color:#fff;border-color:#ddd;border-width:1px;border-radius:4px 4px 0 0;-webkit-box-shadow:none;box-shadow:none}.bs-example+.highlight,.bs-example+.zero-clipboard+.highlight{margin-top:-16px;margin-right:0;margin-left:0;border-width:1px;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.bs-example-standalone{border-radius:4px}}.bs-example .container{width:auto}.bs-example>.alert:last-child,.bs-example>.form-control:last-child,.bs-example>.jumbotron:last-child,.bs-example>.list-group:last-child,.bs-example>.navbar:last-child,.bs-example>.panel:last-child,.bs-example>.progress:last-child,.bs-example>.table-responsive:last-child>.table,.bs-example>.table:last-child,.bs-example>.well:last-child,.bs-example>blockquote:last-child,.bs-example>ol:last-child,.bs-example>p:last-child,.bs-example>ul:last-child{margin-bottom:0}.bs-example>p>.close{float:none}.bs-example-type .table .type-info{color:#767676;vertical-align:middle}.bs-example-type .table td{padding:15px 0;border-color:#eee}.bs-example-type .table tr:first-child td{border-top:0}.bs-example-type h1,.bs-example-type h2,.bs-example-type h3,.bs-example-type h4,.bs-example-type h5,.bs-example-type h6{margin:0}.bs-example-bg-classes p{padding:15px}.bs-example>.img-circle,.bs-example>.img-rounded,.bs-example>.img-thumbnail{margin:5px}.bs-example>.table-responsive>.table{background-color:#fff}.bs-example>.btn,.bs-example>.btn-group{margin-top:5px;margin-bottom:5px}.bs-example>.btn-toolbar+.btn-toolbar{margin-top:10px}.bs-example-control-sizing input[type=text]+input[type=text],.bs-example-control-sizing select{margin-top:10px}.bs-example-form .input-group{margin-bottom:10px}.bs-example>textarea.form-control{resize:vertical}.bs-example>.list-group{max-width:400px}.bs-example .navbar:last-child{margin-bottom:0}.bs-navbar-bottom-example,.bs-navbar-top-example{z-index:1;padding:0;overflow:hidden}.bs-navbar-bottom-example .navbar-header,.bs-navbar-top-example .navbar-header{margin-left:0}.bs-navbar-bottom-example .navbar-fixed-bottom,.bs-navbar-top-example .navbar-fixed-top{position:relative;margin-right:0;margin-left:0}.bs-navbar-top-example{padding-bottom:45px}.bs-navbar-top-example:after{top:auto;bottom:15px}.bs-navbar-top-example .navbar-fixed-top{top:-1px}.bs-navbar-bottom-example{padding-top:45px}.bs-navbar-bottom-example .navbar-fixed-bottom{bottom:-1px}.bs-navbar-bottom-example .navbar{margin-bottom:0}@media (min-width:768px){.bs-navbar-bottom-example .navbar-fixed-bottom,.bs-navbar-top-example .navbar-fixed-top{position:absolute}}.bs-example .pagination{margin-top:10px;margin-bottom:10px}.bs-example>.pager{margin-top:0}.bs-example-modal{background-color:#f5f5f5}.bs-example-modal .modal{position:relative;top:auto;right:auto;bottom:auto;left:auto;z-index:1;display:block}.bs-example-modal .modal-dialog{left:auto;margin-right:auto;margin-left:auto}.bs-example>.dropdown>.dropdown-toggle{float:left}.bs-example>.dropdown>.dropdown-menu{position:static;display:block;margin-bottom:5px;clear:left}.bs-example-tabs .nav-tabs{margin-bottom:15px}.bs-example-tooltips{text-align:center}.bs-example-tooltips>.btn{margin-top:5px;margin-bottom:5px}.bs-example-tooltip .tooltip{position:relative;display:inline-block;margin:10px 20px;opacity:1}.bs-example-popover{padding-bottom:24px;background-color:#f9f9f9}.bs-example-popover .popover{position:relative;display:block;float:left;width:260px;margin:20px}.scrollspy-example{position:relative;height:200px;margin-top:10px;overflow:auto}.bs-example>.nav-pills-stacked-example{max-width:300px}#collapseExample .well{margin-bottom:0}.bs-events-table>tbody>tr>td:first-child,.bs-events-table>thead>tr>th:first-child{white-space:nowrap}.bs-events-table>thead>tr>th:first-child{width:150px}.js-options-table>thead>tr>th:nth-child(1),.js-options-table>thead>tr>th:nth-child(2){width:100px}.js-options-table>thead>tr>th:nth-child(3){width:50px}.highlight{padding:9px 14px;margin-bottom:14px;background-color:#f7f7f9;border:1px solid #e1e1e8;border-radius:4px}.highlight pre{padding:0;margin-top:0;margin-bottom:0;word-break:normal;white-space:nowrap;background-color:transparent;border:0}.highlight pre code{font-size:inherit;color:#333}.highlight pre code:first-child{display:inline-block;padding-right:45px}.table-responsive .highlight pre{white-space:normal}.bs-table th small,.responsive-utilities th small{display:block;font-weight:400;color:#999}.responsive-utilities tbody th{font-weight:400}.responsive-utilities td{text-align:center}.responsive-utilities td.is-visible{color:#468847;background-color:#dff0d8!important}.responsive-utilities td.is-hidden{color:#ccc;background-color:#f9f9f9!important}.responsive-utilities-test{margin-top:5px}.responsive-utilities-test .col-xs-6{margin-bottom:10px}.responsive-utilities-test span{display:block;padding:15px 10px;font-size:14px;font-weight:700;line-height:1.1;text-align:center;border-radius:4px}.hidden-on .col-xs-6 .hidden-lg,.hidden-on .col-xs-6 .hidden-md,.hidden-on .col-xs-6 .hidden-sm,.hidden-on .col-xs-6 .hidden-xs,.visible-on .col-xs-6 .hidden-lg,.visible-on .col-xs-6 .hidden-md,.visible-on .col-xs-6 .hidden-sm,.visible-on .col-xs-6 .hidden-xs{color:#999;border:1px solid #ddd}.hidden-on .col-xs-6 .visible-lg-block,.hidden-on .col-xs-6 .visible-md-block,.hidden-on .col-xs-6 .visible-sm-block,.hidden-on .col-xs-6 .visible-xs-block,.visible-on .col-xs-6 .visible-lg-block,.visible-on .col-xs-6 .visible-md-block,.visible-on .col-xs-6 .visible-sm-block,.visible-on .col-xs-6 .visible-xs-block{color:#468847;background-color:#dff0d8;border:1px solid #d6e9c6}.bs-glyphicons{margin:0 -10px 20px;overflow:hidden}.bs-glyphicons-list{padding-left:0;list-style:none}.bs-glyphicons li{float:left;width:25%;height:115px;padding:10px;font-size:10px;line-height:1.4;text-align:center;background-color:#f9f9f9;border:1px solid #fff}.bs-glyphicons .glyphicon{margin-top:5px;margin-bottom:10px;font-size:24px}.bs-glyphicons .glyphicon-class{display:block;text-align:center;word-wrap:break-word}.bs-glyphicons li:hover{color:#fff;background-color:#563d7c}@media (min-width:768px){.bs-glyphicons{margin-right:0;margin-left:0}.bs-glyphicons li{width:12.5%;font-size:12px}}.bs-customizer .toggle{float:right;margin-top:25px}.bs-customizer label{margin-top:10px;font-weight:500;color:#555}.bs-customizer h2{padding-top:30px;margin-top:0;margin-bottom:5px}.bs-customizer h3{margin-bottom:0}.bs-customizer h4{margin-top:15px;margin-bottom:0}.bs-customizer .bs-callout h4{margin-top:0;margin-bottom:5px}.bs-customizer input[type=text]{font-family:Menlo,Monaco,Consolas,"Courier New",monospace;background-color:#fafafa}.bs-customizer .help-block{margin-bottom:5px;font-size:12px}#less-section label{font-weight:400}.bs-customize-download .btn-outline{padding:20px}.bs-customizer-alert{position:fixed;top:0;right:0;left:0;z-index:1030;padding:15px 0;color:#fff;background-color:#d9534f;border-bottom:1px solid #b94441;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.25);box-shadow:inset 0 1px 0 rgba(255,255,255,.25)}.bs-customizer-alert .close{margin-top:-4px;font-size:24px}.bs-customizer-alert p{margin-bottom:0}.bs-customizer-alert .glyphicon{margin-right:5px}.bs-customizer-alert pre{margin:10px 0 0;color:#fff;background-color:#a83c3a;border-color:#973634;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,.05),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 2px 4px rgba(0,0,0,.05),0 1px 0 rgba(255,255,255,.1)}.bs-dropzone{position:relative;padding:20px;margin-bottom:20px;color:#777;text-align:center;border:2px dashed #eee;border-radius:4px}.bs-dropzone .import-header{margin-bottom:5px}.bs-dropzone .glyphicon-download-alt{font-size:40px}.bs-dropzone hr{width:100px}.bs-dropzone .lead{margin-bottom:10px;font-weight:400;color:#333}#import-manual-trigger{cursor:pointer}.bs-dropzone p:last-child{margin-bottom:0}.bs-brand-logos{display:table;width:100%;margin-bottom:15px;overflow:hidden;color:#563d7c;background-color:#f9f9f9;border-radius:4px}.bs-brand-item{padding:60px 0;text-align:center}.bs-brand-item+.bs-brand-item{border-top:1px solid #fff}.bs-brand-logos .inverse{color:#fff;background-color:#563d7c}.bs-brand-item h1,.bs-brand-item h3{margin-top:0;margin-bottom:0}.bs-brand-item .bs-docs-booticon{margin-right:auto;margin-left:auto}.bs-brand-item .glyphicon{width:30px;height:30px;margin:10px auto -10px;line-height:30px;color:#fff;border-radius:50%}.bs-brand-item .glyphicon-ok{background-color:#5cb85c}.bs-brand-item .glyphicon-remove{background-color:#d9534f}@media (min-width:768px){.bs-brand-item{display:table-cell;width:1%}.bs-brand-item+.bs-brand-item{border-top:0;border-left:1px solid #fff}.bs-brand-item h1{font-size:60px}}.zero-clipboard{position:relative;display:none}.btn-clipboard{position:absolute;top:0;right:0;z-index:10;display:block;padding:5px 8px;font-size:12px;color:#767676;cursor:pointer;background-color:#fff;border:1px solid #e1e1e8;border-radius:0 4px 0 4px}.btn-clipboard-hover{color:#fff;background-color:#563d7c;border-color:#563d7c}@media (min-width:768px){.zero-clipboard{display:block}.bs-example+.zero-clipboard .btn-clipboard{top:-16px;border-top-right-radius:0}}.anchorjs-link{color:inherit}@media (max-width:480px){.anchorjs-link{display:none}}:hover>.anchorjs-link{opacity:.75;-webkit-transition:color .16s linear;-o-transition:color .16s linear;transition:color .16s linear}.anchorjs-link:focus,:hover>.anchorjs-link:hover{text-decoration:none;opacity:1}#focusedInput{border-color:#ccc;border-color:rgba(82,168,236,.8);outline:0;outline:thin dotted\9;-webkit-box-shadow:0 0 8px rgba(82,168,236,.6);box-shadow:0 0 8px rgba(82,168,236,.6)}.v4-tease{display:block;padding:15px 20px;font-weight:700;color:#fff;text-align:center;background-color:#0275d8}.v4-tease:hover{color:#fff;text-decoration:none;background-color:#0269c2}@media print{a[href]:after{content:""!important}} 11 | /*# sourceMappingURL=docs.min.css.map */ --------------------------------------------------------------------------------