├── .dockerignore ├── .gitignore ├── .gitlint.yaml ├── .travis.yml ├── .travis ├── before_deploy.sh └── deploy.sh ├── LICENSE ├── README.md ├── client ├── controller │ ├── .gitignore │ ├── README.md │ ├── build.gradle │ ├── config │ │ ├── sample_mysql_config.json │ │ ├── sample_oracle_config.json │ │ ├── sample_postgres_config.json │ │ └── sample_saphana_config.json │ ├── log4j.properties │ ├── sample_output │ │ ├── mysql │ │ │ ├── knobs.json │ │ │ ├── metrics_after.json │ │ │ ├── metrics_before.json │ │ │ └── summary.json │ │ ├── oracle │ │ │ ├── knobs.json │ │ │ ├── knobs_including_hidden.json │ │ │ ├── metrics_after.json │ │ │ ├── metrics_before.json │ │ │ ├── summary.json │ │ │ ├── v12.1 │ │ │ │ ├── knobs.json │ │ │ │ ├── metrics_after.json │ │ │ │ ├── metrics_before.json │ │ │ │ └── summary.json │ │ │ └── v12.2 │ │ │ │ ├── knobs.json │ │ │ │ ├── metrics_after.json │ │ │ │ ├── metrics_before.json │ │ │ │ └── summary.json │ │ ├── postgres │ │ │ ├── knobs.json │ │ │ ├── metrics_after.json │ │ │ ├── metrics_before.json │ │ │ └── summary.json │ │ ├── rds_postgres │ │ │ ├── knobs.json │ │ │ ├── metrics_after.json │ │ │ ├── metrics_before.json │ │ │ └── summary.json │ │ └── saphana │ │ │ ├── knobs.json │ │ │ ├── metrics_after.json │ │ │ ├── metrics_before.json │ │ │ └── summary.json │ └── src │ │ ├── main │ │ └── java │ │ │ └── com │ │ │ └── controller │ │ │ ├── ControllerConfiguration.java │ │ │ ├── Main.java │ │ │ ├── ResultUploader.java │ │ │ ├── collectors │ │ │ ├── DBCollector.java │ │ │ ├── DBParameterCollector.java │ │ │ ├── MySQLCollector.java │ │ │ ├── OracleCollector.java │ │ │ ├── PostgresCollector.java │ │ │ └── SAPHanaCollector.java │ │ │ ├── json_validation_schema │ │ │ ├── config_schema.json │ │ │ ├── schema.json │ │ │ └── summary_schema.json │ │ │ ├── types │ │ │ ├── DatabaseType.java │ │ │ └── JSONSchemaType.java │ │ │ └── util │ │ │ ├── ClassUtil.java │ │ │ ├── CollectionUtil.java │ │ │ ├── FileUtil.java │ │ │ ├── JSONSerializable.java │ │ │ ├── 
JSONUtil.java │ │ │ ├── ValidationUtils.java │ │ │ └── json │ │ │ ├── JSONArray.java │ │ │ ├── JSONException.java │ │ │ ├── JSONObject.java │ │ │ ├── JSONString.java │ │ │ ├── JSONStringer.java │ │ │ ├── JSONTokener.java │ │ │ ├── JSONWriter.java │ │ │ └── Test.java │ │ └── test │ │ └── java │ │ └── com │ │ └── controller │ │ └── collectors │ │ ├── AbstractJSONValidationTestCase.java │ │ ├── TestInvalidJSON.java │ │ ├── TestMySQLJSON.java │ │ ├── TestOracleJSON.java │ │ └── TestPostgresJSON.java └── driver │ ├── .gitignore │ ├── driver_config.py │ ├── fabfile.py │ ├── integrationTests │ └── data │ │ ├── 0__knobs.json │ │ ├── 0__metrics_after.json │ │ ├── 0__metrics_before.json │ │ ├── 0__summary.json │ │ ├── 1__knobs.json │ │ ├── 1__metrics_after.json │ │ ├── 1__metrics_before.json │ │ ├── 1__summary.json │ │ ├── 2__knobs.json │ │ ├── 2__metrics_after.json │ │ ├── 2__metrics_before.json │ │ ├── 2__summary.json │ │ ├── 3__knobs.json │ │ ├── 3__metrics_after.json │ │ ├── 3__metrics_before.json │ │ ├── 3__summary.json │ │ ├── 4__knobs.json │ │ ├── 4__metrics_after.json │ │ ├── 4__metrics_before.json │ │ ├── 4__summary.json │ │ ├── 5__knobs.json │ │ ├── 5__metrics_after.json │ │ ├── 5__metrics_before.json │ │ ├── 5__summary.json │ │ ├── 6__knobs.json │ │ ├── 6__metrics_after.json │ │ ├── 6__metrics_before.json │ │ ├── 6__summary.json │ │ ├── 7__knobs.json │ │ ├── 7__metrics_after.json │ │ ├── 7__metrics_before.json │ │ ├── 7__summary.json │ │ ├── 8__knobs.json │ │ ├── 8__metrics_after.json │ │ ├── 8__metrics_before.json │ │ ├── 8__summary.json │ │ ├── 9__knobs.json │ │ ├── 9__metrics_after.json │ │ ├── 9__metrics_before.json │ │ ├── 9__summary.json │ │ ├── x__knobs.json │ │ ├── x__metrics_after.json │ │ ├── x__metrics_before.json │ │ └── x__summary.json │ ├── oracleScripts │ ├── archiveLog.sh │ ├── awrOracle.sh │ ├── createRestore.sh │ ├── createUser.sh │ ├── dropUser.sh │ ├── dumpOracle.sh │ ├── flashBack.sh │ ├── oracle_pfile_example.ora │ ├── removeRestore.sh │ ├── 
restartOracle.sh │ ├── restoreOracle.sh │ ├── shutdownOracle.sh │ ├── snapshotOracle.sh │ └── startupOracle.sh │ ├── userDefinedMetrics │ └── user_defined_metrics.py │ └── utils.py ├── docker ├── .gitignore ├── Dockerfile.base ├── Dockerfile.driver ├── Dockerfile.web ├── create-docker-compose.sh ├── credentials.py ├── docker-compose.build.yml ├── docker-compose.up.yml ├── install.sh ├── start-test.sh ├── start.sh └── wait-for-it.sh ├── script ├── formatting │ ├── config │ │ ├── google_checks.xml │ │ ├── pycodestyle │ │ └── pylintrc │ └── formatter.py ├── git-hooks │ └── pre-commit ├── query_and_get.py └── validators │ └── source_validator.py └── server ├── analysis ├── __init__.py ├── base.py ├── cluster.py ├── constraints.py ├── ddpg │ ├── __init__.py │ ├── ddpg.py │ ├── ou_process.py │ └── prioritized_replay_memory.py ├── factor_analysis.py ├── gp.py ├── gp_tf.py ├── gpr │ ├── gpr_models.py │ ├── gprc.py │ ├── optimize.py │ ├── predict.py │ └── ucb.py ├── lasso.py ├── nn_tf.py ├── preprocessing.py ├── simulation.py ├── tests │ ├── __init__.py │ ├── test_cluster.py │ ├── test_constraints.py │ ├── test_ddpg.py │ ├── test_gpr.py │ ├── test_nn.py │ └── test_preprocessing.py └── util.py └── website ├── .gitignore ├── LICENSE ├── README.md ├── manage.py ├── requirements.txt ├── script ├── controller_simulator │ ├── .gitignore │ ├── data_generator.py │ ├── samples │ │ ├── knobs.json │ │ ├── metrics_after.json │ │ ├── metrics_before.json │ │ └── summary.json │ └── upload_data.py ├── fix_permissions.py ├── fixture_generators │ ├── knob_identification │ │ ├── .gitignore │ │ ├── create_ranked_knobs.py │ │ └── postgres-96_m3xlarge_ranked_knobs.json │ ├── knob_settings │ │ ├── oracle │ │ │ ├── .gitignore │ │ │ ├── create_knob_settings.py │ │ │ ├── oracle12.csv │ │ │ ├── oracle121.csv │ │ │ ├── oracle19.csv │ │ │ └── sql │ │ │ │ ├── get_knob_info.sql │ │ │ │ └── get_knob_info_12.1.sql │ │ └── postgres_9.6 │ │ │ ├── .gitignore │ │ │ ├── create_knob_settings.py │ │ │ ├── 
postgres-96_knobs.json │ │ │ └── settings.csv │ ├── metric_settings │ │ ├── oracle │ │ │ ├── .gitignore │ │ │ ├── create_metric_settings.py │ │ │ ├── oracle12.json │ │ │ ├── oracle121.json │ │ │ └── oracle19.json │ │ └── postgres_9.6 │ │ │ ├── .gitignore │ │ │ ├── create_metric_settings.py │ │ │ ├── metrics_sample.json │ │ │ ├── pg96_database_stats.csv │ │ │ ├── pg96_global_stats.csv │ │ │ ├── pg96_index_stats.csv │ │ │ ├── pg96_table_stats.csv │ │ │ └── postgres-96_metrics.json │ └── workload_characterization │ │ ├── .gitignore │ │ ├── create_pruned_metrics.py │ │ └── postgres-96_m3xlarge_pruned_metrics.json ├── installation │ ├── .gitignore │ ├── Vagrantfile │ └── bootstrap.sh ├── management │ ├── beat.sh │ ├── celery.sh │ ├── django.sh │ ├── fabfile.py │ └── loop.sh └── upload │ ├── upload.py │ └── upload_batch.py ├── tests ├── __init__.py ├── runner.py ├── test_files │ ├── sample_knobs.json │ ├── sample_metrics_end.json │ ├── sample_metrics_start.json │ └── sample_summary.json ├── test_parser.py ├── test_tasks.py ├── test_upload.py ├── test_utils.py ├── test_views.py └── utils.py └── website ├── __init__.py ├── admin.py ├── celery.py ├── db ├── __init__.py ├── base │ ├── __init__.py │ ├── parser.py │ └── target_objective.py ├── myrocks │ ├── __init__.py │ ├── parser.py │ └── target_objective.py ├── mysql │ ├── __init__.py │ ├── parser.py │ └── target_objective.py ├── oracle │ ├── __init__.py │ ├── parser.py │ └── target_objective.py ├── parser.py └── postgres │ ├── __init__.py │ ├── parser.py │ └── target_objective.py ├── fixtures ├── dbms_catalog.json ├── myrocks-5.6_knobs.json ├── myrocks-5.6_metrics.json ├── mysql-56_knobs.json ├── mysql-56_metrics.json ├── mysql-57_knobs.json ├── mysql-57_metrics.json ├── mysql-80_knobs.json ├── mysql-80_metrics.json ├── oracle-121_knobs.json ├── oracle-121_metrics.json ├── oracle-12_knobs.json ├── oracle-12_metrics.json ├── oracle-19_knobs.json ├── oracle-19_metrics.json ├── postgres-92_knobs.json ├── 
postgres-92_metrics.json ├── postgres-93_knobs.json ├── postgres-93_metrics.json ├── postgres-94_knobs.json ├── postgres-94_metrics.json ├── postgres-96_knobs.json ├── postgres-96_m3xlarge_pruned_metrics.json ├── postgres-96_m3xlarge_ranked_knobs.json ├── postgres-96_metrics.json ├── test_user.json ├── test_user_sessions.json └── test_website.json ├── forms.py ├── management └── commands │ ├── cleardblog.py │ ├── createuser.py │ ├── deleteuser.py │ ├── dumpdebuginfo.py │ ├── dumpknob.py │ ├── dumpwebsite.py │ ├── getuploadcode.py │ ├── listusers.py │ ├── loadknob.py │ ├── resetwebsite.py │ ├── runcelery.py │ ├── setuploadcode.py │ ├── startcelery.py │ └── stopcelery.py ├── migrations ├── 0001_initial.py ├── 0002_enable_compression.py ├── 0003_load_initial_data.py ├── 0004_add_lhs.py ├── 0005_add_workload_field.py ├── 0006_session_hyperparameters.py ├── 0007_executiontime.py ├── 0008_change_result_taskids_field.py ├── 0009_change_executiontime_function_field.py ├── 0010_add_pipeline_data_field.py ├── 0011_knob_bound_fields.py ├── 0012_make_workload_status_editable.py ├── 0013_backupdata_other.py └── __init__.py ├── models.py ├── set_default_knobs.py ├── settings ├── .gitignore ├── __init__.py ├── common.py ├── constants.py └── credentials_TEMPLATE.py ├── static ├── css │ ├── base.css │ ├── bootstrap-select.min.css │ ├── bootstrap.min.css │ ├── jquery.dataTables.css │ ├── style.css │ └── themes │ │ ├── bootstrap-darkly.min.css │ │ ├── bootstrap-default.min.css │ │ ├── bootstrap-flatly.min.css │ │ └── bootstrap-sandstone.min.css ├── fonts │ ├── glyphicons-halflings-regular.eot │ ├── glyphicons-halflings-regular.svg │ ├── glyphicons-halflings-regular.ttf │ ├── glyphicons-halflings-regular.woff │ └── glyphicons-halflings-regular.woff2 ├── img │ ├── ajax-loader.gif │ ├── glyphicons-halflings-white.png │ ├── glyphicons-halflings.png │ ├── logo.png │ ├── otter.jpg │ ├── sort_asc.png │ ├── sort_both.png │ └── sort_desc.png └── js │ ├── FixedHeader.min.js │ ├── 
benchmark_bar.js │ ├── bootstrap-select.js.map │ ├── bootstrap-select.min.js │ ├── bootstrap.min.js │ ├── common.js │ ├── jqplot │ ├── excanvas.min.js │ ├── jqplot.barRenderer.min.js │ ├── jqplot.canvasAxisLabelRenderer.min.js │ ├── jqplot.canvasAxisTickRenderer.min.js │ ├── jqplot.canvasTextRenderer.min.js │ ├── jqplot.categoryAxisRenderer.min.js │ ├── jqplot.cursor.min.js │ ├── jqplot.dateAxisRenderer.min.js │ ├── jqplot.highlighter.min.js │ ├── jqplot.logAxisRenderer.min.js │ ├── jqplot.pointLabels.min.js │ ├── jquery.jqplot.min.css │ └── jquery.jqplot.min.js │ ├── jquery-1.10.2.min.js │ ├── jquery-1.7.2.min.js │ ├── jquery-migrate-1.2.1.min.js │ ├── jquery.address-1.5.min.js │ ├── jquery.dataTables.min.js │ ├── jquery.jqpagination.min.js │ ├── result10.js │ └── timeline.js ├── tasks ├── __init__.py ├── async_tasks.py └── periodic_tasks.py ├── templates ├── 404.html ├── base.html ├── change_password.html ├── dbms_data.html ├── dbms_reference.html ├── edit_knobs.html ├── edit_project.html ├── edit_session.html ├── edit_workload.html ├── home_projects.html ├── login.html ├── pipeline_data.html ├── project_sessions.html ├── result.html ├── session.html ├── signup.html ├── task_status.html └── workload.html ├── templatetags ├── __init__.py └── util_functions.py ├── types.py ├── urls.py ├── utils.py ├── views.py └── wsgi.py /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !client 3 | client/controller/.gradle 4 | client/controller/build 5 | client/controller/output 6 | client/driver/log 7 | client/driver/results 8 | !docker 9 | docker/Dockerfile* 10 | docker/docker-compose*.yml 11 | docker/create-docker-compose.sh 12 | !server 13 | server/website/celerybeat-schedule* 14 | server/website/log 15 | server/website/*.pid 16 | server/website/debug_*.tar.gz 17 | server/website/session_knobs.json 18 | server/website/dump_website.json 19 | server/website/script 20 | server/website/website/settings/*credentials.py 21 | 
**/.git* 22 | **/*.swp 23 | **/*.swo 24 | **/*~ 25 | **/*.pyc 26 | **/*.log 27 | **/*.bak 28 | **/.DS_Store 29 | **/__pycache__ 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files # 2 | ######################################### 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | *.com 7 | *.class 8 | *.dll 9 | *.exe 10 | *.o 11 | *.so 12 | 13 | # OS generated files # 14 | ###################### 15 | .DS_Store 16 | .DS_Store? 17 | ._* 18 | .Spotlight-V100 19 | .Trashes 20 | ehthumbs.db 21 | Thumbs.db 22 | 23 | # Distribution / packaging # 24 | ############################ 25 | .Python 26 | env/ 27 | build/ 28 | develop-eggs/ 29 | dist/ 30 | downloads/ 31 | eggs/ 32 | .eggs/ 33 | lib/ 34 | lib64/ 35 | parts/ 36 | sdist/ 37 | var/ 38 | *.egg-info/ 39 | .installed.cfg 40 | *.egg 41 | 42 | # Packages # 43 | ############ 44 | # it's better to unpack these files and commit the raw source 45 | # git has its own built in compression methods 46 | *.7z 47 | *.dmg 48 | *.gz 49 | *.iso 50 | *.jar 51 | *.rar 52 | *.tar 53 | *.zip 54 | 55 | # PyInstaller # 56 | ############### 57 | *.manifest 58 | *.spec 59 | 60 | # Installer logs # 61 | ################## 62 | pip-log.txt 63 | pip-delete-this-directory.txt 64 | 65 | # Unit test / coverage reports # 66 | ################################ 67 | htmlcov/ 68 | .tox/ 69 | .coverage 70 | .coverage.* 71 | .cache 72 | nosetests.xml 73 | coverage.xml 74 | *,cover 75 | .hypothesis/ 76 | 77 | # Env # 78 | ####### 79 | .python-version 80 | .env 81 | venv/ 82 | ENV/ 83 | 84 | # Eclipse # 85 | ########### 86 | .project 87 | .pydevproject 88 | .settings 89 | .classpath 90 | 91 | # Intellij and PyCharm # 92 | .idea 93 | *.iml 94 | *.iws 95 | out/ 96 | 97 | # vim # 98 | *~ 99 | *.swp 100 | *.swo 101 | 102 | # Text editor configs # 103 | ####################### 104 | .vimrc 105 | 
-------------------------------------------------------------------------------- /.travis/before_deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | for tag in base web driver; do 6 | docker tag "ottertune-${tag}" "${DOCKER_REPO}:${tag}" 7 | done 8 | 9 | echo "$DOCKER_PASSWD" | docker login -u "$DOCKER_USER" --password-stdin 10 | 11 | -------------------------------------------------------------------------------- /.travis/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | for tag in base web driver; do 6 | docker push "${DOCKER_REPO}:${tag}" 7 | done 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OtterTune 2 | 3 | [![Build Status](https://travis-ci.org/cmu-db/ottertune.svg?branch=master)](https://travis-ci.org/cmu-db/ottertune) 4 | [![codecov.io](https://codecov.io/github/cmu-db/ottertune/coverage.svg?branch=master)](https://codecov.io/github/cmu-db/ottertune) 5 | 6 | OtterTune is a new tool developed by students and researchers in the [Carnegie Mellon Database Group](http://db.cs.cmu.edu/projects/autotune/) that can automatically find good settings for a database management system's configuration knobs. The goal is to make it easier for anyone to deploy a DBMS without any expertise in database administration. To tune new DBMS deployments, OtterTune reuses training data gathered from previous tuning sessions. Because OtterTune does not need to generate an initial dataset for training its ML models, tuning time is drastically reduced. 7 | 8 | For more information, see our [paper](http://db.cs.cmu.edu/papers/2017/p1009-van-aken.pdf). 9 | 10 | ``` 11 | @inproceedings{vanaken17, 12 | author = {Van Aken, Dana and Pavlo, Andrew and Gordon, Geoffrey J. 
and Zhang, Bohan}, 13 | title = {Automatic Database Management System Tuning Through Large-scale Machine Learning}, 14 | booktitle = {Proceedings of the 2017 ACM International Conference on Management of Data}, 15 | series = {SIGMOD '17}, 16 | year = {2017}, 17 | pages = {1009--1024}, 18 | numpages = {16}, 19 | } 20 | ``` 21 | 22 | ## Contributors 23 | 24 | See the [people page](https://github.com/cmu-db/ottertune/graphs/contributors) for the full list of contributors. 25 | -------------------------------------------------------------------------------- /client/controller/.gitignore: -------------------------------------------------------------------------------- 1 | # Mac OS X hidden file 2 | .DS_Store 3 | 4 | # workspace configuration files 5 | .settings/ 6 | .metadata/ 7 | 8 | # Gradle 9 | .gradle/ 10 | /gradlew.bat 11 | /gradlew 12 | /settings.gradle 13 | 14 | # Intellij 15 | .idea 16 | /dbcollector.iml 17 | 18 | # generated files 19 | bin/ 20 | build/ 21 | out/ 22 | output/ 23 | 24 | # lib 25 | lib/ 26 | 27 | # log file 28 | *.log 29 | 30 | # controller configuration files 31 | config/* 32 | !config/sample_*_config.json 33 | *.pid 34 | 35 | -------------------------------------------------------------------------------- /client/controller/README.md: -------------------------------------------------------------------------------- 1 | ## OtterTune Controller 2 | The controller is responsible for collecting database metrics and knobs information during an experiment.
3 | #### Usage: 4 | To build the project, run `gradle build`.
5 | To run the controller, you need to provide a configuration file and provide command line arguments (command line arguments are optional). Then run `gradle run`. 6 | 7 | * Command line arguments: 8 | * time (flag : `-t`)
9 | The duration of the experiment in `seconds`. The default time is set to 300 seconds. 10 | * configuration file path (flag : `-c`)
11 | The path of the input configuration file (required). Sample config files are under the directory `config`. 12 | 13 | -------------------------------------------------------------------------------- /client/controller/config/sample_mysql_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "database_type" : "mysql", 3 | "database_url" : "jdbc:mysql://localhost:3306/mysqldb", 4 | "username" : "MY_DATABASE_USERNAME", 5 | "password" : "MY_DATABASE_PASSWORD", 6 | "upload_code" : "DEPRECATED", 7 | "upload_url" : "DEPRECATED", 8 | "workload_name" : "workload_name" 9 | } -------------------------------------------------------------------------------- /client/controller/config/sample_oracle_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "database_type" : "oracle", 3 | "database_url" : "jdbc:oracle:thin:@localhost:1521:orcldb", 4 | "username" : "sys as sysdba", 5 | "password" : "oracle", 6 | "upload_code" : "DEPRECATED", 7 | "upload_url" : "DEPRECATED", 8 | "workload_name" : "tpcc" 9 | } 10 | -------------------------------------------------------------------------------- /client/controller/config/sample_postgres_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "database_type" : "postgres", 3 | "database_url" : "jdbc:postgresql://localhost:5432/postgres", 4 | "username" : "MY_DATABASE_USERNAME", 5 | "password" : "MY_DATABASE_PASSWORD", 6 | "upload_code" : "DEPRECATED", 7 | "upload_url" : "DEPRECATED", 8 | "workload_name" : "workload_name" 9 | } 10 | -------------------------------------------------------------------------------- /client/controller/config/sample_saphana_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "database_type" : "saphana", 3 | "database_url" : "jdbc:sap://localhost:39015", 4 | "username" : "MY_DATABASE_USERNAME", 5 | "password" : 
"MY_DATABASE_PASSWORD", 6 | "upload_code" : "DEPRECATED", 7 | "upload_url" : "DEPRECATED", 8 | "workload_name" : "workload_name" 9 | } -------------------------------------------------------------------------------- /client/controller/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set root logger level to DEBUG and its only appender to A1. 2 | log4j.rootLogger=INFO, A1, FILE 3 | log4j.rootLogger.layout=org.apache.log4j.PatternLayout 4 | 5 | # A1 is set to be a ConsoleAppender. 6 | log4j.appender.A1=org.apache.log4j.ConsoleAppender 7 | log4j.appender.A1.layout=org.apache.log4j.PatternLayout 8 | log4j.appender.A1.layout.ConversionPattern=%d{ABSOLUTE} (%F:%L) %-5p - %m%n 9 | 10 | # Redirect log messages to a log file, support file rolling. 11 | # Define the file appender 12 | log4j.appender.FILE=org.apache.log4j.FileAppender 13 | 14 | # Set the name of the file 15 | log4j.appender.FILE.file=controller.log 16 | 17 | # Set the immediate flush to true (default) 18 | log4j.appender.FILE.immediateFlush=true 19 | 20 | # Set the threshold to debug mode 21 | log4j.appender.FILE.Threshold=debug 22 | 23 | # Set the append to false, overwrite 24 | log4j.appender.FILE.append=true 25 | 26 | # Define the layout for file appender 27 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 28 | log4j.appender.A1.layout.ConversionPattern=%d{ABSOLUTE} (%F:%L) %-5p - %m%n -------------------------------------------------------------------------------- /client/controller/sample_output/mysql/summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1535653369274, 3 | "end_time": 1535653559607, 4 | "observation_time": 190, 5 | "database_type": "mysql", 6 | "database_version": "5.7.20", 7 | "workload_name": "workload_name" 8 | } -------------------------------------------------------------------------------- /client/controller/sample_output/oracle/summary.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1563264802685, 3 | "end_time": 1563265002918, 4 | "observation_time": 200, 5 | "database_type": "oracle", 6 | "database_version": "19.0.0.0.0", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/controller/sample_output/oracle/v12.1/summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1572978787444, 3 | "end_time": 1572978819060, 4 | "observation_time": 31, 5 | "database_type": "oracle", 6 | "database_version": "12.2.0.1.0", 7 | "workload_name": "tpcc", 8 | "real_database_version": "12.1.0.2.0" 9 | } -------------------------------------------------------------------------------- /client/controller/sample_output/oracle/v12.2/summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1572999607609, 3 | "end_time": 1572999638053, 4 | "observation_time": 30, 5 | "database_type": "oracle", 6 | "database_version": "19.0.0.0.0", 7 | "workload_name": "tpcc", 8 | "real_database_version": "12.2.0.1.0" 9 | } -------------------------------------------------------------------------------- /client/controller/sample_output/postgres/summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1535653369274, 3 | "end_time": 1535653559607, 4 | "observation_time": 190, 5 | "database_type": "postgres", 6 | "database_version": "9.3", 7 | "workload_name": "workload_name" 8 | } -------------------------------------------------------------------------------- /client/controller/sample_output/rds_postgres/summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1600373454483, 3 | "end_time": 1600373654484, 4 | "observation_time": 200, 5 | "database_type": "postgres", 6 
| "database_version": "11", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/controller/sample_output/saphana/summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1535653369274, 3 | "end_time": 1535653559607, 4 | "observation_time": 190, 5 | "database_type": "saphana", 6 | "database_version": "2.00.023.00.1513691289", 7 | "workload_name": "workload_name" 8 | } -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/ControllerConfiguration.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - ControllerConfiguration.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller; 8 | 9 | /** Controller Configuration. */ 10 | public class ControllerConfiguration { 11 | private DatabaseType dbType; 12 | private String dbName; 13 | private String dbUsername; 14 | private String dbPassword; 15 | private String dbURL; 16 | private String uploadCode; 17 | private String uploadURL; 18 | private String workloadName; 19 | 20 | public ControllerConfiguration() {} 21 | 22 | public ControllerConfiguration( 23 | String dbName, 24 | String dbUsername, 25 | String dbPassword, 26 | String dbURL, 27 | String uploadCode, 28 | String uploadURL, 29 | String workloadName) { 30 | this.dbType = DatabaseType.get(dbName); 31 | this.dbName = dbName; 32 | this.dbUsername = dbUsername; 33 | this.dbPassword = dbPassword; 34 | this.dbURL = dbURL; 35 | this.uploadCode = uploadCode; 36 | this.uploadURL = uploadURL; 37 | this.workloadName = workloadName; 38 | } 39 | 40 | /* Mutators */ 41 | public void setDBType(DatabaseType dbType) { 42 | this.dbType = dbType; 43 | } 44 | 45 | public void setDBName(String dbName) { 46 | this.dbName = dbName; 47 | } 48 | 49 | public void 
setDBUsername(String dbUsername) { 50 | this.dbUsername = dbUsername; 51 | } 52 | 53 | public void setPassword(String dbPassword) { 54 | this.dbPassword = dbPassword; 55 | } 56 | 57 | public void setDBURL(String dbURL) { 58 | this.dbURL = dbURL; 59 | } 60 | 61 | public void setUploadCode(String uploadCode) { 62 | this.uploadCode = uploadCode; 63 | } 64 | 65 | public void setUploadURL(String uploadURL) { 66 | this.uploadURL = uploadURL; 67 | } 68 | 69 | public void setWorkloadName(String workloadName) { 70 | this.workloadName = workloadName; 71 | } 72 | 73 | /* Getters */ 74 | public DatabaseType getDBType() { 75 | return this.dbType; 76 | } 77 | 78 | public String getDBName() { 79 | return this.dbName; 80 | } 81 | 82 | public String getDBUsername() { 83 | return this.dbUsername; 84 | } 85 | 86 | public String getDBPassword() { 87 | return this.dbPassword; 88 | } 89 | 90 | public String getDBURL() { 91 | return this.dbURL; 92 | } 93 | 94 | public String getUploadCode() { 95 | return this.uploadCode; 96 | } 97 | 98 | public String getUploadURL() { 99 | return this.uploadURL; 100 | } 101 | 102 | public String getWorkloadName() { 103 | return this.workloadName; 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/ResultUploader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - ResultUploader.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller; 8 | 9 | import java.io.File; 10 | import java.io.IOException; 11 | import java.util.ArrayList; 12 | import java.util.List; 13 | import java.util.Map; 14 | import org.apache.http.HttpEntity; 15 | import org.apache.http.client.methods.CloseableHttpResponse; 16 | import org.apache.http.client.methods.HttpPost; 17 | import org.apache.http.entity.mime.MultipartEntityBuilder; 18 | import 
org.apache.http.entity.mime.content.FileBody; 19 | import org.apache.http.impl.client.CloseableHttpClient; 20 | import org.apache.http.impl.client.HttpClients; 21 | import org.apache.http.util.EntityUtils; 22 | 23 | /** 24 | * Uploading the result. 25 | * 26 | * @author Shuli 27 | */ 28 | public class ResultUploader { 29 | public static void upload(String uploadURL, String uploadCode, 30 | Map files) throws IOException { 31 | 32 | try { 33 | List filesToSendNames = new ArrayList<>(); 34 | List filesToSend = new ArrayList<>(); 35 | for (String fileName : files.keySet()) { 36 | String path = files.get(fileName); 37 | filesToSendNames.add(fileName); 38 | File f = new File(path); 39 | filesToSend.add(f); 40 | } 41 | CloseableHttpClient httpclient = HttpClients.createDefault(); 42 | HttpPost httppost = new HttpPost(uploadURL); 43 | MultipartEntityBuilder mb = 44 | MultipartEntityBuilder.create().addTextBody("upload_code", uploadCode); 45 | for (int i = 0; i < filesToSendNames.size(); i++) { 46 | mb.addPart(filesToSendNames.get(i), new FileBody(filesToSend.get(i))); 47 | } 48 | 49 | HttpEntity reqEntity = mb.build(); 50 | httppost.setEntity(reqEntity); 51 | CloseableHttpResponse response = httpclient.execute(httppost); 52 | try { 53 | HttpEntity resEntity = response.getEntity(); 54 | EntityUtils.consume(resEntity); 55 | } finally { 56 | response.close(); 57 | } 58 | } catch (IOException e) { 59 | throw new IOException(); 60 | } 61 | } 62 | } -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/collectors/DBCollector.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - DBCollector.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | import com.controller.util.JSONUtil; 10 | import java.util.Map; 11 | import java.util.TreeMap; 12 | import 
org.apache.log4j.Logger; 13 | 14 | public class DBCollector implements DBParameterCollector { 15 | 16 | private static final Logger LOG = Logger.getLogger(DBCollector.class); 17 | 18 | protected static final String JSON_GLOBAL_KEY = "global"; 19 | protected static final String JSON_LOCAL_KEY = "local"; 20 | 21 | protected final Map dbParameters = new TreeMap(); 22 | 23 | protected final Map dbMetrics = new TreeMap(); 24 | 25 | protected final StringBuilder version = new StringBuilder(); 26 | 27 | @Override 28 | public boolean hasParameters() { 29 | return (dbParameters.isEmpty() == false); 30 | } 31 | 32 | @Override 33 | public boolean hasMetrics() { 34 | return (dbMetrics.isEmpty() == false); 35 | } 36 | 37 | @Override 38 | public String collectParameters() { 39 | return JSONUtil.format(JSONUtil.toJSONString(dbParameters)); 40 | } 41 | 42 | @Override 43 | public String collectMetrics() { 44 | return JSONUtil.format(JSONUtil.toJSONString(dbMetrics)); 45 | } 46 | 47 | @Override 48 | public String collectVersion() { 49 | return version.toString(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/collectors/DBParameterCollector.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - DBParameterCollector.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | public interface DBParameterCollector { 10 | boolean hasParameters(); 11 | 12 | boolean hasMetrics(); 13 | 14 | String collectParameters(); 15 | 16 | String collectMetrics(); 17 | 18 | String collectVersion(); 19 | } 20 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/json_validation_schema/config_schema.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"$schema": "http://json-schema.org/draft-04/schema#", 3 | "title": "summary of collector output", 4 | "description": "config files: user input", 5 | "type": "object", 6 | "properties": { 7 | "database_type": { 8 | "type" : "string" 9 | }, 10 | "database_url": { 11 | "type" : "string" 12 | }, 13 | "username" :{ 14 | "type" : "string" 15 | }, 16 | "password": { 17 | "type" : "string" 18 | }, 19 | "upload_code": { 20 | "type" : "string" 21 | }, 22 | "upload_url": { 23 | "type" : "string" 24 | }, 25 | "workload_name": { 26 | "type" : "string" 27 | } 28 | }, 29 | "required": [ 30 | "database_type", 31 | "database_url", 32 | "username", 33 | "password", 34 | "upload_code", 35 | "upload_url", 36 | "workload_name" 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/json_validation_schema/schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-04/schema#", 3 | "title": "collector output", 4 | "description": "Collected metrics or knobs info", 5 | "type": "object", 6 | "properties": { 7 | "global" : { 8 | "type": ["object", "null"], 9 | "patternProperties" : { 10 | "^[A-Za-z0-9 -_]" : { 11 | "type" : "object", 12 | "patternProperties" : { 13 | "^[A-Za-z0-9 -_]" : { 14 | "type" : "string" 15 | } 16 | }, 17 | "additionalProperties": false 18 | } 19 | }, 20 | "additionalProperties": false 21 | }, 22 | "local" : { 23 | "type": ["object", "null"], 24 | "patternProperties" : { 25 | "^[A-Za-z0-9 -_]" : { 26 | "type" : "object", 27 | "patternProperties" : { 28 | "^[A-Za-z0-9 -_]" : { 29 | "type" : "object", 30 | "patternProperties" : { 31 | "^[A-Za-z0-9 -_]" : { 32 | "type" : "object", 33 | "patternProperties" : { 34 | "^[A-Za-z0-9 -_]" : { 35 | "type" : "string" 36 | } 37 | } 38 | } 39 | } 40 | } 41 | } 42 | } 43 | } 44 | } 45 | }, 46 | "required": ["global", "local"] 47 | } 
/*
 * OtterTune - DatabaseType.java
 *
 * Copyright (c) 2017-18, Carnegie Mellon University Database Group
 */

// BUG FIX: the declared package was 'com.controller', which contradicts the
// file's location (src/main/java/com/controller/types/) and the sibling
// JSONSchemaType, which declares 'com.controller.types'.
package com.controller.types;

import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

/** Database Type. */
public enum DatabaseType {

  /** Parameters: (1) JDBC Driver String */
  MYSQL("com.mysql.jdbc.Driver"),
  MYROCKS("com.mysql.jdbc.Driver"),
  POSTGRES("org.postgresql.Driver"),
  SAPHANA("com.sap.db.jdbc.Driver"),
  ORACLE("oracle.jdbc.driver.OracleDriver");

  private DatabaseType(String driver) {
    this.driver = driver;
  }

  /**
   * This is the suggested driver string to use in the configuration xml. This corresponds to the
   * 'driver' attribute.
   */
  private final String driver;

  // ---------------------------------------------------------------
  // ACCESSORS
  // ----------------------------------------------------------------

  /**
   * Returns the suggested driver string to use for the given database type.
   *
   * @return the JDBC driver class name
   */
  public String getSuggestedDriver() {
    return this.driver;
  }

  // ----------------------------------------------------------------
  // STATIC METHODS + MEMBERS
  // ----------------------------------------------------------------

  // Lookup tables: ordinal -> type and upper-cased name -> type. Generics
  // restored (the extraction appears to have stripped angle brackets).
  protected static final Map<Integer, DatabaseType> idx_lookup =
      new HashMap<Integer, DatabaseType>();
  protected static final Map<String, DatabaseType> name_lookup =
      new HashMap<String, DatabaseType>();

  static {
    for (DatabaseType vt : EnumSet.allOf(DatabaseType.class)) {
      DatabaseType.idx_lookup.put(vt.ordinal(), vt);
      DatabaseType.name_lookup.put(vt.name().toUpperCase(), vt);
    }
  }

  /**
   * Case-insensitive lookup of a DatabaseType by name.
   *
   * @param name the enum constant name, in any case
   * @return the matching type, or null if unknown
   */
  public static DatabaseType get(String name) {
    return DatabaseType.name_lookup.get(name.toUpperCase());
  }
}
CONFIG("config_schema.json"), 22 | SUMMARY("summary_schema.json"); 23 | 24 | // Path to JSON schema directory 25 | private static final String SCHEMA_PATH = "src/main/java/com/controller/json_validation_schema"; 26 | 27 | private final JsonSchema schema; 28 | 29 | private JSONSchemaType(String fileName) { 30 | JsonSchema newSchema = null; 31 | String configPath = FileUtil.joinPath(SCHEMA_PATH, fileName); 32 | try { 33 | newSchema = ValidationUtils.getSchemaNode(new File(configPath)); 34 | } catch (IOException | ProcessingException e) { 35 | e.printStackTrace(); 36 | } 37 | this.schema = newSchema; 38 | } 39 | 40 | public JsonSchema getSchema() { 41 | return this.schema; 42 | } 43 | 44 | public static boolean isValidJson(JSONSchemaType schemaType, String jsonString) { 45 | try { 46 | JsonNode jsonNode = ValidationUtils.getJsonNode(jsonString); 47 | return ValidationUtils.isJsonValid(schemaType.getSchema(), jsonNode); 48 | } catch (IOException | ProcessingException e) { 49 | e.printStackTrace(); 50 | } 51 | return false; 52 | } 53 | 54 | public static boolean isValidJson(JSONSchemaType schemaType, File jsonFile) { 55 | try { 56 | JsonNode jsonNode = ValidationUtils.getJsonNode(jsonFile); 57 | return ValidationUtils.isJsonValid(schemaType.getSchema(), jsonNode); 58 | } catch (IOException | ProcessingException e) { 59 | e.printStackTrace(); 60 | } 61 | return false; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/util/JSONSerializable.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - JSONSerializable.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.util; 8 | 9 | import com.controller.util.json.JSONException; 10 | import com.controller.util.json.JSONObject; 11 | import com.controller.util.json.JSONString; 12 | import 
com.controller.util.json.JSONStringer; 13 | import java.io.IOException; 14 | 15 | public interface JSONSerializable extends JSONString { 16 | public void save(String outputPath) throws IOException; 17 | 18 | public void load(String inputPath) throws IOException; 19 | 20 | public void toJSON(JSONStringer stringer) throws JSONException; 21 | 22 | public void fromJSON(JSONObject jsonObject) throws JSONException; 23 | } 24 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/util/json/JSONException.java: -------------------------------------------------------------------------------- 1 | package com.controller.util.json; 2 | 3 | /** 4 | * The JSONException is thrown by the JSON.org classes then things are amiss. 5 | * 6 | * @author JSON.org 7 | * @version 2008-09-18 8 | */ 9 | public class JSONException extends Exception { 10 | private static final long serialVersionUID = 1L; 11 | private Throwable cause; 12 | 13 | /** 14 | * Constructs a JSONException with an explanatory message. 15 | * 16 | * @param message Detail about the reason for the exception. 17 | */ 18 | public JSONException(String message) { 19 | super(message); 20 | } 21 | 22 | public JSONException(Throwable t) { 23 | super(t.getMessage()); 24 | this.cause = t; 25 | } 26 | 27 | public Throwable getCause() { 28 | return this.cause; 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /client/controller/src/main/java/com/controller/util/json/JSONString.java: -------------------------------------------------------------------------------- 1 | package com.controller.util.json; 2 | /** 3 | * The JSONString interface allows a toJSONString() method so that a class 4 | * can change the behavior of JSONObject.toString(), JSONArray.toString(), 5 | * and JSONWriter.value(Object). 
The toJSONString method will 6 | * be used instead of the default behavior of using the Object's toString() method and 7 | * quoting the result. 8 | */ 9 | public interface JSONString { 10 | /** 11 | * The toJSONString method allows a class to produce its own JSON serialization. 12 | * 13 | * @return A strictly syntactically correct JSON text. 14 | */ 15 | public String toJSONString(); 16 | } 17 | -------------------------------------------------------------------------------- /client/controller/src/test/java/com/controller/collectors/AbstractJSONValidationTestCase.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - AbstractJSONValidationTestCase.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | import com.controller.types.JSONSchemaType; 10 | import com.controller.util.FileUtil; 11 | import java.io.File; 12 | import junit.framework.TestCase; 13 | 14 | public abstract class AbstractJSONValidationTestCase extends TestCase { 15 | 16 | private static final String SAMPLE_OUTPUT_PATH = "sample_output"; 17 | private static final String SAMPLE_CONFIG_PATH = "config"; 18 | 19 | protected String dbName; 20 | 21 | protected void setUp(String dbName) throws Exception { 22 | super.setUp(); 23 | this.dbName = dbName; 24 | } 25 | 26 | public void testJsonKnobs() { 27 | String jsonKnobsPath = FileUtil.joinPath(SAMPLE_OUTPUT_PATH, this.dbName, "knobs.json"); 28 | assertTrue(JSONSchemaType.isValidJson(JSONSchemaType.OUTPUT, new File(jsonKnobsPath))); 29 | } 30 | 31 | public void testJsonMetrics() { 32 | String jsonMetricsBeforePath = 33 | FileUtil.joinPath(SAMPLE_OUTPUT_PATH, this.dbName, "metrics_before.json"); 34 | String jsonMetricsAfterPath = 35 | FileUtil.joinPath(SAMPLE_OUTPUT_PATH, this.dbName, "metrics_after.json"); 36 | assertTrue(JSONSchemaType.isValidJson(JSONSchemaType.OUTPUT, new File(jsonMetricsBeforePath))); 37 | 
assertTrue(JSONSchemaType.isValidJson(JSONSchemaType.OUTPUT, new File(jsonMetricsAfterPath))); 38 | } 39 | 40 | public void testJsonSummary() { 41 | String jsonSummaryPath = FileUtil.joinPath(SAMPLE_OUTPUT_PATH, this.dbName, "summary.json"); 42 | assertTrue(JSONSchemaType.isValidJson(JSONSchemaType.SUMMARY, new File(jsonSummaryPath))); 43 | } 44 | 45 | public void testJsonConfig() { 46 | String jsonConfigPath = 47 | FileUtil.joinPath(SAMPLE_CONFIG_PATH, "sample_" + this.dbName + "_config.json"); 48 | assertTrue(JSONSchemaType.isValidJson(JSONSchemaType.CONFIG, new File(jsonConfigPath))); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /client/controller/src/test/java/com/controller/collectors/TestInvalidJSON.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - TestInvalidJSON.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | import com.controller.types.JSONSchemaType; 10 | import junit.framework.TestCase; 11 | 12 | public class TestInvalidJSON extends TestCase { 13 | 14 | // Wrong number of levels for "global" 15 | private static final String BAD_JSON_TEXT_1 = 16 | "{" 17 | + " \"global\" : {" 18 | + " \"global\" : {" 19 | + " \"auto_generate_certs\": {" 20 | + " \"auto_pram\" : \"NO\"" 21 | + " }" 22 | + " }" 23 | + " }," 24 | + " \"local\" : {" 25 | + " }" 26 | + "}"; 27 | 28 | // Lacking "local" 29 | private static final String BAD_JSON_TEXT_2 = 30 | "{" 31 | + " \"global\" : {" 32 | + " \"global1\" : {" 33 | + " \"auto_generate_certs\": \"ON\"" 34 | + " }" 35 | + " }" 36 | + "}"; 37 | 38 | public void testBadJSONOutput() { 39 | assertFalse(JSONSchemaType.isValidJson(JSONSchemaType.OUTPUT, BAD_JSON_TEXT_1)); 40 | assertFalse(JSONSchemaType.isValidJson(JSONSchemaType.OUTPUT, BAD_JSON_TEXT_2)); 41 | } 42 | } 43 | 
-------------------------------------------------------------------------------- /client/controller/src/test/java/com/controller/collectors/TestMySQLJSON.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - TestMySQLJSON.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | public class TestMySQLJSON extends AbstractJSONValidationTestCase { 10 | 11 | @Override 12 | protected void setUp() throws Exception { 13 | super.setUp("mysql"); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /client/controller/src/test/java/com/controller/collectors/TestOracleJSON.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - TestOracleJSON.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | public class TestOracleJSON extends AbstractJSONValidationTestCase { 10 | 11 | @Override 12 | protected void setUp() throws Exception { 13 | super.setUp("oracle"); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /client/controller/src/test/java/com/controller/collectors/TestPostgresJSON.java: -------------------------------------------------------------------------------- 1 | /* 2 | * OtterTune - TestPostgresJSON.java 3 | * 4 | * Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | */ 6 | 7 | package com.controller.collectors; 8 | 9 | public class TestPostgresJSON extends AbstractJSONValidationTestCase { 10 | 11 | @Override 12 | protected void setUp() throws Exception { 13 | super.setUp("postgres"); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /client/driver/.gitignore: 
-------------------------------------------------------------------------------- 1 | dumpfiles 2 | log 3 | results 4 | next_config 5 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/0__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569909672722, 3 | "end_time": 1569909976978, 4 | "observation_time": 304, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/1__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569910014242, 3 | "end_time": 1569910319313, 4 | "observation_time": 305, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/2__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569910354445, 3 | "end_time": 1569910660810, 4 | "observation_time": 306, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/3__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569910713049, 3 | "end_time": 1569911018388, 4 | "observation_time": 305, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/4__summary.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569911053776, 3 | "end_time": 1569911357499, 4 | "observation_time": 303, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/5__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569911394879, 3 | "end_time": 1569911702048, 4 | "observation_time": 307, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/6__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569911740145, 3 | "end_time": 1569912047127, 4 | "observation_time": 306, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/7__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569912094747, 3 | "end_time": 1569912402932, 4 | "observation_time": 308, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/8__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569912454748, 3 | "end_time": 1569912762783, 4 | "observation_time": 308, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | 
-------------------------------------------------------------------------------- /client/driver/integrationTests/data/9__summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "start_time": 1569912808025, 3 | "end_time": 1569913111333, 4 | "observation_time": 303, 5 | "database_type": "postgres", 6 | "database_version": "9.6", 7 | "workload_name": "tpcc" 8 | } 9 | -------------------------------------------------------------------------------- /client/driver/integrationTests/data/x__summary.json: -------------------------------------------------------------------------------- 1 | {"start_time": 620000000, "end_time": 640000000, "observation_time": 100, "database_type": "postgres", "database_version": "9.6", "workload_name": "tpcc"} -------------------------------------------------------------------------------- /client/driver/oracleScripts/archiveLog.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Change log mode 4 | sqlplus / as sysdba < /app/.git_commit || :) \ 13 | && chmod +x install.sh \ 14 | && ./install.sh base 15 | 16 | ENV DEBUG=$DEBUG 17 | -------------------------------------------------------------------------------- /docker/Dockerfile.driver: -------------------------------------------------------------------------------- 1 | FROM ottertune-base 2 | 3 | ARG GRADLE_VERSION=gradle-5.5.1 4 | 5 | ENV GRADLE_HOME=/opt/${GRADLE_VERSION} 6 | ENV PATH=${GRADLE_HOME}/bin:${PATH} 7 | 8 | RUN /install.sh driver 9 | 10 | COPY ./client /app 11 | 12 | WORKDIR /app/driver 13 | 14 | -------------------------------------------------------------------------------- /docker/Dockerfile.web: -------------------------------------------------------------------------------- 1 | FROM ottertune-base 2 | 3 | COPY ./server /app 4 | 5 | WORKDIR /app/website 6 | 7 | COPY ./docker/credentials.py ./website/settings 8 | COPY ./docker/start.sh ./docker/start-test.sh 
./docker/wait-for-it.sh ./ 9 | 10 | RUN /install.sh web \ 11 | && chmod +x ./*.sh 12 | 13 | ENV DJANGO_SETTINGS_MODULE=website.settings 14 | ENV C_FORCE_ROOT=true 15 | 16 | ENTRYPOINT ["./start.sh"] 17 | 18 | -------------------------------------------------------------------------------- /docker/credentials.py: -------------------------------------------------------------------------------- 1 | import json 2 | import random 3 | import string 4 | from datetime import timedelta 5 | from os import environ as env 6 | 7 | debug = env.get('DEBUG', 'true').lower() == 'true' 8 | rabbitmq_host = env.get('RABBITMQ_HOST', 'localhost') 9 | backend = env.get('BACKEND', 'postgresql') 10 | db_name = env.get('DB_NAME', 'ottertune') 11 | db_host = env.get('DB_HOST', 'localhost') 12 | db_pwd = env.get('DB_PASSWORD', '') 13 | bg_run_every = env.get('BG_TASKS_RUN_EVERY', None) # minutes 14 | 15 | if backend == 'mysql': 16 | default_user = 'root' 17 | default_port = '3306' 18 | default_opts = { 19 | 'init_command': "SET sql_mode='STRICT_TRANS_TABLES',innodb_strict_mode=1", 20 | } 21 | else: 22 | default_user = 'postgres' 23 | default_port = '5432' 24 | default_opts = {} 25 | 26 | db_user = env.get('DB_USER', default_user) 27 | db_port = env.get('DB_PORT', default_port) 28 | db_opts = env.get('DB_OPTS', default_opts) 29 | if isinstance(db_opts, str): 30 | db_opts = json.loads(db_opts) if db_opts else {} 31 | 32 | SECRET_KEY = ''.join(random.choice(string.hexdigits) for _ in range(16)) 33 | DATABASES = { 34 | 'default': {'ENGINE': 'django.db.backends.' 
+ backend, 35 | 'NAME': db_name, 36 | 'USER': db_user, 37 | 'PASSWORD': db_pwd, 38 | 'HOST': db_host, 39 | 'PORT': db_port, 40 | 'OPTIONS': db_opts, 41 | } 42 | } 43 | DEBUG = debug 44 | ADMINS = () 45 | MANAGERS = ADMINS 46 | ALLOWED_HOSTS = ['*'] 47 | BROKER_URL = 'amqp://guest:guest@{}:5672//'.format(rabbitmq_host) 48 | 49 | if bg_run_every is not None: 50 | # Defines the periodic task schedule for celerybeat 51 | CELERYBEAT_SCHEDULE = { 52 | 'run-every-{}m'.format(bg_run_every): { 53 | 'task': 'run_background_tasks', 54 | 'schedule': timedelta(minutes=int(bg_run_every)), 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /docker/docker-compose.build.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | 4 | base: 5 | build: 6 | context: ../ 7 | dockerfile: ./docker/Dockerfile.base 8 | args: 9 | DEBUG: "true" 10 | image: ottertune-base 11 | labels: 12 | NAME: "ottertune-base" 13 | 14 | web: 15 | build: 16 | context: ../ 17 | dockerfile: ./docker/Dockerfile.web 18 | image: ottertune-web 19 | depends_on: 20 | - base 21 | labels: 22 | NAME: "ottertune-web" 23 | 24 | driver: 25 | build: 26 | context: ../ 27 | dockerfile: ./docker/Dockerfile.driver 28 | image: ottertune-driver 29 | depends_on: 30 | - base 31 | labels: 32 | NAME: "ottertune-driver" 33 | -------------------------------------------------------------------------------- /docker/docker-compose.up.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | 4 | web: 5 | image: ottertune-web 6 | container_name: web 7 | expose: 8 | - "8000" 9 | ports: 10 | - "8000:8000" 11 | links: 12 | - backend 13 | - rabbitmq 14 | depends_on: 15 | - backend 16 | - rabbitmq 17 | environment: 18 | DEBUG: 'true' 19 | ADMIN_PASSWORD: 'changeme' 20 | BACKEND: 'postgresql' 21 | DB_NAME: 'ottertune' 22 | DB_USER: 'postgres' 23 | DB_PASSWORD: 'ottertune' 24 | 
DB_HOST: 'backend' 25 | DB_PORT: '5432' 26 | DB_OPTS: '{}' 27 | MAX_DB_CONN_ATTEMPTS: 30 28 | RABBITMQ_HOST: 'rabbitmq' 29 | working_dir: /app/website 30 | entrypoint: ./start.sh 31 | labels: 32 | NAME: "ottertune-web" 33 | networks: 34 | - ottertune-net 35 | 36 | driver: 37 | image: ottertune-driver 38 | container_name: driver 39 | depends_on: 40 | - web 41 | environment: 42 | DEBUG: 'true' 43 | working_dir: /app/driver 44 | labels: 45 | NAME: "ottertune-driver" 46 | networks: 47 | - ottertune-net 48 | 49 | rabbitmq: 50 | image: "rabbitmq:3-management" 51 | container_name: rabbitmq 52 | restart: always 53 | hostname: "rabbitmq" 54 | environment: 55 | RABBITMQ_DEFAULT_USER: "guest" 56 | RABBITMQ_DEFAULT_PASS: "guest" 57 | RABBITMQ_DEFAULT_VHOST: "/" 58 | expose: 59 | - "15672" 60 | - "5672" 61 | ports: 62 | - "15673:15672" 63 | - "5673:5672" 64 | labels: 65 | NAME: "rabbitmq" 66 | networks: 67 | - ottertune-net 68 | 69 | backend: 70 | container_name: backend 71 | restart: always 72 | image: postgres:9.6 73 | environment: 74 | POSTGRES_USER: 'postgres' 75 | POSTGRES_PASSWORD: 'ottertune' 76 | POSTGRES_DB: 'ottertune' 77 | expose: 78 | - "5432" 79 | ports: 80 | - "5432:5432" 81 | labels: 82 | NAME: "ottertune-backend" 83 | networks: 84 | - ottertune-net 85 | 86 | networks: 87 | ottertune-net: 88 | driver: bridge 89 | -------------------------------------------------------------------------------- /docker/start-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Wait for MySQL connection 4 | /bin/bash wait-for-it.sh 5 | 6 | ## Needs a connection to a DB so migrations go here 7 | python3 manage.py makemigrations website 8 | python3 manage.py migrate 9 | 10 | python3 manage.py stopcelery 11 | python3 manage.py startcelery 12 | python3 manage.py runserver 0.0.0.0:8000 & 13 | 14 | # Wait for server 15 | sleep 10 16 | 17 | # Integration tests 18 | cd /app/client/driver 19 | fab integration_tests 20 | 
#!/bin/sh
# Block until the configured backend database ($BACKEND: "mysql" or anything
# else, treated as postgres) accepts connections, retrying once per second up
# to MAX_DB_CONN_ATTEMPTS times (default 60). Exits 0 on success or when
# waiting is disabled (maxcounter <= 0), 1 on timeout or missing BACKEND.

maxcounter=${MAX_DB_CONN_ATTEMPTS:-60}

if [ "$maxcounter" -le 0 ]; then
    echo "Skipping wait-for-it.sh..."
    exit 0
fi

if [ -z "$BACKEND" ]; then
    echo "ERROR: variable 'BACKEND' must be set. Exiting."
    exit 1
fi

# wait until the database is really available
echo "Trying to connect to $BACKEND (timeout=${maxcounter}s)"
echo ""

# Probe the backend once; returns the client's exit status (0 = reachable).
ready () {

    if [ "$BACKEND" = "mysql" ]; then
        mysql \
            --host="$DB_HOST" \
            --protocol TCP \
            -u"$DB_USER" \
            -p"$DB_PASSWORD" \
            -e "show databases;" > /dev/null 2>&1
    else
        PGPASSWORD="$DB_PASSWORD" psql \
            -h "$DB_HOST" \
            -U "$DB_USER" \
            -c "select * from pg_database" > /dev/null 2>&1
    fi
    return $?
}


counter=1
while ! ready; do
    counter=`expr $counter + 1`

    # BUG FIX: the original message interpolated $MAX_DB_CONN_ATTEMPTS, which
    # is empty whenever the default (60) is in effect; report the effective
    # limit ($maxcounter) instead.
    if [ $counter -gt $maxcounter ]; then
        >&2 echo "ERROR: Could not connect to $BACKEND after $maxcounter seconds; Exiting."
        exit 1
    fi;
    sleep 1
done

echo "-=------------------------------------------------------"
echo "-=------------------------------------------------------"
echo "Connected to $BACKEND!"
echo "-=------------------------------------------------------"
echo "-=------------------------------------------------------"
11 | # 12 | # To enable, symlink this file to '.git/hooks/pre-commit' like so: 13 | # cd $OTTERTUNE_DIR/.git/hooks 14 | # ln -s ../../script/git-hooks/pre-commit ./pre-commit 15 | 16 | 17 | FILES=$(git diff --name-only --cached --diff-filter=d | grep -E '.*\.(py|java)$') 18 | 19 | WEBSITE_TESTS_RESULT=0 20 | ANALYSIS_TESTS_RESULT=0 21 | CONTROLLER_TESTS_RESULT=0 22 | VALIDATOR_RESULT=0 23 | 24 | if [ -n "$FILES" ]; then 25 | 26 | # Uncomment to run the website tests 27 | # cd server/website && python3 manage.py test --noinput -v 2 28 | # WEBSITE_TESTS_RESULT=$? 29 | # cd ../.. 30 | 31 | # Uncomment to run the analysis tests 32 | # cd server && python3 -m unittest discover -s analysis/tests -v 33 | # ANALYSIS_TESTS_RESULT=$? 34 | # cd .. 35 | 36 | # Uncomment to run the controller tests 37 | # cd controller && gradle build -q 38 | # CONTROLLER_TESTS_RESULT=$? 39 | # cd .. 40 | 41 | # Run source code validator 42 | python3 script/validators/source_validator.py $FILES 43 | VALIDATOR_RESULT=$? 44 | 45 | if [ "$VALIDATOR_RESULT" -ne 0 ] || [ "$WEBSITE_TESTS_RESULT" -ne 0 ] || [ "$ANALYSIS_TESTS_RESULT" -ne 0 ] || [ "$CONTROLLER_TESTS_RESULT" -ne 0 ]; then 46 | 47 | echo " +------------------------------------------------------------+" 48 | echo " | |" 49 | echo " | OTTERTUNE PRE-COMMIT HOOK |" 50 | echo " | |" 51 | echo " +------------------------------------------------------------+" 52 | echo "" 53 | 54 | if [ "$WEBSITE_TESTS_RESULT" -ne 0 ]; then 55 | echo " FAILED website tests!" 56 | echo "" 57 | fi 58 | 59 | if [ "$ANALYSIS_TESTS_RESULT" -ne 0 ]; then 60 | echo " FAILED analysis tests!" 61 | echo "" 62 | fi 63 | 64 | if [ "$CONTROLLER_TESTS_RESULT" -ne 0 ]; then 65 | echo " FAILED controller tests!" 66 | echo "" 67 | fi 68 | 69 | if [ "$VALIDATOR_RESULT" -ne 0 ]; then 70 | echo " FAILED source validation!" 
71 | echo "" 72 | echo " Use the formatting script to help format all changed files:" 73 | echo " (ottertune/script/formatting/formatter.py)" 74 | echo "" 75 | echo " \"python formatter.py --staged-files\"" 76 | echo "" 77 | fi 78 | 79 | echo " To temporarily bypass the pre-commit hook, use:" 80 | echo "" 81 | echo " \"git commit --no-verify\"" 82 | echo 83 | echo " Be aware that changed files have to be staged again!" 84 | 85 | exit 1 86 | fi 87 | 88 | fi 89 | 90 | exit 0 91 | -------------------------------------------------------------------------------- /script/query_and_get.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - query_and_get.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | ''' 7 | Created on Feb 11, 2018 8 | 9 | @author: taodai 10 | ''' 11 | 12 | import sys 13 | import time 14 | import logging 15 | import json 16 | import urllib.request 17 | 18 | # Logging 19 | LOG = logging.getLogger(__name__) 20 | LOG.addHandler(logging.StreamHandler()) 21 | LOG.setLevel(logging.INFO) 22 | 23 | 24 | # take 3 arguments, save result to next_config in working directory 25 | # base_url: for instance, https://0.0.0.0:8000/ 26 | # upload_code: upload code... 
# take 3 arguments, save result to next_config in working directory
# base_url: for instance, https://0.0.0.0:8000/
# upload_code: upload code...
# query_interval: time (in second) between queries
def main():
    """Poll the server until the next tuning configuration is ready and save
    it to ./next_config.

    Reads from sys.argv: base_url, upload_code, query_interval (seconds).
    Loops forever until the server reports failure or returns a config.
    """
    base_url = sys.argv[1].strip('/')
    upload_code = sys.argv[2]
    query_interval = int(sys.argv[3])
    request = base_url + '/query_and_get/' + upload_code
    timer = 0
    start = time.time()
    while True:
        response = urllib.request.urlopen(request).read().decode()
        if 'Fail' in response:
            LOG.info('Tuning failed\n')
            break
        elif response == 'null' or 'not ready' in response:
            time.sleep(query_interval)
            timer += query_interval
            LOG.info('%s s\n', str(timer))
        else:
            # BUG FIX: the original passed json.loads(response) straight to
            # file.write(), which raises TypeError whenever the payload
            # decodes to a dict/list (write() accepts only str). Decode once,
            # re-serialize non-string payloads, and use a context manager so
            # the file is always closed.
            next_config = json.loads(response)
            if not isinstance(next_config, str):
                next_config = json.dumps(next_config)
            with open('next_config', 'w') as next_conf_f:
                next_conf_f.write(next_config)
            break
    elapsed_time = time.time() - start
    LOG.info('Elapsed time: %s\n', str(elapsed_time))


if __name__ == "__main__":
    main()
class OUProcess(object):
    """Ornstein-Uhlenbeck process.

    Generates temporally correlated exploration noise for DDPG action
    selection: each step drifts toward the mean `mu` at rate `theta` and adds
    Gaussian diffusion scaled by `sigma`.
    """

    def __init__(self, n_actions, theta=0.15, mu=0, sigma=0.1, ):
        self.n_actions = n_actions
        self.theta = theta
        self.mu = mu
        self.sigma = sigma
        # Start the process at its long-run mean.
        self.current_value = self.mu * np.ones(self.n_actions)

    def reset(self, sigma=0, theta=0):
        """Re-center the process at the mean.

        A non-zero `sigma`/`theta` overrides the current setting; zero means
        "keep the existing value".
        """
        self.current_value = self.mu * np.ones(self.n_actions)
        if sigma != 0:
            self.sigma = sigma
        if theta != 0:
            self.theta = theta

    def noise(self):
        """Advance the process one step and return the new value."""
        state = self.current_value
        drift = self.theta * (self.mu - state)
        diffusion = self.sigma * np.random.randn(len(state))
        self.current_value = state + drift + diffusion
        return self.current_value
class GPRC(GPR):
    """GPR variant that caches the Cholesky factor of the kernel matrix and
    the corresponding whitened residuals ("alpha"), so repeated predictions
    skip the expensive re-factorization.
    """

    def __init__(self, X, Y, kern, mean_function=None, **kwargs):
        super(GPRC, self).__init__(X, Y, kern, mean_function, **kwargs)
        # Lazily computed cache, filled on the first call to _build_predict.
        self.cholesky = None
        self.alpha = None

    @autoflow()
    def _compute_cache(self):
        """Factor K + noise*I and solve for the whitened residuals."""
        gram = self.kern.K(self.X)
        gram += tf.eye(tf.shape(self.X)[0], dtype=settings.float_type) * self.likelihood.variance
        chol = tf.cholesky(gram, name='gp_cholesky')
        resid = self.Y - self.mean_function(self.X)
        whitened = tf.matrix_triangular_solve(chol, resid, name='gp_alpha')
        return chol, whitened

    def update_cache(self):
        """Recompute and store the Cholesky/alpha cache."""
        self.cholesky, self.alpha = self._compute_cache()

    @name_scope('predict')
    @params_as_tensors
    def _build_predict(self, Xnew, full_cov=False):
        """Predictive mean/variance at Xnew, plus the kernel hyperparameters
        (kernel variance, lengthscales, likelihood variance)."""
        if self.cholesky is None:
            self.update_cache()
        k_star = self.kern.K(self.X, Xnew)
        solved = tf.matrix_triangular_solve(self.cholesky, k_star, lower=True)
        fmean = tf.matmul(solved, self.alpha, transpose_a=True) + self.mean_function(Xnew)
        if full_cov:
            fvar = self.kern.K(Xnew) - tf.matmul(solved, solved, transpose_a=True)
            fvar = tf.tile(tf.expand_dims(fvar, 2), tf.stack([1, 1, tf.shape(self.Y)[1]]))
        else:
            fvar = self.kern.Kdiag(Xnew) - tf.reduce_sum(tf.square(solved), 0)
            fvar = tf.tile(tf.reshape(fvar, (-1, 1)), [1, tf.shape(self.Y)[1]])
        return fmean, fvar, self.kern.variance, self.kern.lengthscales, self.likelihood.variance
class GPRGDResult():
    """Container for the outputs of tf_optimize."""

    def __init__(self, ypreds=None, sigmas=None, minl=None, minl_conf=None):
        self.ypreds = ypreds        # predicted means at the final points
        self.sigmas = sigmas        # predicted standard deviations
        self.minl = minl            # final UCB loss values
        self.minl_conf = minl_conf  # the (bounded) optimized input points


def tf_optimize(model, Xnew_arr, learning_rate=0.01, maxiter=100, ucb_beta=3.,
                active_dims=None, bounds=None, debug=True):
    """Minimize the GP-UCB loss (mean - beta * std) over the rows of Xnew_arr
    with Adam, optionally restricting gradient flow to `active_dims` and
    clipping each point to `bounds` (a (lower, upper) pair).
    """
    Xnew_arr = check_array(Xnew_arr, copy=False, warn_on_dtype=True, dtype=FLOAT_DTYPES)

    Xnew = tf.Variable(Xnew_arr, name='Xnew', dtype=settings.float_type)
    if bounds is None:
        lower = tf.constant(-np.infty, dtype=settings.float_type)
        upper = tf.constant(np.infty, dtype=settings.float_type)
    else:
        lower = tf.constant(bounds[0], dtype=settings.float_type)
        upper = tf.constant(bounds[1], dtype=settings.float_type)
    Xnew_bounded = tf.minimum(tf.maximum(Xnew, lower), upper)

    if active_dims:
        # Only entries in `active_dims` flow through `part_X`; the remaining
        # entries pass through stop_gradient, so their values are preserved
        # but never updated by the optimizer.
        coords = []
        values = []
        for col in active_dims:
            for row in range(Xnew_arr.shape[0]):
                coords.append([row, col])
                values.append(Xnew_bounded[row, col])
        part_X = tf.scatter_nd(coords, values, Xnew_arr.shape)
        Xin = part_X + tf.stop_gradient(-part_X + Xnew_bounded)
    else:
        Xin = Xnew_bounded

    beta_t = tf.constant(ucb_beta, name='ucb_beta', dtype=settings.float_type)
    fmean, fvar, kvar, kls, lvar = model._build_predict(Xin)  # pylint: disable=protected-access
    y_mean, y_var = model.likelihood.predict_mean_and_var(fmean, fvar)
    y_std = tf.sqrt(y_var)
    # UCB acquisition, negated for minimization: mean - beta * std.
    loss = tf.subtract(y_mean, tf.multiply(beta_t, y_std), name='loss_fn')
    opt = tf.train.AdamOptimizer(learning_rate, epsilon=1e-6)
    train_op = opt.minimize(loss)
    init_op = tf.variables_initializer([Xnew] + opt.variables())
    session = model.enquire_session(session=None)
    with session.as_default():
        session.run(init_op)
        for _ in range(maxiter):
            session.run(train_op)
        Xnew_value = session.run(Xnew_bounded)
        y_mean_value = session.run(y_mean)
        y_std_value = session.run(y_std)
        loss_value = session.run(loss)
        assert_all_finite(Xnew_value)
        assert_all_finite(y_mean_value)
        assert_all_finite(y_std_value)
        assert_all_finite(loss_value)
        if debug:
            LOG.info("kernel variance: %f", session.run(kvar))
            LOG.info("kernel lengthscale: %f", session.run(kls))
            LOG.info("likelihood variance: %f", session.run(lvar))
    return GPRGDResult(y_mean_value, y_std_value, loss_value, Xnew_value)


# --- server/analysis/gpr/predict.py ---

class GPRResult():
    """Container for the outputs of gpflow_predict."""

    def __init__(self, ypreds=None, sigmas=None):
        self.ypreds = ypreds  # predicted means
        self.sigmas = sigmas  # predicted standard deviations


def gpflow_predict(model, Xin):
    """Return the model's predictive mean and standard deviation at Xin."""
    Xin = check_array(Xin, copy=False, warn_on_dtype=True, dtype=FLOAT_DTYPES)
    fmean, fvar, _, _, _ = model._build_predict(Xin)  # pylint: disable=protected-access
    mean_t, var_t = model.likelihood.predict_mean_and_var(fmean, fvar)
    std_t = tf.sqrt(var_t)

    session = model.enquire_session(session=None)
    with session.as_default():
        mean_value = session.run(mean_t)
        std_value = session.run(std_t)
        assert_all_finite(mean_value)
        assert_all_finite(std_value)
    return GPRResult(mean_value, std_value)
import numpy as np


def get_beta_t(t, **kwargs):
    """Time-dependent UCB coefficient: 2 * log(t / sqrt(log(2t))).

    `t` is the (positive) iteration number. Extra keyword arguments are
    accepted (and ignored) so that all beta functions share one calling
    convention with get_ucb_beta().

    BUG FIX: for t <= 0.5 the inner log is non-positive, so the original
    formula produced NaN, which then failed get_ucb_beta's non-negativity
    assertion. Return 0.0 (no exploration bonus) in that regime, mirroring
    the floor already used by get_beta_td.
    """
    assert t > 0.
    inner = np.log(2. * t)
    if inner <= 0.:
        return 0.
    bt = 2. * np.log(t / np.sqrt(inner))
    return bt if bt > 0. else 0.


def get_beta_td(t, ndim, bound=1.0, **kwargs):
    """Dimension-aware UCB coefficient, floored at 0.

    t: iteration number (> 0); ndim: input dimensionality (> 0);
    bound: confidence bound parameter (> 0).
    """
    assert t > 0.
    assert ndim > 0.
    assert bound > 0.
    bt = 2. * np.log(float(ndim) * t**2 * np.pi**2 / (6. * bound))
    return np.sqrt(bt) if bt > 0. else 0.


# Dispatch table mapping the string form of `ucb_beta` to its function.
_UCB_MAP = {
    'get_beta_t': get_beta_t,
    'get_beta_td': get_beta_td,
}


def get_ucb_beta(ucb_beta, scale=1., **kwargs):
    """Resolve `ucb_beta` — either a non-negative float or the name of a beta
    function in _UCB_MAP — into a concrete, scaled coefficient.

    Any extra keyword arguments are forwarded to the beta function.
    Raises ValueError (via check_valid) on an invalid `ucb_beta`.
    """
    check_valid(ucb_beta)
    if not isinstance(ucb_beta, float):
        ucb_beta = _UCB_MAP[ucb_beta](**kwargs)
    assert isinstance(ucb_beta, float), type(ucb_beta)
    ucb_beta *= scale
    assert ucb_beta >= 0.0
    return ucb_beta


def check_valid(ucb_beta):
    """Raise ValueError unless `ucb_beta` is a non-negative float or the name
    of a known beta function."""
    if isinstance(ucb_beta, float):
        if ucb_beta < 0.0:
            raise ValueError(("Invalid value for 'ucb_beta': {} "
                              "(expected >= 0.0)").format(ucb_beta))
    else:
        if ucb_beta not in _UCB_MAP:
            raise ValueError(("Invalid value for 'ucb_beta': {} "
                              "(expected 'get_beta_t' or 'get_beta_td')").format(ucb_beta))
# test ddpg model:
# The environment has a 1-dim state and a 1-dim action. Reward is 1 when the
# action lands on the same side of 0.5 as the previous state, else 0.
# Train on 100 samples (10 updates each), then evaluate for 500 steps; the
# test passes when the average reward exceeds 0.5.
class TestDDPG(unittest.TestCase):
    """Smoke test for the DDPG agent on a trivial 1-D environment."""

    @classmethod
    def setUpClass(cls):
        # Fix every RNG involved so the run is reproducible.
        random.seed(0)
        np.random.seed(0)
        torch.manual_seed(0)
        super(TestDDPG, cls).setUpClass()
        cls.ddpg = DDPG(n_actions=1, n_states=1, gamma=0, alr=0.02)
        action = np.zeros(1)
        state = np.array([random.random()])
        for _ in range(100):
            prev_state = state
            state = np.array([random.random()])
            hit = (prev_state[0] - 0.5) * (action[0] - 0.5) > 0
            reward = np.array([1.0 if hit else 0.0])
            cls.ddpg.add_sample(prev_state, action, reward, state)
            for _ in range(10):
                cls.ddpg.update()
            action = cls.ddpg.choose_action(state)

    def test_ddpg_ypreds(self):
        """After training, the policy should beat 0.5 average reward."""
        total_reward = 0.0
        for _ in range(500):
            state = np.array([random.random()])
            action = self.ddpg.choose_action(state)
            if (state[0] - 0.5) * (action[0] - 0.5) > 0:
                total_reward += 1.0
        self.assertGreater(total_reward / 500, 0.5)


if __name__ == '__main__':
    unittest.main()
# test neural network
class TestNN(unittest.TestCase):
    """Regression test pinning NeuralNet predictions on the Boston dataset."""

    @classmethod
    def setUpClass(cls):
        super(TestNN, cls).setUpClass()
        boston = datasets.load_boston()
        features = boston['data']
        train_X = features[0:500]
        test_X = features[500:]
        train_y = boston['target'][0:500].reshape(500, 1)
        # Seed every RNG so the learned weights are reproducible.
        random.seed(0)
        np.random.seed(0)
        set_random_seed(0)
        cls.model = NeuralNet(n_input=test_X.shape[1],
                              batch_size=test_X.shape[0],
                              reset_seed=True)
        cls.model.fit(train_X, train_y)
        cls.nn_result = cls.model.predict(test_X)
        cls.nn_recommend = cls.model.recommend(test_X)

    def test_nn_ypreds(self):
        """Predictions must match the recorded reference values."""
        rounded = ['%.3f' % pred[0] for pred in self.nn_result]
        expected = ['21.279', '22.668', '23.115', '27.228', '25.892', '23.967']
        self.assertEqual(rounded, expected)

    def test_nn_yrecommend(self):
        """Recommended minima must match the recorded reference values."""
        rounded = ['%.3f' % rec[0] for rec in self.nn_recommend.minl]
        self.assertEqual(rounded, ['21.279'] * 6)
'b', 'c'] 20 | 21 | enc = DummyEncoder(n_values, categorical_features, 22 | cat_columnlabels, noncat_columnlabels) 23 | X_encoded = enc.fit_transform(X) 24 | new_labels = enc.new_labels 25 | self.assertTrue(np.all(X == X_encoded)) 26 | self.assertEqual(noncat_columnlabels, new_labels) 27 | 28 | def test_simple_categorical(self): 29 | X = [[0, 1, 2], [1, 1, 2], [2, 1, 2]] 30 | n_values = [3] 31 | categorical_features = [0] 32 | cat_columnlabels = ['label'] 33 | noncat_columnlabels = ['a', 'b'] 34 | 35 | X_expected = [[1, 0, 0, 1, 2], [0, 1, 0, 1, 2], [0, 0, 1, 1, 2]] 36 | new_labels_expected = ['label____0', 'label____1', 'label____2', 'a', 'b'] 37 | enc = DummyEncoder(n_values, categorical_features, 38 | cat_columnlabels, noncat_columnlabels) 39 | X_encoded = enc.fit_transform(X) 40 | new_labels = enc.new_labels 41 | self.assertTrue(np.all(X_expected == X_encoded)) 42 | self.assertEqual(new_labels_expected, new_labels) 43 | 44 | def test_mixed_categorical(self): 45 | X = [[1, 0, 2], [1, 1, 2], [1, 2, 2]] 46 | n_values = [3] 47 | categorical_features = [1] 48 | cat_columnlabels = ['label'] 49 | noncat_columnlabels = ['a', 'b'] 50 | 51 | X_expected = [[1, 0, 0, 1, 2], [0, 1, 0, 1, 2], [0, 0, 1, 1, 2]] 52 | new_labels_expected = ['label____0', 'label____1', 'label____2', 'a', 'b'] 53 | enc = DummyEncoder(n_values, categorical_features, 54 | cat_columnlabels, noncat_columnlabels) 55 | X_encoded = enc.fit_transform(X) 56 | new_labels = enc.new_labels 57 | self.assertTrue(np.all(X_expected == X_encoded)) 58 | self.assertEqual(new_labels_expected, new_labels) 59 | 60 | def test_consolidate(self): 61 | labels = ['label1____0', 'label1____1', 'label2____0', 'label2____1', 'noncat'] 62 | consolidated = consolidate_columnlabels(labels) 63 | expected = ['label1', 'label2', 'noncat'] 64 | self.assertEqual(expected, consolidated) 65 | 66 | def test_inverse_transform(self): 67 | X = [[1, 0, 2], [1, 1, 2], [1, 2, 2]] 68 | n_values = [3] 69 | categorical_features = [1] 70 | 
import logging
from numbers import Number

import contextlib
import datetime
import numpy as np


def get_analysis_logger(name, level=logging.INFO):
    """Return a console logger configured with the analysis log format.

    Also installs the shared float print format for numpy arrays.
    """
    logger = logging.getLogger(name)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s [%(funcName)s:%(lineno)03d] %(levelname)-5s: %(message)s',
        datefmt='%m-%d-%Y %H:%M:%S'))
    logger.addHandler(handler)
    logger.setLevel(level)
    np.set_printoptions(formatter={'float': '{: 0.3f}'.format})
    return logger


LOG = get_analysis_logger(__name__)


def stdev_zero(data, axis=None, nearzero=1e-8):
    """Boolean mask of entries whose standard deviation along `axis` is
    within `nearzero` of zero."""
    stds = np.expand_dims(data.std(axis=axis), axis=axis)
    return (np.abs(stds) < nearzero).squeeze()


def get_datetime():
    """Current UTC timestamp."""
    return datetime.datetime.utcnow()


class TimerStruct(object):
    """Simple start/stop wall-clock timer."""

    def __init__(self):
        self.__start_time = 0.0
        self.__stop_time = 0.0
        self.__elapsed = None

    @property
    def elapsed_seconds(self):
        """Seconds elapsed; live while running, frozen after stop()."""
        if self.__elapsed is None:
            return (get_datetime() - self.__start_time).total_seconds()
        return self.__elapsed.total_seconds()

    def start(self):
        self.__start_time = get_datetime()

    def stop(self):
        self.__stop_time = get_datetime()
        self.__elapsed = (self.__stop_time - self.__start_time)


@contextlib.contextmanager
def stopwatch(message=None):
    """Context manager that times its body and, when `message` is given,
    logs the elapsed time on exit."""
    ts = TimerStruct()
    ts.start()
    try:
        yield ts
    finally:
        ts.stop()
        if message is not None:
            LOG.info('Total elapsed_seconds time for %s: %.3fs', message, ts.elapsed_seconds)


def get_data_base(arr):
    """For a given Numpy array, finds the
    base array that "owns" the actual data."""
    owner = arr
    while isinstance(owner.base, np.ndarray):
        owner = owner.base
    return owner


def arrays_share_data(x, y):
    """True when x and y are views onto the same underlying data buffer."""
    return get_data_base(x) is get_data_base(y)


def array_tostring(arr):
    """Element-wise str() conversion that preserves the array's shape."""
    shape = arr.shape
    flat = np.array([str(item) for item in arr.ravel()])
    return flat.reshape(shape)


def is_numeric_matrix(matrix):
    """True when the (non-empty) matrix holds numeric values, judged from
    its first element."""
    assert matrix.size > 0
    return isinstance(matrix.ravel()[0], Number)


def is_lexical_matrix(matrix):
    """True when the (non-empty) matrix holds strings, judged from its
    first element."""
    assert matrix.size > 0
    return isinstance(matrix.ravel()[0], str)
-------------------------------------------------------------------------------- 1 | Website 2 | ======= 3 | 4 | OLTP-Bench Website is an intermediate between the client's database and OtterTune (DBMS Auto-tuning system). 5 | 6 | ## Requirements 7 | 8 | ##### Ubuntu Packages 9 | 10 | ``` 11 | sudo apt-get install python-pip python-dev python-mysqldb rabbitmq-server 12 | ``` 13 | 14 | ##### Python Packages 15 | 16 | ``` 17 | sudo pip install -r requirements.txt 18 | ``` 19 | 20 | ## Installation Instructions 21 | 22 | 23 | ##### 1. Update the Django settings 24 | 25 | Navigate to the settings directory: 26 | 27 | ``` 28 | cd website/settings 29 | ``` 30 | 31 | Copy the credentials template: 32 | 33 | ``` 34 | cp credentials_TEMPLATE.py credentials.py 35 | ``` 36 | 37 | Edit `credentials.py` and update the secret key and database information. 38 | 39 | ##### 2. Serve the static files 40 | 41 | If you do not use the website for production, simply set `DEBUG = True` in `credentials.py`. Then Django will handle static files automatically. 42 | 43 | This is not an efficient way for production. You need to configure other servers like Apache to serve static files in the production environment. ([Details](https://docs.djangoproject.com/en/1.11/howto/static-files/deployment/)) 44 | 45 | ##### 3. Create the MySQL database if it does not already exist 46 | 47 | ``` 48 | mysqladmin create -u -p ottertune 49 | ``` 50 | 51 | ##### 4. Migrate the Django models into the database 52 | 53 | ``` 54 | python manage.py makemigrations website 55 | python manage.py migrate 56 | ``` 57 | 58 | ##### 5. Create the super user 59 | 60 | ``` 61 | python manage.py createsuperuser 62 | ``` 63 | 64 | ##### 6. 
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    # Point Django at the project settings before loading its machinery.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "website.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
#
# OtterTune - upload_data.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
'''
Created on Nov 30, 2017

@author: dvanaken
'''

import argparse
import glob
import logging
import os
import requests

# Logging
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.StreamHandler())
LOG.setLevel(logging.INFO)


def upload(basedir, upload_code, upload_url):
    """Upload every generated sample under `basedir` to the website.

    Each workload directory is expected to contain files named
    sample-<idx>__{summary,knobs,metrics_start,metrics_end}.json; samples are
    uploaded in index order until a gap is found.
    """
    for wkld_dir in sorted(glob.glob(os.path.join(basedir, '*'))):
        LOG.info('Uploading sample for workload %s...', wkld_dir)
        sample_idx = 0
        while True:
            samples = glob.glob(os.path.join(wkld_dir, 'sample-{}__*').format(sample_idx))
            if len(samples) == 0:
                break
            assert len(samples) == 4
            basename = samples[0].split('__')[0]
            params = {
                'summary': open(basename + '__summary.json', 'r'),
                'knobs': open(basename + '__knobs.json', 'r'),
                'metrics_before': open(basename + '__metrics_start.json', 'r'),
                'metrics_after': open(basename + '__metrics_end.json', 'r'),
            }
            try:
                response = requests.post(upload_url + "/new_result/",
                                         files=params,
                                         data={'upload_code': upload_code})
                LOG.info("Response: %s\n", response.content.decode())
            finally:
                # BUG FIX: the original leaked all four file handles on every
                # iteration; close them once the request completes (or fails).
                for fobj in params.values():
                    fobj.close()
            sample_idx += 1


def main():
    """Parse command-line arguments and upload the generated data."""
    parser = argparse.ArgumentParser(description="Upload generated data to the website")
    parser.add_argument('basedir', type=str, nargs=1,
                        help='Directory containing the generated data')
    parser.add_argument('upload_code', type=str, nargs=1,
                        help='The website\'s upload code')
    parser.add_argument('upload_url', type=str, default='http://0.0.0.0:8000',
                        nargs='?', help='The website\'s URL')

    args = parser.parse_args()
    upload(args.basedir[0], args.upload_code[0], args.upload_url)


if __name__ == "__main__":
    main()
#
# OtterTune - create_ranked_knobs.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import logging
import os
import shutil
import json
import itertools

LOG = logging.getLogger(__name__)

DATADIR = '/dataset/oltpbench/first_paper_experiments/analysis/knob_identification'

DBMSS = {'postgres-9.6': 1}
HARDWARES = {'m3.xlarge': 16}
TIMESTAMP = '2016-12-04 11:00'
TASK_TYPE = 2

PREFIX = 'global'
MODEL = 'website.PipelineResult'
VALIDATE = True
EXTRA_EXCEPTIONS = {
    PREFIX + '.' + 'checkpoint_segments',
}


def validate_postgres(knobs, dbms):
    """Cross-check the ranked knob list against the knob-settings fixture.

    Appends tunable knobs missing from `knobs`, removes knobs unknown to the
    fixture (only if whitelisted in EXTRA_EXCEPTIONS), and raises on any
    other extra knob. `knobs` is modified in place.
    """
    with open('../knob_settings/{}/{}_knobs.json'.format(dbms.replace('-', '_'),
                                                         dbms.replace('.', '')), 'r') as f:
        knob_info = json.load(f)
    knob_info = {k['fields']['name']: k['fields'] for k in knob_info}
    for kname, kinfo in list(knob_info.items()):
        if kname not in knobs and kinfo['tunable'] is True:
            knobs.append(kname)
            LOG.warning("Adding missing knob to end of list (%s)", kname)
    knob_names = list(knob_info.keys())
    # BUG FIX: iterate over a snapshot. The original removed items from
    # `knobs` while iterating it, which silently skips the element that
    # follows each removal.
    for kname in list(knobs):
        if kname not in knob_names:
            if kname not in EXTRA_EXCEPTIONS:
                raise Exception('Extra knob: {}'.format(kname))
            knobs.remove(kname)
            LOG.warning("Removing extra knob (%s)", kname)


def main():
    """Build the ranked-knob fixtures for every (dbms, hardware) pair."""
    for dbms, hw in itertools.product(list(DBMSS.keys()), HARDWARES):
        datapath = os.path.join(DATADIR, '{}_{}'.format(dbms, hw))
        if not os.path.exists(datapath):
            raise IOError('Path does not exist: {}'.format(datapath))
        with open(os.path.join(datapath, 'featured_knobs.txt'), 'r') as f:
            knobs = [k.strip() for k in f.read().split('\n')]
        knobs = [PREFIX + '.' + k for k in knobs]
        if VALIDATE and dbms.startswith('postgres'):
            validate_postgres(knobs, dbms)

        basename = '{}_{}_ranked_knobs'.format(dbms, hw).replace('.', '')
        with open(basename + '.txt', 'w') as f:
            f.write('\n'.join(knobs))

        django_entry = [{
            'model': MODEL,
            'fields': {
                'dbms': DBMSS[dbms],
                'hardware': HARDWARES[hw],
                'creation_timestamp': TIMESTAMP,
                'task_type': TASK_TYPE,
                'value': json.dumps(knobs, indent=4)
            }
        }]
        savepath = basename + '.json'
        with open(savepath, 'w') as f:
            json.dump(django_entry, f, indent=4)

        # Mirror the fixture into the website's preload directory.
        shutil.copy(savepath, '../../../preload/{}'.format(savepath))


if __name__ == '__main__':
    main()
\"global.checkpoint_flush_after\", \n \"global.wal_writer_delay\", \n \"global.backend_flush_after\", \n \"global.bgwriter_flush_after\", \n \"global.min_parallel_relation_size\", \n \"global.wal_writer_flush_after\", \n \"global.max_wal_size\"\n]" 9 | }, 10 | "model": "website.PipelineResult" 11 | } 12 | ] -------------------------------------------------------------------------------- /server/website/script/fixture_generators/knob_settings/oracle/.gitignore: -------------------------------------------------------------------------------- 1 | oracle-*_knobs.json 2 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/knob_settings/oracle/sql/get_knob_info.sql: -------------------------------------------------------------------------------- 1 | set colsep , 2 | set headsep off 3 | set pagesize 0 4 | set trimspool on 5 | set linesize 4100 6 | set numwidth 19 7 | set heading on 8 | set feed off 9 | set echo off 10 | set tab off 11 | set trim on 12 | set trims on 13 | 14 | -- This option is only available in versions 12.2 and up 15 | set markup csv on 16 | 17 | spool knob_info.csv 18 | 19 | select * from v$parameter order by name; 20 | 21 | spool off 22 | 23 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/knob_settings/oracle/sql/get_knob_info_12.1.sql: -------------------------------------------------------------------------------- 1 | SET echo off 2 | SET linesize 32767 3 | SET LONG 90000 4 | SET LONGCHUNKSIZE 90000; 5 | SET wrap off; 6 | SET heading off 7 | SET colsep '|' 8 | SET pagesize 0; 9 | SET feed off; 10 | SET termout off; 11 | SET trimspool ON; 12 | SELECT * FROM v$parameter order by name; 13 | spool t2.csv 14 | / 15 | spool off 16 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/knob_settings/postgres_9.6/.gitignore: 
-------------------------------------------------------------------------------- 1 | tunable_params.txt 2 | settings.json 3 | postgres-96_tunable_knob_names.json 4 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/oracle/.gitignore: -------------------------------------------------------------------------------- 1 | oracle-*_metrics.json 2 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/postgres_9.6/.gitignore: -------------------------------------------------------------------------------- 1 | postgres-96_numeric_metric_names.json 2 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/postgres_9.6/metrics_sample.json: -------------------------------------------------------------------------------- 1 | {"pg_stat_database_conflicts": ["datname", "confl_deadlock", "confl_bufferpin", "datid", "confl_tablespace", "confl_lock", "confl_snapshot"], "pg_stat_user_indexes": ["indexrelid", "relid", "indexrelname", "relname", "idx_tup_fetch", "idx_tup_read", "idx_scan", "schemaname"], "pg_stat_archiver": ["failed_count", "archived_count", "stats_reset", "last_archived_time", "last_failed_time", "last_failed_wal", "last_archived_wal"], "pg_stat_database": ["numbackends", "datname", "blks_read", "deadlocks", "tup_fetched", "tup_updated", "stats_reset", "tup_inserted", "datid", "xact_commit", "tup_deleted", "blk_read_time", "xact_rollback", "conflicts", "blks_hit", "tup_returned", "temp_files", "blk_write_time", "temp_bytes"], "pg_stat_user_tables": ["last_vacuum", "n_tup_ins", "n_dead_tup", "last_analyze", "idx_tup_fetch", "n_tup_upd", "schemaname", "seq_tup_read", "vacuum_count", "n_mod_since_analyze", "n_tup_del", "last_autovacuum", "seq_scan", "relid", "n_tup_hot_upd", "autoanalyze_count", "n_live_tup", 
"relname", "last_autoanalyze", "idx_scan", "autovacuum_count", "analyze_count"], "pg_stat_bgwriter": ["buffers_backend", "checkpoints_timed", "buffers_alloc", "buffers_clean", "buffers_backend_fsync", "checkpoint_sync_time", "checkpoints_req", "checkpoint_write_time", "maxwritten_clean", "buffers_checkpoint", "stats_reset"], "pg_statio_user_indexes": ["indexrelid", "relid", "indexrelname", "idx_blks_hit", "relname", "idx_blks_read", "schemaname"], "pg_statio_user_tables": ["relid", "heap_blks_hit", "tidx_blks_read", "tidx_blks_hit", "toast_blks_hit", "idx_blks_hit", "relname", "toast_blks_read", "idx_blks_read", "schemaname", "heap_blks_read"]} -------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/postgres_9.6/pg96_database_stats.csv: -------------------------------------------------------------------------------- 1 | column_name,data_type,metric_type,description 2 | blk_read_time,double precision,counter,"Time spent reading data file blocks by backends in this database, in milliseconds" 3 | blks_hit,bigint,counter,"Number of times disk blocks were found already in the buffer cache, so that a read was not necessary (this only includes hits in the PostgreSQL buffer cache, not the operating system's file system cache)" 4 | blks_read,bigint,counter,Number of disk blocks read in this database 5 | blk_write_time,double precision,counter,"Time spent writing data file blocks by backends in this database, in milliseconds" 6 | conflicts,bigint,counter,"Number of queries canceled due to conflicts with recovery in this database. (Conflicts occur only on standby servers; see pg_stat_database_conflicts for details.)" 7 | datid,oid,info,OID of a database 8 | datname,name,info,Name of this database 9 | deadlocks,bigint,counter,Number of deadlocks detected in this database 10 | numbackends,integer,info,Number of backends currently connected to this database. 
This is the only column in this view that returns a value reflecting current state; all other columns return the accumulated values since the last reset. 11 | stats_reset,timestamp with time zone,info,Time at which these statistics were last reset 12 | temp_bytes,bigint,counter,"Total amount of data written to temporary files by queries in this database. All temporary files are counted, regardless of why the temporary file was created, and regardless of the log_temp_files setting." 13 | temp_files,bigint,counter,"Number of temporary files created by queries in this database. All temporary files are counted, regardless of why the temporary file was created (e.g., sorting or hashing), and regardless of the log_temp_files setting." 14 | tup_deleted,bigint,counter,Number of rows deleted by queries in this database 15 | tup_fetched,bigint,counter,Number of rows fetched by queries in this database 16 | tup_inserted,bigint,counter,Number of rows inserted by queries in this database 17 | tup_returned,bigint,counter,Number of rows returned by queries in this database 18 | tup_updated,bigint,counter,Number of rows updated by queries in this database 19 | xact_commit,bigint,counter,Number of transactions in this database that have been committed 20 | xact_rollback,bigint,counter,Number of transactions in this database that have been rolled back 21 | confl_tablespace,bigint,counter,Number of queries in this database that have been canceled due to dropped tablespaces 22 | confl_lock,bigint,counter,Number of queries in this database that have been canceled due to lock timeouts 23 | confl_snapshot,bigint,counter,Number of queries in this database that have been canceled due to old snapshots 24 | confl_bufferpin,bigint,counter,Number of queries in this database that have been canceled due to pinned buffers 25 | confl_deadlock,bigint,counter,Number of queries in this database that have been canceled due to deadlocks 
-------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/postgres_9.6/pg96_global_stats.csv: -------------------------------------------------------------------------------- 1 | column_name,data_type,metric_type,description 2 | buffers_alloc,bigint,counter,Number of buffers allocated 3 | buffers_backend,bigint,counter,Number of buffers written directly by a backend 4 | buffers_backend_fsync,bigint,counter,Number of times a backend had to execute its own fsync call (normally the background writer handles those even when the backend does its own write) 5 | buffers_checkpoint,bigint,counter,Number of buffers written during checkpoints 6 | buffers_clean,bigint,counter,Number of buffers written by the background writer 7 | checkpoints_req,bigint,counter,Number of requested checkpoints that have been performed 8 | checkpoints_timed,bigint,counter,Number of scheduled checkpoints that have been performed 9 | checkpoint_sync_time,double precision,counter,"Total amount of time that has been spent in the portion of checkpoint processing where files are synchronized to disk, in milliseconds" 10 | checkpoint_write_time,double precision,counter,"Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk, in milliseconds" 11 | maxwritten_clean,bigint,counter,Number of times the background writer stopped a cleaning scan because it had written too many buffers 12 | stats_reset,timestamp with time zone,info,Time at which these statistics were last reset 13 | archived_count,bigint,counter,Number of WAL files that have been successfully archived 14 | last_archived_wal,text,info,Name of the last WAL file successfully archived 15 | last_archived_time,timestamp with time zone,info,Time of the last successful archive operation 16 | failed_count,bigint,counter,Number of failed attempts for archiving WAL files 17 | last_failed_wal,text,info,Name of the WAL 
file of the last failed archival operation 18 | last_failed_time,timestamp with time zone,info,Time of the last failed archival operation 19 | stats_reset,timestamp with time zone,info,Time at which these statistics were last reset -------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/postgres_9.6/pg96_index_stats.csv: -------------------------------------------------------------------------------- 1 | column_name,data_type,metric_type,description 2 | idx_blks_hit,bigint,counter,Number of buffer hits in this index 3 | idx_blks_read,bigint,counter,Number of disk blocks read from this index 4 | idx_scan,bigint,counter,Number of index scans initiated on this index 5 | idx_tup_fetch,bigint,counter,Number of live table rows fetched by simple index scans using this index 6 | idx_tup_read,bigint,counter,Number of index entries returned by scans on this index 7 | indexrelid,oid,info,OID of this index 8 | indexrelname,name,info,Name of this index 9 | relid,oid,info,OID of the table for this index 10 | relname,name,info,Name of the table for this index 11 | schemaname,name,info,Name of the schema this index is in 12 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/metric_settings/postgres_9.6/pg96_table_stats.csv: -------------------------------------------------------------------------------- 1 | column_name,data_type,metric_type,description 2 | analyze_count,bigint,counter,Number of times this table has been manually analyzed 3 | autoanalyze_count,bigint,counter,Number of times this table has been analyzed by the autovacuum daemon 4 | autovacuum_count,bigint,counter,Number of times this table has been vacuumed by the autovacuum daemon 5 | heap_blks_hit,bigint,counter,Number of buffer hits in this table 6 | heap_blks_read,bigint,counter,Number of disk blocks read from this table 7 | 
idx_blks_hit,bigint,counter,Number of buffer hits in all indexes on this table 8 | idx_blks_read,bigint,counter,Number of disk blocks read from all indexes on this table 9 | idx_scan,bigint,counter,Number of index scans initiated on this table 10 | idx_tup_fetch,bigint,counter,Number of live rows fetched by index scans 11 | last_analyze,timestamp with time zone,info,Last time at which this table was manually analyzed 12 | last_autoanalyze,timestamp with time zone,info,Last time at which this table was analyzed by the autovacuum daemon 13 | last_autovacuum,timestamp with time zone,info,Last time at which this table was vacuumed by the autovacuum daemon 14 | last_vacuum,timestamp with time zone,info,Last time at which this table was manually vacuumed (not counting VACUUM FULL) 15 | n_dead_tup,bigint,counter,Estimated number of dead rows 16 | n_live_tup,bigint,counter,Estimated number of live rows 17 | n_tup_del,bigint,counter,Number of rows deleted 18 | n_tup_hot_upd,bigint,counter,"Number of rows HOT updated (i.e., with no separate index update required)" 19 | n_tup_ins,bigint,counter,Number of rows inserted 20 | n_tup_upd,bigint,counter,Number of rows updated 21 | n_mod_since_analyze,bigint,counter,Estimated number of rows modified since this table was last analyzed 22 | relid,oid,info,OID of a table 23 | relname,name,info,Name of this table 24 | schemaname,name,info,Name of the schema that this table is in 25 | seq_scan,bigint,counter,Number of sequential scans initiated on this table 26 | seq_tup_read,bigint,counter,Number of live rows fetched by sequential scans 27 | tidx_blks_hit,bigint,counter,Number of buffer hits in this table's TOAST table index (if any) 28 | tidx_blks_read,bigint,counter,Number of disk blocks read from this table's TOAST table index (if any) 29 | toast_blks_hit,bigint,counter,Number of buffer hits in this table's TOAST table (if any) 30 | toast_blks_read,bigint,counter,Number of disk blocks read from this table's TOAST table (if any) 31 | 
vacuum_count,bigint,counter,Number of times this table has been manually vacuumed (not counting VACUUM FULL) 32 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/workload_characterization/.gitignore: -------------------------------------------------------------------------------- 1 | *.txt 2 | -------------------------------------------------------------------------------- /server/website/script/fixture_generators/workload_characterization/postgres-96_m3xlarge_pruned_metrics.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "hardware": 16, 5 | "dbms": 1, 6 | "task_type": 1, 7 | "creation_timestamp": "2016-12-04 11:00", 8 | "value": "[\n \"99th Percentile Latency (microseconds)\", \n \"Maximum Latency (microseconds)\", \n \"Throughput (requests/second)\", \n \"pg_stat_bgwriter.buffers_alloc\", \n \"pg_stat_bgwriter.buffers_checkpoint\", \n \"pg_stat_bgwriter.checkpoints_req\", \n \"pg_stat_bgwriter.maxwritten_clean\", \n \"pg_stat_database.blks_hit\", \n \"pg_stat_database.tup_deleted\", \n \"pg_stat_database.tup_inserted\", \n \"pg_stat_database.tup_returned\", \n \"pg_stat_database.tup_updated\", \n \"pg_stat_user_tables.autoanalyze_count\"\n]" 9 | }, 10 | "model": "website.PipelineResult" 11 | } 12 | ] -------------------------------------------------------------------------------- /server/website/script/installation/.gitignore: -------------------------------------------------------------------------------- 1 | .vagrant 2 | ottertune 3 | -------------------------------------------------------------------------------- /server/website/script/installation/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | VM_NAME = "OtterTuneVM" 5 | VM_MEMORY = "2048" # MB 6 | 7 | Vagrant.configure(2) do |config| 8 | # The online 
documentation for the configuration options is located at 9 | # https://docs.vagrantup.com 10 | 11 | # Our box 12 | config.vm.box = "ubuntu/xenial64" 13 | 14 | # Customize the amount of memory on the VM: 15 | config.vm.provider "virtualbox" do |vb| 16 | vb.name = VM_NAME 17 | vb.memory = VM_MEMORY 18 | end 19 | 20 | # SSH 21 | config.ssh.forward_agent = true 22 | config.ssh.forward_x11 = true 23 | config.ssh.keep_alive = true 24 | 25 | # Forwarded port mapping which allows access to a specific port 26 | # within the machine from a port on the host machine. Accessing 27 | # "localhost:8000" will access port 8000 on the guest machine. 28 | config.vm.network :forwarded_port, guest: 8000, host: 8000 29 | 30 | # Link current repo into VM 31 | config.vm.synced_folder "../../../..", "/ottertune" 32 | 33 | # Custom provisioning and setup script 34 | config.vm.provision :shell, path: "bootstrap.sh" 35 | 36 | end 37 | -------------------------------------------------------------------------------- /server/website/script/installation/bootstrap.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Variables 4 | DBHOST=localhost 5 | DBNAME=ottertune 6 | DBUSER=dbuser 7 | DBPASSWD=test123 8 | 9 | LOG=/vagrant/vm_build.log 10 | REPOPATH=/ottertune 11 | SETTINGSPATH=$REPOPATH/server/website/website/settings 12 | 13 | # Clear old log contents 14 | > $LOG 15 | 16 | # Install Ubuntu packages 17 | echo -e "\n--- Installing Ubuntu packages ---\n" 18 | apt-get -qq update 19 | apt-get -y install python3-pip python-dev python-mysqldb rabbitmq-server gradle default-jdk libmysqlclient-dev python3-tk >> $LOG 2>&1 20 | 21 | echo -e "\n--- Installing Python packages ---\n" 22 | pip3 install --upgrade pip >> $LOG 2>&1 23 | pip install -r ${REPOPATH}/server/website/requirements.txt >> $LOG 2>&1 24 | 25 | # Install MySQL 26 | echo -e "\n--- Install MySQL specific packages and settings ---\n" 27 | debconf-set-selections <<< 
"mysql-server mysql-server/root_password password $DBPASSWD" 28 | debconf-set-selections <<< "mysql-server mysql-server/root_password_again password $DBPASSWD" 29 | apt-get -y install mysql-server >> $LOG 2>&1 30 | 31 | # Setup MySQL 32 | echo -e "\n--- Setting up the MySQL user and database ---\n" 33 | mysql -uroot -p$DBPASSWD -e "CREATE DATABASE IF NOT EXISTS $DBNAME" >> /vagrant/vm_build.log 2>&1 34 | mysql -uroot -p$DBPASSWD -e "GRANT ALL PRIVILEGES ON $DBNAME.* TO '$DBUSER'@'localhost' IDENTIFIED BY '$DBPASSWD'" >> $LOG 2>&1 35 | mysql -uroot -p$DBPASSWD -e "GRANT ALL PRIVILEGES ON test_$DBNAME.* TO '$DBUSER'@'localhost' IDENTIFIED BY '$DBPASSWD'" >> $LOG 2>&1 36 | 37 | # Update Django settings 38 | echo -e "\n--- Updating Django settings ---\n" 39 | if [ ! -f "$SETTINGSPATH/credentials.py" ]; then 40 | cp $SETTINGSPATH/credentials_TEMPLATE.py $SETTINGSPATH/credentials.py >> $LOG 2>&1 41 | sed -i -e "s/^DEBUG.*/DEBUG = True/" \ 42 | -e "s/^ALLOWED_HOSTS.*/ALLOWED_HOSTS = ['0\.0\.0\.0']/" \ 43 | -e "s/'USER': 'ADD ME\!\!'/'USER': '$DBUSER'/" \ 44 | -e "s/'PASSWORD': 'ADD ME\!\!'/'PASSWORD': '$DBPASSWD'/" \ 45 | $SETTINGSPATH/credentials.py >> $LOG 2>&1 46 | fi 47 | rm /usr/bin/python 48 | ln -s /usr/bin/python3.5 /usr/bin/python -------------------------------------------------------------------------------- /server/website/script/management/beat.sh: -------------------------------------------------------------------------------- 1 | python3 manage.py celerybeat --verbosity=2 --loglevel=info > beat.log 2>&1 & 2 | -------------------------------------------------------------------------------- /server/website/script/management/celery.sh: -------------------------------------------------------------------------------- 1 | python3 manage.py celery worker --loglevel=info --pool=threads --concurrency=1 > celery.log 2>&1 & 2 | -------------------------------------------------------------------------------- /server/website/script/management/django.sh: 
-------------------------------------------------------------------------------- 1 | python3 manage.py runserver 0.0.0.0:8000 > django.log 2>&1 & 2 | -------------------------------------------------------------------------------- /server/website/script/management/loop.sh: -------------------------------------------------------------------------------- 1 | sudo -b -E env "PATH=$PATH" nohup fab run_loops:100 > loop.log 2>&1 < /dev/null 2 | -------------------------------------------------------------------------------- /server/website/script/upload/upload.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - upload.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | import argparse 7 | import logging 8 | import os 9 | import requests 10 | 11 | 12 | # Logging 13 | LOG = logging.getLogger(__name__) 14 | LOG.addHandler(logging.StreamHandler()) 15 | LOG.setLevel(logging.INFO) 16 | 17 | 18 | def upload(datadir, upload_code, url): 19 | params = { 20 | 'summary': open(os.path.join(datadir, 'summary.json'), 'rb'), 21 | 'knobs': open(os.path.join(datadir, 'knobs.json'), 'rb'), 22 | 'metrics_before': open(os.path.join(datadir, 'metrics_before.json'), 'rb'), 23 | 'metrics_after': open(os.path.join(datadir, 'metrics_after.json'), 'rb'), 24 | } 25 | 26 | response = requests.post(url, 27 | files=params, 28 | data={'upload_code': upload_code}) 29 | LOG.info(response.content) 30 | 31 | 32 | def main(): 33 | parser = argparse.ArgumentParser(description="Upload generated data to the website") 34 | parser.add_argument('datadir', type=str, nargs=1, 35 | help='Directory containing the generated data') 36 | parser.add_argument('upload_code', type=str, nargs=1, 37 | help='The website\'s upload code') 38 | parser.add_argument('url', type=str, default='http://0.0.0.0:8000/new_result/', 39 | nargs='?', help='The upload url: server_ip/new_result/') 40 | args = parser.parse_args() 41 | 
upload(args.datadir[0], args.upload_code[0], args.url) 42 | 43 | 44 | if __name__ == "__main__": 45 | main() 46 | -------------------------------------------------------------------------------- /server/website/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/tests/runner.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - runner.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | ''' 7 | Created on Jan 29, 2018 8 | 9 | @author: dvanaken 10 | ''' 11 | 12 | import logging 13 | 14 | from django.test.runner import DiscoverRunner 15 | 16 | 17 | class BaseRunner(DiscoverRunner): 18 | 19 | def run_tests(self, test_labels, extra_tests=None, **kwargs): 20 | # Disable logging while running tests 21 | logging.disable(logging.CRITICAL) 22 | return super(BaseRunner, self).run_tests(test_labels, extra_tests, **kwargs) 23 | -------------------------------------------------------------------------------- /server/website/tests/test_files/sample_summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "workload_name": "workload-0", 3 | "observation_time": 300, 4 | "database_type": "postgres", 5 | "end_time": 1513113439011, 6 | "start_time": 1513113139011, 7 | "database_version": "9.6" 8 | } 9 | -------------------------------------------------------------------------------- /server/website/tests/utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - utils.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | TEST_USERNAME = 'user' 7 | 8 | TEST_PASSWORD = 'abcd123' 9 | 10 | 
TEST_PROJECT_ID = 1 11 | 12 | TEST_BASIC_SESSION_ID = 1 13 | 14 | TEST_TUNING_SESSION_ID = 2 15 | 16 | TEST_WORKLOAD_ID = 1 17 | 18 | TEST_BASIC_SESSION_UPLOAD_CODE = '1234567890' 19 | 20 | TEST_TUNING_SESSION_UPLOAD_CODE = '0987654321' 21 | -------------------------------------------------------------------------------- /server/website/website/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | from __future__ import absolute_import 7 | 8 | from .celery import app as celery_app # noqa 9 | -------------------------------------------------------------------------------- /server/website/website/celery.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | 5 | from celery import Celery 6 | 7 | # set the default Django settings module for the 'celery' program. 8 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'website.settings') 9 | 10 | from django.conf import settings # noqa, pylint: disable=wrong-import-position 11 | 12 | app = Celery('website') # pylint: disable=invalid-name 13 | 14 | # Using a string here means the worker will not have to 15 | # pickle the object when using Windows. 
16 | app.config_from_object('django.conf:settings') 17 | app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) 18 | 19 | 20 | @app.task(bind=True) 21 | def debug_task(self): 22 | print('Request: {0!r}'.format(self.request)) 23 | -------------------------------------------------------------------------------- /server/website/website/db/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | from .base.target_objective import target_objectives 8 | -------------------------------------------------------------------------------- /server/website/website/db/base/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/website/db/myrocks/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/website/db/myrocks/target_objective.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - target_objective.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | from ..base.target_objective import BaseThroughput 8 | from website.types import DBMSType 9 | 10 | target_objective_list = tuple((DBMSType.MYROCKS, target_obj) for target_obj in [ # pylint: disable=invalid-name 11 | BaseThroughput(transactions_counter='session_status.questions') 12 | ]) 13 | 
-------------------------------------------------------------------------------- /server/website/website/db/mysql/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/website/db/mysql/parser.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - parser.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | from website.types import KnobUnitType 8 | from website.utils import ConversionUtil 9 | from ..base.parser import BaseParser # pylint: disable=relative-beyond-top-level 10 | 11 | 12 | # pylint: disable=no-self-use 13 | class MysqlParser(BaseParser): 14 | 15 | def __init__(self, dbms_obj): 16 | super().__init__(dbms_obj) 17 | self.bytes_system = ( 18 | (1024 ** 4, 'T'), 19 | (1024 ** 3, 'G'), 20 | (1024 ** 2, 'M'), 21 | (1024 ** 1, 'k'), 22 | ) 23 | self.time_system = None 24 | self.min_bytes_unit = 'k' 25 | self.valid_true_val = ("on", "true", "yes", '1', 'enabled') 26 | self.valid_false_val = ("off", "false", "no", '0', 'disabled') 27 | 28 | def convert_integer(self, int_value, metadata): 29 | # Collected knobs/metrics do not show unit, convert to int directly 30 | if len(str(int_value)) == 0: 31 | # The value collected from the database is empty 32 | return 0 33 | try: 34 | try: 35 | converted = int(int_value) 36 | except ValueError: 37 | converted = int(float(int_value)) 38 | 39 | except ValueError: 40 | raise Exception('Invalid integer format for {}: {}'.format( 41 | metadata.name, int_value)) 42 | return converted 43 | 44 | def format_integer(self, int_value, metadata): 45 | int_value = int(round(int_value)) 46 | if int_value > 0 and metadata.unit == KnobUnitType.BYTES: 47 | int_value = 
ConversionUtil.get_human_readable2( 48 | int_value, self.bytes_system, self.min_bytes_unit) 49 | return int_value 50 | 51 | def parse_version_string(self, version_string): 52 | s = version_string.split('.')[0] + '.' + version_string.split('.')[1] 53 | return s 54 | -------------------------------------------------------------------------------- /server/website/website/db/mysql/target_objective.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - target_objective.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | from website.types import DBMSType 8 | from ..base.target_objective import (BaseThroughput, BaseUserDefinedTarget, 9 | LESS_IS_BETTER, MORE_IS_BETTER) # pylint: disable=relative-beyond-top-level 10 | 11 | target_objective_list = tuple((DBMSType.MYSQL, target_obj) for target_obj in [ # pylint: disable=invalid-name 12 | BaseThroughput(transactions_counter=('innodb_metrics.trx_rw_commits', 13 | 'innodb_metrics.trx_ro_commits', 14 | 'innodb_metrics.trx_nl_ro_commits')), 15 | BaseUserDefinedTarget(target_name='latency_99', improvement=LESS_IS_BETTER, 16 | unit='microseconds', short_unit='us'), 17 | BaseUserDefinedTarget(target_name='throughput', improvement=MORE_IS_BETTER, 18 | unit='transactions / seconds', short_unit='txn/s') 19 | ]) 20 | -------------------------------------------------------------------------------- /server/website/website/db/oracle/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/website/db/oracle/parser.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - parser.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University 
Database Group 5 | # 6 | 7 | from ..base.parser import BaseParser 8 | 9 | 10 | class OracleParser(BaseParser): 11 | 12 | def __init__(self, dbms_obj): 13 | super().__init__(dbms_obj) 14 | self.true_value = 'TRUE' 15 | self.false_value = 'FALSE' 16 | self.bytes_system = ( 17 | (1024 ** 4, 'T'), 18 | (1024 ** 3, 'G'), 19 | (1024 ** 2, 'M'), 20 | (1024 ** 1, 'k'), 21 | ) 22 | self.min_bytes_unit = 'M' 23 | 24 | def format_enum(self, enum_value, metadata): 25 | enum = super().format_enum(enum_value, metadata) 26 | return "'{}'".format(enum) 27 | -------------------------------------------------------------------------------- /server/website/website/db/parser.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - parser.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | from website.models import DBMSCatalog 8 | from website.types import DBMSType 9 | 10 | from .myrocks.parser import MyRocksParser 11 | from .postgres.parser import PostgresParser 12 | from .oracle.parser import OracleParser 13 | from .mysql.parser import MysqlParser 14 | 15 | _DBMS_PARSERS = {} 16 | 17 | 18 | def _get(dbms_id): 19 | dbms_id = int(dbms_id) 20 | db_parser = _DBMS_PARSERS.get(dbms_id, None) 21 | if db_parser is None: 22 | obj = DBMSCatalog.objects.get(id=dbms_id) 23 | if obj.type == DBMSType.POSTGRES: 24 | clz = PostgresParser 25 | elif obj.type == DBMSType.MYROCKS: 26 | clz = MyRocksParser 27 | elif obj.type == DBMSType.ORACLE: 28 | clz = OracleParser 29 | elif obj.type == DBMSType.MYSQL: 30 | clz = MysqlParser 31 | else: 32 | raise NotImplementedError('Implement me! 
{}'.format(obj)) 33 | 34 | db_parser = clz(obj) 35 | _DBMS_PARSERS[dbms_id] = db_parser 36 | 37 | return db_parser 38 | 39 | 40 | def parse_version_string(dbms_type, version_string): 41 | dbmss = DBMSCatalog.objects.filter(type=dbms_type) 42 | parsed_version = None 43 | for instance in dbmss: 44 | db_parser = _get(instance.pk) 45 | try: 46 | parsed_version = db_parser.parse_version_string(version_string) 47 | except AttributeError: 48 | pass 49 | if parsed_version is not None: 50 | break 51 | return parsed_version 52 | 53 | 54 | def convert_dbms_knobs(dbms_id, knobs, knob_catalog=None): 55 | return _get(dbms_id).convert_dbms_knobs(knobs, knob_catalog) 56 | 57 | 58 | def convert_dbms_metrics(dbms_id, numeric_metrics, observation_time, target_objective): 59 | return _get(dbms_id).convert_dbms_metrics( 60 | numeric_metrics, observation_time, target_objective) 61 | 62 | 63 | def parse_dbms_knobs(dbms_id, knobs): 64 | return _get(dbms_id).parse_dbms_knobs(knobs) 65 | 66 | 67 | def parse_dbms_metrics(dbms_id, metrics): 68 | return _get(dbms_id).parse_dbms_metrics(metrics) 69 | 70 | 71 | def create_knob_configuration(dbms_id, tuning_knobs): 72 | return _get(dbms_id).create_knob_configuration(tuning_knobs) 73 | 74 | 75 | def format_dbms_knobs(dbms_id, knobs): 76 | return _get(dbms_id).format_dbms_knobs(knobs) 77 | 78 | 79 | def calculate_change_in_metrics(dbms_id, metrics_start, metrics_end): 80 | return _get(dbms_id).calculate_change_in_metrics( 81 | metrics_start, metrics_end) 82 | -------------------------------------------------------------------------------- /server/website/website/db/postgres/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/website/db/postgres/parser.py: 
-------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - parser.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | import re 8 | 9 | from ..base.parser import BaseParser 10 | from website.utils import ConversionUtil 11 | 12 | 13 | class PostgresParser(BaseParser): 14 | 15 | def __init__(self, dbms_obj): 16 | super().__init__(dbms_obj) 17 | self.valid_true_val = ("on", "true", "yes", 1) 18 | self.valid_false_val = ("off", "false", "no", 0) 19 | self.bytes_system = [(f, s) for f, s in ConversionUtil.DEFAULT_BYTES_SYSTEM 20 | if s in ('TB', 'GB', 'MB', 'kB')] 21 | 22 | def parse_version_string(self, version_string): 23 | dbms_version = version_string.split(',')[0] 24 | return re.search(r'\d+\.\d+(?=\.\d+)', dbms_version).group(0) 25 | -------------------------------------------------------------------------------- /server/website/website/db/postgres/target_objective.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - target_objective.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | 7 | from ..base.target_objective import BaseThroughput 8 | from website.types import DBMSType 9 | 10 | target_objective_list = tuple((DBMSType.POSTGRES, target_obj) for target_obj in [ # pylint: disable=invalid-name 11 | BaseThroughput(transactions_counter='pg_stat_database.xact_commit') 12 | ]) 13 | -------------------------------------------------------------------------------- /server/website/website/fixtures/dbms_catalog.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model":"website.DBMSCatalog", 4 | "pk":2, 5 | "fields":{ 6 | "type":2, 7 | "version":"9.2" 8 | } 9 | }, 10 | { 11 | "model":"website.DBMSCatalog", 12 | "pk":3, 13 | "fields":{ 14 | "type":2, 15 | "version":"9.3" 16 | } 17 | }, 18 | { 19 | "model":"website.DBMSCatalog", 20 | "pk":4, 
21 | "fields":{ 22 | "type":2, 23 | "version":"9.4" 24 | } 25 | }, 26 | { 27 | "model":"website.DBMSCatalog", 28 | "pk":1, 29 | "fields":{ 30 | "type":2, 31 | "version":"9.6" 32 | } 33 | }, 34 | { 35 | "model":"website.DBMSCatalog", 36 | "pk":9, 37 | "fields":{ 38 | "type":9, 39 | "version":"5.6" 40 | } 41 | }, 42 | { 43 | "model":"website.DBMSCatalog", 44 | "pk":156, 45 | "fields":{ 46 | "type":1, 47 | "version":"5.6" 48 | } 49 | }, 50 | { 51 | "model":"website.DBMSCatalog", 52 | "pk":157, 53 | "fields":{ 54 | "type":1, 55 | "version":"5.7" 56 | } 57 | }, 58 | { 59 | "model":"website.DBMSCatalog", 60 | "pk":180, 61 | "fields":{ 62 | "type":1, 63 | "version":"8.0" 64 | } 65 | }, 66 | { 67 | "model":"website.DBMSCatalog", 68 | "pk":121, 69 | "fields":{ 70 | "type":4, 71 | "version":"12.1.0.2.0" 72 | } 73 | }, 74 | { 75 | "model":"website.DBMSCatalog", 76 | "pk":12, 77 | "fields":{ 78 | "type":4, 79 | "version":"12.2.0.1.0" 80 | } 81 | }, 82 | { 83 | "model":"website.DBMSCatalog", 84 | "pk":19, 85 | "fields":{ 86 | "type":4, 87 | "version":"19.0.0.0.0" 88 | } 89 | } 90 | ] 91 | -------------------------------------------------------------------------------- /server/website/website/fixtures/postgres-96_m3xlarge_pruned_metrics.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "hardware": 16, 5 | "dbms": 1, 6 | "task_type": 1, 7 | "creation_timestamp": "2016-12-04 11:00", 8 | "value": "[\n \"throughput_txn_per_sec\", \n \"pg_stat_bgwriter.buffers_alloc\", \n \"pg_stat_bgwriter.buffers_checkpoint\", \n \"pg_stat_bgwriter.checkpoints_req\", \n \"pg_stat_bgwriter.maxwritten_clean\", \n \"pg_stat_database.blks_hit\", \n \"pg_stat_database.tup_deleted\", \n \"pg_stat_database.tup_inserted\", \n \"pg_stat_database.tup_returned\", \n \"pg_stat_database.tup_updated\", \n \"pg_stat_user_tables.autoanalyze_count\"\n]" 9 | }, 10 | "model": "website.PipelineResult" 11 | } 12 | ] 
-------------------------------------------------------------------------------- /server/website/website/fixtures/postgres-96_m3xlarge_ranked_knobs.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "hardware": 16, 5 | "dbms": 1, 6 | "task_type": 2, 7 | "creation_timestamp": "2016-12-04 11:00", 8 | "value": "[\n \"global.shared_buffers\", \n \"global.effective_cache_size\", \n \"global.bgwriter_lru_maxpages\", \n \"global.bgwriter_delay\", \n \"global.checkpoint_completion_target\", \n \"global.deadlock_timeout\", \n \"global.default_statistics_target\", \n \"global.effective_io_concurrency\", \n \"global.checkpoint_timeout\", \n \"global.commit_delay\", \n \"global.commit_siblings\", \n \"global.wal_buffers\", \n \"global.temp_buffers\", \n \"global.from_collapse_limit\", \n \"global.join_collapse_limit\", \n \"global.bgwriter_lru_multiplier\", \n \"global.random_page_cost\", \n \"global.work_mem\", \n \"global.maintenance_work_mem\", \n \"global.min_wal_size\", \n \"global.max_parallel_workers_per_gather\", \n \"global.seq_page_cost\", \n \"global.max_worker_processes\", \n \"global.wal_sync_method\", \n \"global.checkpoint_flush_after\", \n \"global.wal_writer_delay\", \n \"global.backend_flush_after\", \n \"global.bgwriter_flush_after\", \n \"global.min_parallel_relation_size\", \n \"global.wal_writer_flush_after\", \n \"global.max_wal_size\"\n]" 9 | }, 10 | "model": "website.PipelineResult" 11 | } 12 | ] -------------------------------------------------------------------------------- /server/website/website/fixtures/test_user.json: -------------------------------------------------------------------------------- 1 | 2 | [ 3 | { 4 | "model": "auth.user", 5 | "pk": 1, 6 | "fields": { 7 | "password": "pbkdf2_sha256$30000$wefcDHxU5ctV$8NUjP4GZouOdr4LU47/WGZgqGU4V4brcS8Xt1Yx7ut0=", 8 | "last_login": null, 9 | "is_superuser": false, 10 | "username": "user", 11 | "first_name": "test", 12 | 
"last_name": "user", 13 | "email": "user@email.com", 14 | "is_staff": false, 15 | "is_active": true, 16 | "date_joined": "2017-12-04T20:46:28.365Z", 17 | "groups": [], 18 | "user_permissions": [] 19 | } 20 | } 21 | ] -------------------------------------------------------------------------------- /server/website/website/fixtures/test_user_sessions.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "user": 1, 5 | "name": "test_project", 6 | "description": "", 7 | "creation_time": "2017-11-30T02:00:49.611Z", 8 | "last_update": "2017-11-30T02:00:49.611Z" 9 | }, 10 | "model": "website.Project", 11 | "pk": 1 12 | }, 13 | { 14 | "fields": { 15 | "cpu": 4, 16 | "memory": 16, 17 | "storage": 32, 18 | "storage_type": 5, 19 | "additional_specs": null 20 | }, 21 | "model": "website.Hardware", 22 | "pk": 1 23 | }, 24 | { 25 | "fields": { 26 | "user": 1, 27 | "name": "basic_session", 28 | "description": "(no tuning)", 29 | "dbms": 1, 30 | "hardware": 1, 31 | "project": 1, 32 | "upload_code": "1234567890", 33 | "tuning_session": "no_tuning_session", 34 | "target_objective": "throughput_txn_per_sec", 35 | "creation_time": "2017-11-30T02:00:49.611Z", 36 | "last_update": "2017-11-30T02:00:49.611Z" 37 | }, 38 | "model": "website.Session", 39 | "pk": 1 40 | }, 41 | { 42 | "fields": { 43 | "user": 1, 44 | "name": "tuning_session", 45 | "description": "", 46 | "dbms": 1, 47 | "hardware": 1, 48 | "project": 1, 49 | "upload_code": "0987654321", 50 | "tuning_session": "tuning_session", 51 | "target_objective": "throughput_txn_per_sec", 52 | "creation_time": "2017-11-30T02:00:49.611Z", 53 | "last_update": "2017-11-30T02:00:49.611Z" 54 | }, 55 | "model": "website.Session", 56 | "pk": 2 57 | } 58 | ] 59 | -------------------------------------------------------------------------------- /server/website/website/management/commands/cleardblog.py: -------------------------------------------------------------------------------- 1 
# ---- server/website/website/management/commands/cleardblog.py ----
#
# OtterTune - cleardblog.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django.core.management.base import BaseCommand
from django_db_logger.models import StatusLog


class Command(BaseCommand):
    help = 'Clear all log entries from the django_db_logger table.'

    def handle(self, *args, **options):
        """Delete every StatusLog row and report success."""
        StatusLog.objects.all().delete()
        self.stdout.write(self.style.SUCCESS(
            "Successfully cleared the django_db_logger table."))


# ---- server/website/website/management/commands/createuser.py ----
#
# OtterTune - createuser.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand

from website.utils import create_user  # pylint: disable=no-name-in-module,import-error


class Command(BaseCommand):
    help = 'Create a new user.'

    def add_arguments(self, parser):
        parser.add_argument(
            'username',
            metavar='USERNAME',
            help='Specifies the login for the user.')
        parser.add_argument(
            'password',
            metavar='PASSWORD',
            help='Specifies the password for the user.')
        parser.add_argument(
            '--email',
            metavar='EMAIL',
            default=None,
            help='Specifies the email for the user.')
        parser.add_argument(
            '--superuser',
            action='store_true',
            help='Creates a superuser.')

    def handle(self, *args, **options):
        """Create the user (or superuser) and report the outcome."""
        username = options['username']
        password = options['password']
        email = options['email']
        superuser = options['superuser']

        # create_user returns (user, created); created is False when the
        # username is already taken.
        _, created = create_user(username, password, email, superuser)

        if created:
            self.stdout.write(self.style.SUCCESS("Successfully created {} '{}'{}.".format(
                'superuser' if superuser else 'user', username,
                " ('{}')".format(email) if email else '')))
        else:
            self.stdout.write(self.style.NOTICE(
                "ERROR: User '{}' already exists.".format(username)))


# ---- server/website/website/management/commands/deleteuser.py ----
#
# OtterTune - deleteuser.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand

from website.utils import delete_user  # pylint: disable=no-name-in-module,import-error


class Command(BaseCommand):
    help = 'Delete an existing user.'

    def add_arguments(self, parser):
        parser.add_argument(
            'username',
            metavar='USERNAME',
            help='Specifies the login of the user to delete.')

    def handle(self, *args, **options):
        """Delete the named user and report the outcome."""
        username = options['username']
        _, deleted = delete_user(username)
        if deleted:
            self.stdout.write(self.style.SUCCESS(
                "Successfully deleted user '{}'.".format(username)))
        else:
            self.stdout.write(self.style.NOTICE(
                "ERROR: User '{}' does not exist.".format(username)))


# ---- server/website/website/management/commands/dumpdebuginfo.py ----
#
# OtterTune - dumpdebuginfo.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import os

from django.core.management.base import BaseCommand, CommandError

from website.models import Session
from website.utils import dump_debug_info


class Command(BaseCommand):
    help = 'Dump debug information for the session with the given upload code.'

    def add_arguments(self, parser):
        parser.add_argument(
            'uploadcode',
            metavar='UPLOADCODE',
            help="The session's upload code to.")
        parser.add_argument(
            '-f', '--filename',
            metavar='FILE',
            help='Name of the file to write the debug information. '
                 'Default: debug_[timestamp].tar.gz')
        parser.add_argument(
            '-d', '--directory',
            metavar='DIR',
            help='Path of the directory to write the debug information to. '
                 'Default: current directory')
        parser.add_argument(
            '--prettyprint',
            action='store_true',
            help='Pretty print the output.')

    def handle(self, *args, **options):
        """Dump the session's debug tarball to the requested path."""
        directory = options['directory'] or ''
        if directory and not os.path.exists(directory):
            os.makedirs(directory)
        try:
            session = Session.objects.get(upload_code=options['uploadcode'])
        except Session.DoesNotExist as e:
            # FIX: grammatical error in user-facing message ("not exist")
            # and explicit exception chaining (pylint W0707).
            raise CommandError(
                "ERROR: Session with upload code '{}' does not exist.".format(
                    options['uploadcode'])) from e

        debug_info, root = dump_debug_info(session, pretty_print=options['prettyprint'])

        # Fall back to the archive root name when no filename was given.
        filename = options['filename'] or root
        if not filename.endswith('.tar.gz'):
            filename += '.tar.gz'
        path = os.path.join(directory, filename)

        with open(path, 'wb') as f:
            f.write(debug_info.getvalue())

        self.stdout.write(self.style.SUCCESS(
            "Successfully dumped debug information to '{}'.".format(path)))


# ---- server/website/website/management/commands/dumpknob.py ----
#
# OtterTune - dumpknob.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import json
import os
from collections import OrderedDict

from django.core.management.base import BaseCommand, CommandError

from website.models import Session, SessionKnob, SessionKnobManager


class Command(BaseCommand):
    help = 'Dump knobs for the session with the given upload code.'

    def add_arguments(self, parser):
        parser.add_argument(
            'uploadcode',
            metavar='UPLOADCODE',
            help="The session's upload code.")
        parser.add_argument(
            '-f', '--filename',
            metavar='FILE',
            default='session_knobs.json',
            help='Name of the file to write the session knob tunability to. '
                 'Default: session_knobs.json')
        parser.add_argument(
            '-d', '--directory',
            metavar='DIR',
            help='Path of the directory to write the session knob tunability to. '
                 'Default: current directory')
        parser.add_argument(
            '--tunable-only',
            action='store_true',
            help='Dump tunable knobs only. Default: False')

    def handle(self, *args, **options):
        """Write the session's knob min/max/tunability info as sorted JSON."""
        directory = options['directory'] or ''
        if directory and not os.path.exists(directory):
            os.makedirs(directory)
        try:
            session = Session.objects.get(upload_code=options['uploadcode'])
        except Session.DoesNotExist as e:
            # FIX: grammatical error in user-facing message ("not exist")
            # and explicit exception chaining (pylint W0707).
            raise CommandError(
                "ERROR: Session with upload code '{}' does not exist.".format(
                    options['uploadcode'])) from e

        session_knobs = SessionKnobManager.get_knob_min_max_tunability(
            session, tunable_only=options['tunable_only'])

        path = os.path.join(directory, options['filename'])

        # Sort keys so the dump is deterministic and diff-friendly.
        with open(path, 'w') as f:
            json.dump(OrderedDict(sorted(session_knobs.items())), f, indent=4)

        self.stdout.write(self.style.SUCCESS(
            "Successfully dumped knob information to '{}'.".format(path)))


# ---- server/website/website/management/commands/dumpwebsite.py ----
#
# OtterTune - dumpwebsite.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django.core.management import call_command
from django.core.management.base import BaseCommand
from fabric.api import hide, local


class Command(BaseCommand):
    help = 'dump the website.'

    def add_arguments(self, parser):
        parser.add_argument(
            '-d', '--dumpfile',
            metavar='FILE',
            default='dump_website.json',
            help="Name of the file to dump data to. "
                 "Default: 'dump_website.json[.gz]'")
        parser.add_argument(
            '--compress',
            action='store_true',
            help='Compress dump data (gzip). Default: False')

    def handle(self, *args, **options):
        """Dump all website app data to JSON, optionally gzipped."""
        dumpfile = options['dumpfile']
        compress = options['compress']
        if compress:
            # dumpdata writes uncompressed; gzip afterwards, so strip a
            # trailing '.gz' from the intermediate filename if present.
            if dumpfile.endswith('.gz'):
                dstfile = dumpfile
                dumpfile = dumpfile[:-len('.gz')]
            else:
                dstfile = dumpfile + '.gz'
        else:
            dstfile = dumpfile

        self.stdout.write("Dumping database to file '{}'...".format(dstfile))
        call_command('dumpdata', 'admin', 'auth', 'django_db_logger', 'djcelery', 'sessions',
                     'sites', 'website', output=dumpfile)

        if compress:
            with hide("commands"):  # pylint: disable=not-context-manager
                local("gzip {}".format(dumpfile))

        self.stdout.write(self.style.SUCCESS(
            "Successfully dumped website to '{}'.".format(dstfile)))
# ---- server/website/website/management/commands/getuploadcode.py ----
#
# OtterTune - getuploadcode.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django.core.management.base import BaseCommand

from website.models import Session


class Command(BaseCommand):
    help = 'Get the upload code for a session.'

    def add_arguments(self, parser):
        parser.add_argument(
            'username',
            metavar='USERNAME',
            help='Specifies the username of the session owner.')
        parser.add_argument(
            'projectname',
            metavar='PROJECTNAME',
            help='Specifies the name of the project that the session belongs to.')
        parser.add_argument(
            'sessionname',
            metavar='SESSIONNAME',
            help='Specifies the name of the session.')

    def handle(self, *args, **options):
        """Print the upload code of the matching session, if any."""
        username = options['username']
        projectname = options['projectname']
        sessionname = options['sessionname']
        session = Session.objects.filter(user__username=username,
                                         project__name=projectname,
                                         name=sessionname).first()
        if session:
            self.stdout.write(self.style.NOTICE(session.upload_code))
        else:
            self.stdout.write(self.style.NOTICE(
                "ERROR: Session '{}' for user '{}' under project '{}' does not exist.".format(
                    sessionname, username, projectname)))


# ---- server/website/website/management/commands/listusers.py ----
#
# OtterTune - listusers.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'List all users.'
    # Columns shown when no FIELDS are requested on the command line.
    default_fields = ('username', 'email', 'is_superuser')
    # Builds a left-aligned, fixed-width column format for index i, width w.
    item_fmt = '{{{i}: <{w}}}'.format
    col_space = 3

    def add_arguments(self, parser):
        parser.add_argument(
            'fields',
            nargs='*',
            default='DEFAULT',
            choices=[f.name for f in User._meta.get_fields()] + ['DEFAULT'],
            metavar='FIELDS',
            help='Fields from the User model to display. (default: {})'.format(
                list(self.default_fields)))

    def handle(self, *args, **options):
        """Print a fixed-width table of the requested User fields."""
        fields = options['fields']
        if fields == 'DEFAULT':
            fields = self.default_fields

        users = User.objects.values_list(*fields)
        self.stdout.write(self.style.NOTICE(
            '\nFound {} existing users.\n'.format(len(users))))
        if users:
            # Size each column to its widest value (header included).
            fmt = ''
            for i, field in enumerate(fields):
                w = max(len(field), max(len(str(u[i])) for u in users)) + self.col_space
                fmt += self.item_fmt(i=i, w=w)
            fmt = (fmt + '\n').format
            h = fmt(*fields)
            out = h + ('-' * (len(h) + 1)) + '\n'
            for user_info in users:
                out += fmt(*(str(ui) for ui in user_info))
            out += '\n'

            self.stdout.write(out)


# ---- server/website/website/management/commands/loadknob.py ----
#
# OtterTune - loadknob.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import json
import os
from argparse import RawTextHelpFormatter

from django.core.management.base import BaseCommand, CommandError

from website.models import Session, SessionKnob, SessionKnobManager

HELP = """
Load knobs for the session with the given upload code.

example of JSON file format:

    {
        "global.knob1": {
            "minval": 0,
            "maxval": 100,
            "tunable": true
        },
        "global.knob2": {
            "minval": 1000000,
            "maxval": 2000000,
            "tunable": false
        }
    }
"""


class Command(BaseCommand):
    help = HELP

    def create_parser(self, prog_name, subcommand):
        # Preserve the newlines/indentation in HELP when argparse renders it.
        parser = super(Command, self).create_parser(prog_name, subcommand)
        parser.formatter_class = RawTextHelpFormatter
        return parser

    def add_arguments(self, parser):
        parser.add_argument(
            'uploadcode',
            metavar='UPLOADCODE',
            help="The session's upload code.")
        parser.add_argument(
            '-f', '--filename',
            metavar='FILE',
            default='session_knobs.json',
            help='Name of the file to read the session knob tunability from. '
                 'Default: session_knobs.json')
        parser.add_argument(
            '-d', '--directory',
            metavar='DIR',
            help='Path of the directory to read the session knob tunability from. '
                 'Default: current directory')
        parser.add_argument(
            '--disable-others',
            action='store_true',
            help='Disable the knob tunability of all session knobs NOT included '
                 'in the JSON file. Default: False')

    def handle(self, *args, **options):
        """Load knob tunability settings from a JSON file into the session."""
        directory = options['directory'] or ''
        path = os.path.join(directory, options['filename'])

        try:
            with open(path, 'r') as f:
                knobs = json.load(f)
        except FileNotFoundError as e:
            # Explicit chaining so the original cause is preserved (W0707).
            raise CommandError("ERROR: File '{}' does not exist.".format(path)) from e
        except json.decoder.JSONDecodeError as e:
            raise CommandError("ERROR: Unable to decode JSON file '{}'.".format(path)) from e

        try:
            session = Session.objects.get(upload_code=options['uploadcode'])
        except Session.DoesNotExist as e:
            # FIX: grammatical error in user-facing message ("not exist").
            raise CommandError(
                "ERROR: Session with upload code '{}' does not exist.".format(
                    options['uploadcode'])) from e

        SessionKnobManager.set_knob_min_max_tunability(
            session, knobs, disable_others=options['disable_others'])

        # FIX: message tense ("Successfully load" -> "Successfully loaded").
        self.stdout.write(self.style.SUCCESS(
            "Successfully loaded knob information from '{}'.".format(path)))


# ---- server/website/website/management/commands/resetwebsite.py ----
#
# OtterTune - resetwebsite.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from django import db
from django.core.management import call_command
from django.core.management.base import BaseCommand
from fabric.api import local
from website.settings import DATABASES


class Command(BaseCommand):
    help = 'dump the website; reset the website; load data from file if specified.'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        engine = DATABASES['default']['ENGINE']
        user = DATABASES['default']['USER']
        passwd = DATABASES['default']['PASSWORD']
        host = DATABASES['default']['HOST']
        port = DATABASES['default']['PORT']

        # Build a shell-command template for the configured backend.
        # NOTE(review): the MySQL variant uses '-p' (interactive prompt)
        # rather than the configured password, unlike the Postgres one --
        # confirm whether that is intentional.
        if engine.endswith('mysql'):
            db_cmd_fmt = 'mysql -u {user} -p -h {host} -P {port} -N -B -e "{{cmd}}"'
        elif engine.endswith('postgresql'):
            db_cmd_fmt = 'PGPASSWORD={passwd} psql -U {user} -h {host} -p {port} -c "{{cmd}}"'
        else:
            raise NotImplementedError("Database engine '{}' is not implemented.".format(engine))

        self._db_cmd_fmt = db_cmd_fmt.format(user=user, passwd=passwd, host=host, port=port).format

    def call_db_command(self, cmd):
        """Run a raw SQL command through the backend's CLI client."""
        local(self._db_cmd_fmt(cmd=cmd))

    def add_arguments(self, parser):
        parser.add_argument(
            '-d', '--dumpfile',
            metavar='FILE',
            default='dump_website.json',
            help="Name of the file to dump data to. "
                 "Default: 'dump_website.json'")
        parser.add_argument(
            '-l', '--loadfile',
            metavar='FILE',
            help="Name of the file to load data from. "
                 "Default: '' (no data loaded)")

    def reset_website(self):
        # WARNING: destroys the existing website and creates with all
        # of the required inital data loaded (e.g., the KnobCatalog)

        # Recreate the ottertune database
        db.connections.close_all()
        dbname = DATABASES['default']['NAME']
        self.call_db_command("DROP DATABASE IF EXISTS {}".format(dbname))
        self.call_db_command("CREATE DATABASE {}".format(dbname))

        # Reinitialize the website
        call_command('makemigrations', 'website')
        call_command('migrate')
        call_command('startcelery')

    def handle(self, *args, **options):
        """Back up the site, rebuild it, then optionally reload data."""
        call_command('dumpwebsite', dumpfile=options['dumpfile'])
        call_command('stopcelery')

        self.reset_website()

        loadfile = options['loadfile']
        if loadfile:
            self.stdout.write("Loading database from file '{}'...".format(loadfile))
            call_command('loaddata', loadfile)

        self.stdout.write(self.style.SUCCESS("Successfully reset website."))
15 | 16 | def __init__(self, *args, **kwargs): 17 | super().__init__(*args, **kwargs) 18 | engine = DATABASES['default']['ENGINE'] 19 | user = DATABASES['default']['USER'] 20 | passwd = DATABASES['default']['PASSWORD'] 21 | host = DATABASES['default']['HOST'] 22 | port = DATABASES['default']['PORT'] 23 | 24 | if engine.endswith('mysql'): 25 | db_cmd_fmt = 'mysql -u {user} -p -h {host} -P {port} -N -B -e "{{cmd}}"' 26 | elif engine.endswith('postgresql'): 27 | db_cmd_fmt = 'PGPASSWORD={passwd} psql -U {user} -h {host} -p {port} -c "{{cmd}}"' 28 | else: 29 | raise NotImplementedError("Database engine '{}' is not implemented.".format(engine)) 30 | 31 | self._db_cmd_fmt = db_cmd_fmt.format(user=user, passwd=passwd, host=host, port=port).format 32 | 33 | def call_db_command(self, cmd): 34 | local(self._db_cmd_fmt(cmd=cmd)) 35 | 36 | def add_arguments(self, parser): 37 | parser.add_argument( 38 | '-d', '--dumpfile', 39 | metavar='FILE', 40 | default='dump_website.json', 41 | help="Name of the file to dump data to. " 42 | "Default: 'dump_website.json'") 43 | parser.add_argument( 44 | '-l', '--loadfile', 45 | metavar='FILE', 46 | help="Name of the file to load data from. 
" 47 | "Default: '' (no data loaded)") 48 | 49 | def reset_website(self): 50 | # WARNING: destroys the existing website and creates with all 51 | # of the required inital data loaded (e.g., the KnobCatalog) 52 | 53 | # Recreate the ottertune database 54 | db.connections.close_all() 55 | dbname = DATABASES['default']['NAME'] 56 | self.call_db_command("DROP DATABASE IF EXISTS {}".format(dbname)) 57 | self.call_db_command("CREATE DATABASE {}".format(dbname)) 58 | 59 | # Reinitialize the website 60 | call_command('makemigrations', 'website') 61 | call_command('migrate') 62 | call_command('startcelery') 63 | 64 | def handle(self, *args, **options): 65 | call_command('dumpwebsite', dumpfile=options['dumpfile']) 66 | call_command('stopcelery') 67 | 68 | self.reset_website() 69 | 70 | loadfile = options['loadfile'] 71 | if loadfile: 72 | self.stdout.write("Loading database from file '{}'...".format(loadfile)) 73 | call_command('loaddata', loadfile) 74 | 75 | self.stdout.write(self.style.SUCCESS("Successfully reset website.")) 76 | -------------------------------------------------------------------------------- /server/website/website/management/commands/setuploadcode.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - setuploadcode.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | from django.core.management.base import BaseCommand 7 | 8 | from website.models import Session 9 | from website.utils import MediaUtil 10 | 11 | 12 | class Command(BaseCommand): 13 | help = 'Set the upload code for a session.' 
14 | 15 | def add_arguments(self, parser): 16 | parser.add_argument( 17 | 'username', 18 | metavar='USERNAME', 19 | help='Specifies the username of the session owner.') 20 | parser.add_argument( 21 | 'projectname', 22 | metavar='PROJECTNAME', 23 | help='Specifies the name of the project that the session belongs to.') 24 | parser.add_argument( 25 | 'sessionname', 26 | metavar='SESSIONNAME', 27 | help='Specifies the name of the session.') 28 | parser.add_argument( 29 | '--uploadcode', 30 | metavar='UPLOADCODE', 31 | default=None, 32 | help='Specifies the value to set the upload code to.') 33 | 34 | def handle(self, *args, **options): 35 | username = options['username'] 36 | projectname = options['projectname'] 37 | sessionname = options['sessionname'] 38 | session = Session.objects.filter(user__username=username, 39 | project__name=projectname, 40 | name=sessionname).first() 41 | if session: 42 | upload_code = options['uploadcode'] or MediaUtil.upload_code_generator() 43 | session.upload_code = upload_code 44 | session.save() 45 | self.stdout.write(self.style.NOTICE(upload_code)) 46 | else: 47 | self.stdout.write(self.style.NOTICE( 48 | "ERROR: Session '{}' for user '{}' under project '{}' does not exist.".format( 49 | sessionname, username, projectname))) 50 | -------------------------------------------------------------------------------- /server/website/website/management/commands/stopcelery.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - stopcelery.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | import os 7 | import time 8 | 9 | from django.core.management.base import BaseCommand 10 | from fabric.api import local, quiet, settings 11 | 12 | 13 | class Command(BaseCommand): 14 | help = 'Stop celery and celerybeat and remove pid files.' 
15 | celery_cmd = 'python3 manage.py {cmd} {opts} &'.format 16 | max_wait_sec = 15 17 | 18 | def add_arguments(self, parser): 19 | parser.add_argument( 20 | '--celery-pidfile', 21 | metavar='PIDFILE', 22 | default='celery.pid', 23 | help="Alternate path to the process' pid file if not located at ./celery.pid.") 24 | parser.add_argument( 25 | '--celerybeat-pidfile', 26 | metavar='PIDFILE', 27 | default='celerybeat.pid', 28 | help="Alternate path to the process' pid file if not located at ./celerybeat.pid.") 29 | 30 | def handle(self, *args, **options): 31 | check_pidfiles = [] 32 | for name in ('celery', 'celerybeat'): 33 | try: 34 | pidfile = options[name + '_pidfile'] 35 | with open(pidfile, 'r') as f: 36 | pid = f.read() 37 | with settings(warn_only=True): 38 | local('kill {}'.format(pid)) 39 | check_pidfiles.append((name, pidfile)) 40 | except Exception as e: # pylint: disable=broad-except 41 | self.stdout.write(self.style.NOTICE( 42 | "WARNING: an exception occurred while stopping '{}': {}\n".format(name, e))) 43 | 44 | if check_pidfiles: 45 | self.stdout.write("Waiting for processes to shutdown...\n") 46 | for name, pidfile in check_pidfiles: 47 | wait_sec = 0 48 | while os.path.exists(pidfile) and wait_sec < self.max_wait_sec: 49 | time.sleep(1) 50 | wait_sec += 1 51 | if os.path.exists(pidfile): 52 | self.stdout.write(self.style.NOTICE(( 53 | "WARNING: file '{}' still exists after stopping {}. 
" 54 | "Removing it manually.").format( 55 | pidfile, name))) 56 | with quiet(): 57 | local('rm -f {}'.format(pidfile)) 58 | else: 59 | self.stdout.write(self.style.SUCCESS( 60 | "Successfully stopped '{}'.".format(name))) 61 | 62 | with quiet(): 63 | local("ps auxww | grep '[c]elery worker' | awk '{print $2}' | xargs kill -9") 64 | local("ps auxww | grep '[c]elerybeat' | awk '{print $2}' | xargs kill -9") 65 | -------------------------------------------------------------------------------- /server/website/website/migrations/0002_enable_compression.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import logging 4 | 5 | from django.db import migrations, ProgrammingError 6 | 7 | from website.settings import MYSQL_COMPRESSION 8 | 9 | LOG = logging.getLogger(__name__) 10 | 11 | TABLES_TO_COMPRESS = [ 12 | "website_backupdata", 13 | "website_knobdata", 14 | "website_metricdata", 15 | "website_pipelinedata", 16 | ] 17 | 18 | MYSQL_MIN_VERSION = (5, 7, 0) 19 | 20 | ALTER_SQL = "ALTER TABLE %s COMPRESSION='%s'" 21 | OPTIMIZE_SQL = "OPTIMIZE TABLE %s" 22 | 23 | 24 | def compression_supported(schema_editor): 25 | supported = False 26 | dbms = schema_editor.connection.vendor 27 | 28 | if dbms == 'mysql': 29 | with schema_editor.connection.cursor() as cursor: 30 | cursor.execute('SELECT VERSION()') 31 | res = cursor.fetchone()[0] 32 | 33 | version_str = res.split('-')[0] 34 | version = tuple(int(v) for v in version_str.split('.')) 35 | assert len(version) == len(MYSQL_MIN_VERSION), \ 36 | 'MySQL - current version: {}, min version: {}'.format(version, MYSQL_MIN_VERSION) 37 | 38 | if version >= MYSQL_MIN_VERSION: 39 | supported = True 40 | LOG.debug("%s %s: table compression supported.", dbms.upper(), version_str) 41 | else: 42 | LOG.debug("%s %s: table compression NOT supported.", dbms.upper(), version_str) 43 | else: 44 | LOG.debug("%s: table compression NOT supported.", dbms.upper()) 45 | 46 | return 
supported 47 | 48 | 49 | def enable_compression(apps, schema_editor): 50 | if compression_supported(schema_editor): 51 | for table in TABLES_TO_COMPRESS: 52 | schema_editor.execute(ALTER_SQL % (table, 'zlib')) 53 | schema_editor.execute(OPTIMIZE_SQL % table) 54 | 55 | 56 | def disable_compression(apps, schema_editor): 57 | try: 58 | if compression_supported(schema_editor): 59 | for table in TABLES_TO_COMPRESS: 60 | schema_editor.execute(ALTER_SQL % (table, 'none')) 61 | schema_editor.execute(OPTIMIZE_SQL % table) 62 | 63 | except ProgrammingError: 64 | LOG.warning("Error applying reverse migration '0002_enable_compression'... Skipping.") 65 | 66 | 67 | class Migration(migrations.Migration): 68 | 69 | atomic = False 70 | 71 | dependencies = [ 72 | ('website', '0001_initial'), 73 | ] 74 | 75 | if MYSQL_COMPRESSION: 76 | operations = [ 77 | migrations.RunPython(enable_compression, 78 | disable_compression) 79 | ] 80 | else: 81 | operations = [ 82 | migrations.RunSQL(migrations.RunSQL.noop, 83 | migrations.RunSQL.noop), 84 | ] 85 | 86 | -------------------------------------------------------------------------------- /server/website/website/migrations/0003_load_initial_data.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by Django 1.10.1 on 2017-12-07 19:51 3 | 4 | 5 | from django.core.management import call_command 6 | from django.db import migrations 7 | 8 | 9 | def load_initial_data(apps, schema_editor): 10 | initial_data_fixtures = [ 11 | "dbms_catalog.json", 12 | "postgres-96_knobs.json", 13 | "postgres-96_metrics.json", 14 | "postgres-92_knobs.json", 15 | "postgres-92_metrics.json", 16 | "postgres-93_knobs.json", 17 | "postgres-93_metrics.json", 18 | "postgres-94_knobs.json", 19 | "postgres-94_metrics.json", 20 | "myrocks-5.6_knobs.json", 21 | "myrocks-5.6_metrics.json", 22 | "oracle-12_knobs.json", 23 | "oracle-12_metrics.json", 24 | "oracle-121_knobs.json", 25 | 
"oracle-121_metrics.json", 26 | "oracle-19_knobs.json", 27 | "oracle-19_metrics.json", 28 | "mysql-56_knobs.json", 29 | "mysql-56_metrics.json", 30 | "mysql-57_knobs.json", 31 | "mysql-57_metrics.json", 32 | "mysql-80_knobs.json", 33 | "mysql-80_metrics.json", 34 | ] 35 | for fixture in initial_data_fixtures: 36 | call_command("loaddata", fixture, app_label="website") 37 | 38 | 39 | def unload_initial_data(apps, schema_editor): 40 | model_names = [ 41 | "DBMSCatalog", 42 | "KnobCatalog", 43 | "MetricCatalog", 44 | "Hardware" 45 | ] 46 | for model_name in model_names: 47 | model = apps.get_model("website", model_name) 48 | model.objects.all().delete() 49 | 50 | 51 | class Migration(migrations.Migration): 52 | 53 | dependencies = [ 54 | ('website', '0002_enable_compression'), 55 | ] 56 | 57 | operations = [ 58 | migrations.RunPython(load_initial_data, unload_initial_data) 59 | ] 60 | -------------------------------------------------------------------------------- /server/website/website/migrations/0004_add_lhs.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by Django 1.11.23 on 2019-11-07 00:58 3 | from __future__ import unicode_literals 4 | 5 | from django.db import migrations, models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('website', '0003_load_initial_data'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AddField( 16 | model_name='session', 17 | name='lhs_samples', 18 | field=models.TextField(default='[]'), 19 | ), 20 | migrations.AlterField( 21 | model_name='session', 22 | name='tuning_session', 23 | field=models.CharField(choices=[('tuning_session', 'Tuning Session'), ('no_tuning_session', 'No Tuning'), ('randomly_generate', 'Randomly Generate'), ('lhs', 'Run LHS')], default='tuning_session', max_length=64, verbose_name='session type'), 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- 
class Migration(migrations.Migration):
    """Attach each Workload to a Project and widen the uniqueness constraint.

    Adds a ``project`` FK on Workload (existing rows are assigned project
    id 1) and extends the unique-together key to include it.
    """

    dependencies = [
        ('website', '0004_add_lhs'),
    ]

    operations = [
        # default=1 backfills existing rows; preserve_default=False drops
        # the default from the schema once the column is populated.
        migrations.AddField(
            model_name='workload',
            name='project',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='website.Project'),
            preserve_default=False,
        ),
        migrations.AlterUniqueTogether(
            name='workload',
            unique_together=set([('dbms', 'hardware', 'name', 'project')]),
        ),
    ]
0.1,\n "DNN_NOISE_SCALE_END": 0.0,\n "DNN_TRAIN_ITER": 100,\n "FLIP_PROB_DECAY": 0.5,\n "GPR_BATCH_SIZE": 3000,\n "GPR_DEBUG": true,\n "GPR_EPS": 0.001,\n "GPR_EPSILON": 1e-06,\n "GPR_LEARNING_RATE": 0.01,\n "GPR_LENGTH_SCALE": 2.0,\n "GPR_MAGNITUDE": 1.0,\n "GPR_MAX_ITER": 500,\n "GPR_MAX_TRAIN_SIZE": 7000,\n "GPR_MU_MULTIPLIER": 1.0,\n "GPR_MODEL_NAME": "BasicGP",\n "GPR_HP_LEARNING_RATE": 0.001,\n "GPR_HP_MAX_ITER": 5000,\n "GPR_RIDGE": 1.0,\n "GPR_SIGMA_MULTIPLIER": 1.0,\n "GPR_UCB_SCALE": 0.2,\n "GPR_USE_GPFLOW": true,\n "GPR_UCB_BETA": "get_beta_td",\n "IMPORTANT_KNOB_NUMBER": 10000,\n "INIT_FLIP_PROB": 0.3,\n "NUM_SAMPLES": 30,\n "TF_NUM_THREADS": 4,\n "TOP_NUM_CONFIG": 10}'), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /server/website/website/migrations/0007_executiontime.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by Django 1.11.27 on 2020-01-24 00:15 3 | from __future__ import unicode_literals 4 | 5 | from django.db import migrations, models 6 | import django.db.models.deletion 7 | 8 | 9 | class Migration(migrations.Migration): 10 | 11 | dependencies = [ 12 | ('website', '0006_session_hyperparameters'), 13 | ] 14 | 15 | operations = [ 16 | migrations.CreateModel( 17 | name='ExecutionTime', 18 | fields=[ 19 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 20 | ('module', models.CharField(max_length=32)), 21 | ('function', models.CharField(max_length=32)), 22 | ('tag', models.CharField(blank=True, default='', max_length=64)), 23 | ('start_time', models.DateTimeField()), 24 | ('execution_time', models.FloatField()), 25 | ('result', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='website.Result')), 26 | ], 27 | ), 28 | ] 29 | -------------------------------------------------------------------------------- 
class Migration(migrations.Migration):
    """Make Result.task_ids a nullable TextField."""

    dependencies = [
        ('website', '0007_executiontime'),
    ]

    operations = [
        migrations.AlterField(
            model_name='result',
            name='task_ids',
            field=models.TextField(null=True),
        ),
    ]
class Migration(migrations.Migration):
    """Add optional per-session knob bound fields to SessionKnob.

    Both bounds are stored as strings (max 32 chars) and are nullable, so
    existing knobs keep no explicit override.
    """

    dependencies = [
        ('website', '0010_add_pipeline_data_field'),
    ]

    operations = [
        migrations.AddField(
            model_name='sessionknob',
            name='lowerbound',
            field=models.CharField(max_length=32, null=True, verbose_name='lowerbound'),
        ),
        migrations.AddField(
            model_name='sessionknob',
            name='upperbound',
            field=models.CharField(max_length=32, null=True, verbose_name='upperbound'),
        ),
    ]
class Migration(migrations.Migration):
    """Add BackupData.other, a TextField defaulting to '{}'.

    NOTE(review): the '{}' default suggests the field holds JSON-encoded
    data -- confirm against the code that writes it.
    """

    dependencies = [
        ('website', '0012_make_workload_status_editable'),
    ]

    operations = [
        migrations.AddField(
            model_name='backupdata',
            name='other',
            field=models.TextField(default='{}'),
        ),
    ]
#
# OtterTune - constants.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
"""Server-wide tuning-pipeline constants.

These parameters are not specified for any session, so they can only be
set here.
"""

from website.types import DBMSType

# These parameters are not specified for any session, so they can only be set here

# If this flag is set, we check if celery is running, and restart celery if it is not.
CHECK_CELERY = True

# address categorical knobs (enum, boolean)
ENABLE_DUMMY_ENCODER = False

# Whether to include the pruned metrics from the workload characterization subtask in
# the output (y) when ranking the knobs for a given workload in the knob identification
# subtask.

# When computing the ranked knobs in the knob identification subtask, the output (y) is
# the set of target objectives used to tune the given workload. If this flag is enabled
# then the pruned metrics from the workload characterization subtask are also included
# in the output. (See website/tasks/periodic_tasks.py)
KNOB_IDENT_USE_PRUNED_METRICS = False

# The background tasks only process workloads containing this minimum amount of results
MIN_WORKLOAD_RESULTS_COUNT = 5

# The views used for metrics pruning
VIEWS_FOR_PRUNING = {
    DBMSType.ORACLE: ['dba_hist_osstat', 'dba_hist_sysstat', 'dba_hist_system_event',
                      'dba_workload_replays', 'dba_hist_sys_time_model'],
}

# The views used for DDPG
# WARNING: modifying this parameter will break all existing DDPG sessions
VIEWS_FOR_DDPG = {
    DBMSType.ORACLE: ['dba_hist_sys_time_model'],
}
@import url("bootstrap.min.css");
@import url("bootstrap-select.min.css");

.table-nonfluid {
    width: auto;
}

.boxbody ul {
    list-style: none;
    margin: 0;
    padding: 0 0 0.2em;
    /* margin-bottom: 0.5em; */ /* fixed: '#' is not valid CSS comment syntax */
}
.boxbody ul li { padding-left: 0.3em; }

.plotcontainer {
    margin-bottom: 2em;
}

.FixedHeader_Cloned th { background-color: white; }


/* SIDEBAR */

#sidebar {
    font-size: x-small;
}


/* .container { */
/*     padding-left: 10px; */
/* } */

.gray-border {
    min-height: 20px;
    margin: 50px 140px;
    background-color: #f5f5f5;
    border: 2px solid #dcdcdc;
    border-radius: 4px;
    -webkit-box-shadow: inset 0 2px 2px rgba(0, 0, 0, 0.1);
    box-shadow: inset 0 2px 2px rgba(0, 0, 0, 0.1);
}

a {color: #3498db;}
a:hover {color: #217dbb;}

/* .navbar-default .navbar-brand { */
/*   color: #18bc9c; */ /* fixed: nested comment left a stray closing token */
/*   color: #ffffff; */
/*   font-weight: bold; */
/*   font-size: 22px; */
/* } */

/* BREADCRUMBS */

div.breadcrumbs {
    background: #79aec8;
    padding: 10px 40px;
    border: none;
    font-size: 14px;
    color: #c4dce8;
    text-align: left;
}

div.breadcrumbs a {
    color: #fff;
}

div.breadcrumbs a:focus, div.breadcrumbs a:hover {
    color: #c4dce8;
}

div.miniplot:hover { background-color: #F1F1F1; }

div.miniplot {
    cursor: pointer;
    margin: 0.5em;
    text-align: left;
    float: left;
    height: 200px;
    -moz-border-radius: 10px;
    -webkit-border-radius: 10px;
    border-radius: 10px;
}

/* #success_message{ display: none;} */
Varela Round 13 | */ 14 | 15 | .bg-img { 16 | position: absolute; 17 | left: 0; 18 | top: 0; 19 | right: 0; 20 | bottom: 0; 21 | z-index: -1; 22 | background-position: center; 23 | background-size: cover; 24 | background-attachment: fixed; 25 | } 26 | 27 | .bg-img .overlay { 28 | position: absolute; 29 | left: 0; 30 | top: 0; 31 | right: 0; 32 | bottom: 0; 33 | opacity: .8; 34 | background: #1C1D21; 35 | } 36 | 37 | /* -- Buttons -- */ 38 | 39 | .main-btn, .white-btn, .outline-btn { 40 | display: inline-block; 41 | padding: 10px 35px; 42 | margin: 3px; 43 | border: 2px solid transparent; 44 | border-radius: 3px; 45 | -webkit-transition: 0.2s opacity; 46 | transition: 0.2s opacity; 47 | } 48 | 49 | .main-btn { 50 | background: #6195FF; 51 | color: #FFF; 52 | } 53 | 54 | .white-btn { 55 | background: #FFF; 56 | color: #10161A !important; 57 | } 58 | 59 | .outline-btn { 60 | background: transparent; 61 | color: #6195FF !important; 62 | border-color: #6195FF; 63 | } 64 | 65 | .main-btn:hover, .white-btn:hover, .outline-btn:hover { 66 | opacity: 0.8; 67 | } 68 | 69 | .white-text { 70 | color: #FFF; 71 | list-style: none; 72 | font-size: 20pt; 73 | } 74 | 75 | 76 | /* Home Content */ 77 | 78 | #home { 79 | height: 100vh; 80 | } 81 | 82 | #home .home-wrapper { 83 | position: absolute; 84 | left: 0px; 85 | right: 0px; 86 | top: 50%; 87 | -webkit-transform: translateY(-50%); 88 | -ms-transform: translateY(-50%); 89 | transform: translateY(-50%); 90 | text-align: center; 91 | } 92 | 93 | .home-content h1 { 94 | text-transform: uppercase; 95 | font-size: 8em; 96 | } 97 | .home-content button { 98 | margin-top: 20px; 99 | } 100 | 101 | .header-wrapper h2 { 102 | display: inline-block; 103 | margin-bottom: 0px; 104 | } 105 | 106 | .header-wrapper .breadcrumb { 107 | float: right; 108 | background: transparent; 109 | margin-bottom: 0px; 110 | } 111 | 112 | .header-wrapper .breadcrumb .breadcrumb-item.active { 113 | color: #868F9B; 114 | } 115 | 116 | .breadcrumb>li+li:before { 
117 | color: #868F9B; 118 | } 119 | 120 | /* Footer Copyright */ 121 | 122 | .footer-copyright p { 123 | text-align: center; 124 | font-size: 14px; 125 | text-transform: uppercase; 126 | margin: 0; 127 | } 128 | -------------------------------------------------------------------------------- /server/website/website/static/fonts/glyphicons-halflings-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/fonts/glyphicons-halflings-regular.eot -------------------------------------------------------------------------------- /server/website/website/static/fonts/glyphicons-halflings-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/fonts/glyphicons-halflings-regular.ttf -------------------------------------------------------------------------------- /server/website/website/static/fonts/glyphicons-halflings-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/fonts/glyphicons-halflings-regular.woff -------------------------------------------------------------------------------- /server/website/website/static/fonts/glyphicons-halflings-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/fonts/glyphicons-halflings-regular.woff2 -------------------------------------------------------------------------------- /server/website/website/static/img/ajax-loader.gif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/img/ajax-loader.gif -------------------------------------------------------------------------------- /server/website/website/static/img/glyphicons-halflings-white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/img/glyphicons-halflings-white.png -------------------------------------------------------------------------------- /server/website/website/static/img/glyphicons-halflings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/img/glyphicons-halflings.png -------------------------------------------------------------------------------- /server/website/website/static/img/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/img/logo.png -------------------------------------------------------------------------------- /server/website/website/static/img/otter.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmu-db/ottertune/9758c65721d2624b813857ba9340d5550e899bda/server/website/website/static/img/otter.jpg -------------------------------------------------------------------------------- /server/website/website/static/img/sort_asc.png: -------------------------------------------------------------------------------- 
function readCheckbox(el) {
    /* Collect the values of all matched inputs into one comma-separated
       string; returns "none" when the result would be empty. */
    var values = [];
    $(el).each(function() {
        values.push($(this).val());
    });
    var joined = values.join(",");
    if (joined == "") {
        return "none";
    }
    return joined;
}
'; 18 | var pstyle = ""; 19 | if (h > 0) { 20 | h = h - 32; 21 | if(h < 80) { h = 180; } 22 | else if (h > 400) { h = 400; } 23 | pstyle = ' style="line-height:' + h + 'px;"'; 24 | } 25 | loadtext += ''+ text; 26 | if (showloader) { 27 | loadtext += ' '; 28 | } 29 | loadtext += '

function valueOrDefault(obj, defaultObj) {
    // Guard-clause form of the original ternary: fall back to defaultObj
    // whenever obj is falsy (null, undefined, "", 0, false).
    if (obj) {
        return obj;
    }
    return defaultObj;
}
this._elem.outerWidth(true)}else{var f=this._textRenderer;var c=f.getWidth(d);var e=f.getHeight(d);var b=Math.abs(Math.sin(f.angle)*e)+Math.abs(Math.cos(f.angle)*c);return b}};a.jqplot.CanvasAxisLabelRenderer.prototype.getHeight=function(d){if(this._elem){return this._elem.outerHeight(true)}else{var f=this._textRenderer;var c=f.getWidth(d);var e=f.getHeight(d);var b=Math.abs(Math.cos(f.angle)*e)+Math.abs(Math.sin(f.angle)*c);return b}};a.jqplot.CanvasAxisLabelRenderer.prototype.getAngleRad=function(){var b=this.angle*Math.PI/180;return b};a.jqplot.CanvasAxisLabelRenderer.prototype.draw=function(c,f){if(this._elem){if(a.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==undefined){window.G_vmlCanvasManager.uninitElement(this._elem.get(0))}this._elem.emptyForce();this._elem=null}var e=f.canvasManager.getCanvas();this._textRenderer.setText(this.label,c);var b=this.getWidth(c);var d=this.getHeight(c);e.width=b;e.height=d;e.style.width=b;e.style.height=d;e=f.canvasManager.initCanvas(e);this._elem=a(e);this._elem.css({position:"absolute"});this._elem.addClass("jqplot-"+this.axis+"-label");e=null;return this._elem};a.jqplot.CanvasAxisLabelRenderer.prototype.pack=function(){this._textRenderer.draw(this._elem.get(0).getContext("2d"),this.label)}})(jQuery); -------------------------------------------------------------------------------- /server/website/website/static/js/jqplot/jqplot.canvasAxisTickRenderer.min.js: -------------------------------------------------------------------------------- 1 | /* jqPlot 1.0.8r1250 | (c) 2009-2013 Chris Leonello | jplot.com 2 | jsDate | (c) 2010-2013 Chris Leonello 3 | 
*/(function(a){a.jqplot.CanvasAxisTickRenderer=function(b){this.mark="outside";this.showMark=true;this.showGridline=true;this.isMinorTick=false;this.angle=0;this.markSize=4;this.show=true;this.showLabel=true;this.labelPosition="auto";this.label="";this.value=null;this._styles={};this.formatter=a.jqplot.DefaultTickFormatter;this.formatString="";this.prefix="";this.fontFamily='"Trebuchet MS", Arial, Helvetica, sans-serif';this.fontSize="10pt";this.fontWeight="normal";this.fontStretch=1;this.textColor="#666666";this.enableFontSupport=true;this.pt2px=null;this._elem;this._ctx;this._plotWidth;this._plotHeight;this._plotDimensions={height:null,width:null};a.extend(true,this,b);var c={fontSize:this.fontSize,fontWeight:this.fontWeight,fontStretch:this.fontStretch,fillStyle:this.textColor,angle:this.getAngleRad(),fontFamily:this.fontFamily};if(this.pt2px){c.pt2px=this.pt2px}if(this.enableFontSupport){if(a.jqplot.support_canvas_text()){this._textRenderer=new a.jqplot.CanvasFontRenderer(c)}else{this._textRenderer=new a.jqplot.CanvasTextRenderer(c)}}else{this._textRenderer=new a.jqplot.CanvasTextRenderer(c)}};a.jqplot.CanvasAxisTickRenderer.prototype.init=function(b){a.extend(true,this,b);this._textRenderer.init({fontSize:this.fontSize,fontWeight:this.fontWeight,fontStretch:this.fontStretch,fillStyle:this.textColor,angle:this.getAngleRad(),fontFamily:this.fontFamily})};a.jqplot.CanvasAxisTickRenderer.prototype.getWidth=function(d){if(this._elem){return this._elem.outerWidth(true)}else{var f=this._textRenderer;var c=f.getWidth(d);var e=f.getHeight(d);var b=Math.abs(Math.sin(f.angle)*e)+Math.abs(Math.cos(f.angle)*c);return b}};a.jqplot.CanvasAxisTickRenderer.prototype.getHeight=function(d){if(this._elem){return this._elem.outerHeight(true)}else{var f=this._textRenderer;var c=f.getWidth(d);var e=f.getHeight(d);var b=Math.abs(Math.cos(f.angle)*e)+Math.abs(Math.sin(f.angle)*c);return b}};a.jqplot.CanvasAxisTickRenderer.prototype.getTop=function(b){if(this._elem){return 
this._elem.position().top}else{return null}};a.jqplot.CanvasAxisTickRenderer.prototype.getAngleRad=function(){var b=this.angle*Math.PI/180;return b};a.jqplot.CanvasAxisTickRenderer.prototype.setTick=function(b,d,c){this.value=b;if(c){this.isMinorTick=true}return this};a.jqplot.CanvasAxisTickRenderer.prototype.draw=function(c,f){if(!this.label){this.label=this.prefix+this.formatter(this.formatString,this.value)}if(this._elem){if(a.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==undefined){window.G_vmlCanvasManager.uninitElement(this._elem.get(0))}this._elem.emptyForce();this._elem=null}var e=f.canvasManager.getCanvas();this._textRenderer.setText(this.label,c);var b=this.getWidth(c);var d=this.getHeight(c);e.width=b;e.height=d;e.style.width=b;e.style.height=d;e.style.textAlign="left";e.style.position="absolute";e=f.canvasManager.initCanvas(e);this._elem=a(e);this._elem.css(this._styles);this._elem.addClass("jqplot-"+this.axis+"-tick");e=null;return this._elem};a.jqplot.CanvasAxisTickRenderer.prototype.pack=function(){this._textRenderer.draw(this._elem.get(0).getContext("2d"),this.label)}})(jQuery); -------------------------------------------------------------------------------- /server/website/website/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | from .async_tasks import (aggregate_target_results, 7 | configuration_recommendation, 8 | map_workload, 9 | train_ddpg, 10 | configuration_recommendation_ddpg) 11 | 12 | 13 | from .periodic_tasks import (run_background_tasks) 14 | -------------------------------------------------------------------------------- /server/website/website/templates/404.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Page not found{% endblock %} 4 | 5 | {% block body 
%} 6 |
7 |

Page not found

8 | 9 |

Sorry, but the requested page could not be found.

10 |
11 | {% endblock %} 12 | -------------------------------------------------------------------------------- /server/website/website/templates/change_password.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 | 4 |
5 |
{% csrf_token %} 6 |
7 | 8 | 9 |

Change Your Password

10 | 11 | 12 | 13 |
14 | 15 |
16 | 17 |
18 |
19 | 20 | 21 | 22 |
23 | 24 |
25 | 26 |
27 |
28 | 29 | 30 | 31 |
32 | 33 |
34 | 35 |
36 |
37 | 38 | 39 |
40 | 41 |
42 | 43 |
44 |
45 |
46 | 47 | {% endblock body %} 48 | -------------------------------------------------------------------------------- /server/website/website/templates/dbms_reference.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block extra_head %} 4 | 5 | 13 | 14 | {% endblock %} 15 | 16 | {% block body %} 17 | 18 |
19 |

{{ dbms }}

20 |
21 | {% if is_used %} 22 |
{{ title }} {{used_label}}
23 | {% else %} 24 |
{{ title }} {{used_label}}
25 | {% endif %} 26 |
27 | {% for label, value in list_items.items %} 28 |
29 |
{{ label }}
30 |
{{ value }}
31 |
32 | {% endfor %} 33 |
34 | 35 | 36 | {% endblock body %} 37 | -------------------------------------------------------------------------------- /server/website/website/templates/edit_knobs.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% load util_functions %} 4 | 5 | {% block body %} 6 |
7 |

Edit Knobs

8 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | {% for form in forms %} 20 | 21 | {% csrf_token %} 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | {% endfor %} 33 |
NameMinMaxLowerboundUpperboundTunable
{{ form.name }}{{ form.minval }}{{ form.maxval }}{{ form.lowerbound }}{{ form.upperbound }}{{ form.tunable }}
34 | Done 35 |
36 | 37 | 44 | 45 | {% endblock body %} 46 | -------------------------------------------------------------------------------- /server/website/website/templates/edit_project.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 |
5 |
{% csrf_token %} 6 | 7 | {{ form.as_table }} 8 |
9 | {% if project %} 10 | 11 | Cancel 12 | {% else %} 13 | 14 | Cancel 15 | {% endif %} 16 |
17 |
18 | {% endblock body %} 19 | -------------------------------------------------------------------------------- /server/website/website/templates/edit_session.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 |
5 | {% if session %} 6 |
7 | {% else %} 8 | 9 | {% endif %} 10 | {% csrf_token %} 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 |
{{ form.name.label_tag }}{{ form.name }}
{{ form.description.label_tag }}{{ form.description }}
{{ form.dbms.label_tag }}{{ form.dbms }}
{{ form.cpu.label_tag }}{{ form.cpu }}
{{ form.memory.label_tag }}{{ form.memory }}
{{ form.storage.label_tag }}{{ form.storage }}
{{ form.storage_type.label_tag }}{{ form.storage_type }}
{{ form.algorithm.label_tag }}{{ form.algorithm }}
{{ form.tuning_session.label_tag }}{{ form.tuning_session }}
{{ form.target_objective.label_tag }}{{ form.target_objective }}
{{ form.hyperparameters.label_tag }}{{ form.hyperparameters }}
{{ form.gen_upload_code.label_tag }}{{ form.gen_upload_code }}
61 | 62 | {% if session %} 63 | Cancel 64 | {% else %} 65 | Cancel 66 | {% endif %} 67 |
68 |
69 | 70 | 95 | 96 | {% endblock body %} 97 | -------------------------------------------------------------------------------- /server/website/website/templates/edit_workload.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 |
5 |
{% csrf_token %} 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 17 | 18 |

Edit Workload Information

Name:
19 | 20 | Cancel 21 |
22 |
23 | {% endblock body %} 24 | -------------------------------------------------------------------------------- /server/website/website/templates/home_projects.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 |
5 |
7 | {% csrf_token %} 8 | 9 | 10 | 11 | 12 | 13 | {% if show_descriptions %} 14 | 15 | {% endif %} 16 | 17 | 18 | 19 | {% for project in projects %} 20 | 21 | 22 | 23 | {% if show_descriptions %} 24 | 25 | {% endif %} 26 | 27 | 28 | 29 | {% endfor %} 30 |

{{ labels.title }}

{{ labels.name }}{{ labels.description }}{{ labels.creation_time }}{{ labels.last_update }}
{{ project.name }}{{ project.description|linebreaks }}{{ project.creation_time }}{{ project.last_update }}
31 | 32 | {{ labels.button_create }} 33 |
34 |
35 | 36 | 44 | 45 | {% endblock body %} 46 | -------------------------------------------------------------------------------- /server/website/website/templates/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 |
5 |
6 |
7 |
8 | 9 |
10 |
11 |
12 | 13 |
14 |
15 |

OtterTune

16 |
    17 |
  • Automatically tune your database management system's configuration
  • 18 |
  • Supported Systems: MySQL, Postgres, Vector
  • 19 |
20 |

Sign-up » 22 | Source Code » 24 | Learn More »

26 |
27 |
28 | 29 |
30 |
31 |
32 |
33 | 34 | 35 | 36 | 43 | 44 | 45 | 48 | {% endblock body %} 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | -------------------------------------------------------------------------------- /server/website/website/templates/pipeline_data.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 | 5 |
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |

Pipeline Data Info

ID:
{{ id }}
Creation Time:
{{ creation_time }}
Task Name:
{{ task_name }}
Workload:
{{ workload }}
27 |
28 | 29 |
30 | 31 | 32 | 33 | {% for element in data %} 34 | 35 | 36 | 37 | 38 | {% endfor %} 39 | 40 |

{{ task_name }}

{{forloop.counter}}
{{ element }}
41 |
42 | 43 | {% endblock body %} 44 | -------------------------------------------------------------------------------- /server/website/website/templates/project_sessions.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 |
5 |
7 | {% csrf_token %} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | {% for session in sessions %} 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | {% endfor %} 32 |

{{ labels.title }}

{{ labels.name }}{{ labels.dbms }}{{ labels.description }}{{ labels.result_count }}{{ labels.algorithm }}
{{ session.name }}{{ session.dbms.full_name }}{{ session.description }}{{ session.result_count }}{{ session.algorithm_name }}
33 | 34 | {{ labels.button_create }} 35 |
36 |
37 | 38 | 46 | 47 | {% endblock body %} 48 | -------------------------------------------------------------------------------- /server/website/website/templates/signup.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 | 4 |
5 |
{% csrf_token %} 6 |
7 | 8 | 9 |

Create Your Account

10 | 11 | 12 | 13 |
14 | 15 |
16 | 17 |
18 |
19 | 20 | 21 | 22 |
23 | 24 |
25 | 26 |
27 |
28 | 29 | 30 | 31 |
32 | 33 |
34 | 35 |
36 |
37 | 38 | 39 | 40 |
41 | 42 |
43 | 44 |
45 |
46 | 47 | 48 |
49 | 50 |
51 | 52 |
53 |
54 |
55 | 56 | {% endblock body %} 57 | -------------------------------------------------------------------------------- /server/website/website/templates/task_status.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block body %} 4 | 5 |
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | {% if result.pipeline_knobs %} 42 | 43 | 44 | 45 | 46 | {% endif %} 47 | {% if result.pipeline_metrics %} 48 | 49 | 50 | 51 | 52 | {% endif %} 53 | 54 |

Task Info

ID:
{{ id }}
Project:
{{ result.session.project.name|linebreaks }}
Session:
{{result.session.name}}
Overall status:
{{ overall_status }}
Tasks completed:
{{ num_completed }}
Start time:
{{ result.creation_time }}
Completion time:
{{ completion_time }}
Total runtime:
{{ total_runtime }}
Ranked Knobs:
Ranked Knobs
Pruned Metrics:
Pruned Metrics
55 | 56 | {% for task_name, task in tasks %} 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | {% if task.traceback %} 65 | 66 | 67 | 68 | 69 | {% endif %} 70 | 71 | 72 | 73 | 74 | 75 |

{{ task_name }}

Status:
{{ task.status }}
Traceback:
{{ task.traceback }}
Result:
{{ task.result|linebreaks }}
76 | {% endfor %} 77 |
78 | 79 | 80 | {% endblock body %} 81 | -------------------------------------------------------------------------------- /server/website/website/templatetags/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - __init__.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | -------------------------------------------------------------------------------- /server/website/website/templatetags/util_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - util_functions.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | ''' 7 | Created on Aug 14, 2017 8 | 9 | @author: dvanaken 10 | ''' 11 | 12 | from django import template 13 | 14 | register = template.Library() # pylint: disable=invalid-name 15 | 16 | 17 | @register.filter 18 | def get_item(dictionary, key): 19 | return dictionary.get(key) 20 | 21 | 22 | @register.filter 23 | def get_attr(instance, attr_name): 24 | return getattr(instance, attr_name) 25 | 26 | 27 | @register.filter 28 | def keys(dictionary): 29 | return list(dictionary.keys()) 30 | 31 | 32 | @register.filter 33 | def safe_floatformat(text, arg=-2): 34 | val = template.defaultfilters.floatformat(text, arg) 35 | return val if val != '' else text 36 | -------------------------------------------------------------------------------- /server/website/website/wsgi.py: -------------------------------------------------------------------------------- 1 | # 2 | # OtterTune - wsgi.py 3 | # 4 | # Copyright (c) 2017-18, Carnegie Mellon University Database Group 5 | # 6 | """ 7 | WSGI config for the OtterTune website. 8 | 9 | This module contains the WSGI application used by Django's development server 10 | and any production WSGI deployments. It should expose a module-level variable 11 | named ``application``. 
Django's ``runserver`` and ``runfcgi`` commands discover 12 | this application via the ``WSGI_APPLICATION`` setting. 13 | 14 | Usually you will have the standard Django WSGI application here, but it also 15 | might make sense to replace the whole Django WSGI application with a custom one 16 | that later delegates to the Django one. For example, you could introduce WSGI 17 | middleware here, or combine a Django application with an application of another 18 | framework. 19 | 20 | """ 21 | import os 22 | import sys 23 | 24 | from django.core.wsgi import get_wsgi_application 25 | 26 | 27 | sys.path.append('/var/www/ottertune') 28 | sys.path.append('/var/www/ottertune/website') 29 | 30 | # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks 31 | # if running multiple sites in the same mod_wsgi process. To fix this, use 32 | # mod_wsgi daemon mode with each site in its own daemon process, or use 33 | os.environ["DJANGO_SETTINGS_MODULE"] = "website.settings" 34 | 35 | # This application object is used by any WSGI server configured to use this 36 | # file. This includes Django's development server, if the WSGI_APPLICATION 37 | # setting points here. 38 | application = get_wsgi_application() # pylint: disable=invalid-name 39 | --------------------------------------------------------------------------------